xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision 5edf65166e22df6feda2ed6bd8a6e348419dee17)
1 /*
2   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3   In this version each processor may intersect multiple subdomains and any subdomain may
4   intersect multiple processors.  Intersections of subdomains with processors are called *local
5   subdomains*.
6 
7        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
8        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
10 */
11 #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
12 
/* Private context for the generalized additive Schwarz (GASM) preconditioner. */
typedef struct {
  PetscInt   N,n,nmax;            /* N: total subdomains over all ranks; n: subdomains on this rank; nmax: max subdomains on any rank (see header comment for where each is set) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
  VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
  IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
  IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType type;                /* use reduced interpolation, restriction or both */
  PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
  PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;           /* flag to sort subdomain indices */
} PC_GASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCGASMSubdomainView_Private"
33 static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
34 {
35   PC_GASM        *osm  = (PC_GASM*)pc->data;
36   PetscInt       j,nidx;
37   const PetscInt *idx;
38   PetscViewer    sviewer;
39   char           *cidx;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBegin;
43   if (i < 0 || i > osm->n) SETERRQ2(((PetscObject)viewer)->comm, PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must nonnegative and less than %D", i, osm->n);
44   /* Inner subdomains. */
45   ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
46   /*
47    No more than 15 characters per index plus a space.
48    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
49    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
50    For nidx == 0, the whole string 16 '\0'.
51    */
52   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
53   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer); CHKERRQ(ierr);
54   ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
55   for (j = 0; j < nidx; ++j) {
56     ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
57   }
58   ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
59   ierr = PetscViewerDestroy(&sviewer); CHKERRQ(ierr);
60   ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");  CHKERRQ(ierr);
61   ierr = PetscViewerFlush(viewer);                              CHKERRQ(ierr);
62   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);  CHKERRQ(ierr);
63   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
64   ierr = PetscViewerFlush(viewer);                              CHKERRQ(ierr);
65   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE); CHKERRQ(ierr);
66   ierr = PetscViewerASCIIPrintf(viewer, "\n");                  CHKERRQ(ierr);
67   ierr = PetscViewerFlush(viewer);                              CHKERRQ(ierr);
68   ierr = PetscFree(cidx);                                       CHKERRQ(ierr);
69   /* Outer subdomains. */
70   ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
71   /*
72    No more than 15 characters per index plus a space.
73    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
74    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
75    For nidx == 0, the whole string 16 '\0'.
76    */
77   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
78   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer); CHKERRQ(ierr);
79   ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
80   for (j = 0; j < nidx; ++j) {
81     ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]); CHKERRQ(ierr);
82   }
83   ierr = PetscViewerDestroy(&sviewer); CHKERRQ(ierr);
84   ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
85   ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
86   ierr = PetscViewerFlush(viewer);                            CHKERRQ(ierr);
87   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
88   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
89   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
90   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
91   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
92   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
93   ierr = PetscFree(cidx);CHKERRQ(ierr);
94 
95   PetscFunctionReturn(0);
96 }
97 
#undef __FUNCT__
#define __FUNCT__ "PCGASMPrintSubdomains"
/*
   Print all subdomains of this PC, in global subdomain order, to the file named by
   -pc_gasm_print_subdomains (default: stdout).  Collective on the PC's communicator:
   every rank must participate in every iteration of the printing loop, hence the
   barrier at the bottom of the loop.
*/
static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
{
  PC_GASM        *osm  = (PC_GASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscInt       i, l, d, count, gcount, *permutation, *numbering;
  PetscBool      found;
  PetscViewer    viewer, sviewer = PETSC_NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Compute a global numbering of the (possibly parallel) subdomains and a local
     permutation that lists the locally-supported subdomains in that global order. */
  ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering); CHKERRQ(ierr);
  for (i = 0; i < osm->n; ++i) permutation[i] = i;
  ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering); CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation); CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
  if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
  ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
  /*
   Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
   the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
  */
  ierr = PetscObjectName((PetscObject)viewer);                  CHKERRQ(ierr);
  l = 0;
  for (count = 0; count < gcount; ++count) {
    /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
    if (l<osm->n){
      d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
      if (numbering[d] == count) {
        ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ierr = PCGASMSubdomainView_Private(pc,d,sviewer); CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ++l;
      }
    }
    /* Keep all ranks in lockstep so subdomain output is not interleaved. */
    ierr = MPI_Barrier(((PetscObject)pc)->comm); CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr);
  ierr = PetscFree2(permutation,numbering); CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
142 
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "PCView_GASM"
146 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
147 {
148   PC_GASM        *osm = (PC_GASM*)pc->data;
149   const char     *prefix;
150   PetscErrorCode ierr;
151   PetscMPIInt    rank, size;
152   PetscInt       i,bsz;
153   PetscBool      iascii,view_subdomains=PETSC_FALSE;
154   PetscViewer    sviewer;
155   PetscInt       count, l, gcount, *numbering, *permutation;
156   char overlap[256]     = "user-defined overlap";
157   char gsubdomains[256] = "unknown total number of subdomains";
158   char lsubdomains[256] = "unknown number of local  subdomains";
159   char msubdomains[256] = "unknown max number of local subdomains";
160   PetscFunctionBegin;
161   ierr = MPI_Comm_size(((PetscObject)pc)->comm, &size);CHKERRQ(ierr);
162   ierr = MPI_Comm_rank(((PetscObject)pc)->comm, &rank);CHKERRQ(ierr);
163 
164 
165   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering); CHKERRQ(ierr);
166   for (i = 0; i < osm->n; ++i) permutation[i] = i;
167   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering); CHKERRQ(ierr);
168   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation); CHKERRQ(ierr);
169 
170   if (osm->overlap >= 0) {
171     ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);        CHKERRQ(ierr);
172   }
173   ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);      CHKERRQ(ierr);
174   if (osm->N > 0) {
175     ierr = PetscSNPrintf(lsubdomains, sizeof(gsubdomains), "number of local subdomains = %D",osm->N);     CHKERRQ(ierr);
176   }
177   if (osm->nmax > 0){
178     ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
179   }
180 
181   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
182   ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,PETSC_NULL);CHKERRQ(ierr);
183 
184   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii); CHKERRQ(ierr);
185   if (iascii) {
186     /*
187      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
188      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
189      collectively on the comm.
190      */
191     ierr = PetscObjectName((PetscObject)viewer);                  CHKERRQ(ierr);
192     ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n"); CHKERRQ(ierr);
193     ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
194     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);    CHKERRQ(ierr);
195     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
196     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
197     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
198     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
199     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr);
200     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
201     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
202     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
203     ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
204     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
205     ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
206     /* Make sure that everybody waits for the banner to be printed. */
207     ierr = MPI_Barrier(((PetscObject)viewer)->comm); CHKERRQ(ierr);
208     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
209     l = 0;
210     for (count = 0; count < gcount; ++count) {
211       PetscMPIInt srank, ssize;
212       if (l<osm->n){
213         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
214         if (numbering[d] == count) {
215           ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize); CHKERRQ(ierr);
216           ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank); CHKERRQ(ierr);
217           ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
218           ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
219           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
220           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%D:%D] (subcomm [%D:%D]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr);
221           ierr = PetscViewerFlush(sviewer); CHKERRQ(ierr);
222           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
223           if (view_subdomains) {
224             ierr = PCGASMSubdomainView_Private(pc,d,sviewer); CHKERRQ(ierr);
225           }
226           if (!pc->setupcalled) {
227             PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n"); CHKERRQ(ierr);
228           }
229           else {
230             ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
231           }
232           ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
233           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
234           ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
235           ++l;
236         }
237       }
238       ierr = MPI_Barrier(((PetscObject)pc)->comm); CHKERRQ(ierr);
239     }
240     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
241   } else {
242     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
243   }
244   ierr = PetscFree2(permutation,numbering); CHKERRQ(ierr);
245   PetscFunctionReturn(0);
246 }
247 
248 
249 
250 
251 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
/*
   Set up the GASM preconditioner:
     1. Determine the subdomains (user-supplied, DM-derived, or one per process).
     2. Merge the outer/inner subdomain ISs into disjoint unions and build the
        restriction scatters (gorestriction/girestriction) and merged work
        vectors gx/gy, plus per-subdomain subvectors aliasing their storage.
     3. Extract the subdomain submatrices and configure one KSP per subdomain.
   On repeated calls only the submatrices/KSP operators are refreshed.
*/
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM         *osm  = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i;
  PetscMPIInt    rank, size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            x,y;
  PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
  const PetscInt *oidxi;    /* Indices from the i-th subdomain local outer subdomain.             */
  PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
  PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
  IS             gois;      /* Disjoint union the global indices of outer subdomains.             */
  IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
  PetscScalar    *gxarray, *gyarray;
  PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy --
                             over the disjoint union of outer subdomains. */
  DM             *subdomain_dm = PETSC_NULL; /* NOTE(review): filled by DMCreateDomainDecomposition() below but never used or freed here -- confirm ownership */

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  if (!pc->setupcalled) {

    /* Default to BASIC (symmetric restriction+interpolation) when the matrix is known symmetric
       and the user did not choose a type explicitly. */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_GASM_BASIC; }
    }

    /*
     If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
     The total number of subdomains, osm->N is not necessarily set, might be PETSC_DECIDE, and then will have to be calculated from osm->n.
     */
    if (osm->n == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first */
      if (pc->dm) {
        char      ddm_name[1024];
        DM        ddm;
        PetscBool flg;
        PetscInt     num_subdomains, d;
        char         **subdomain_names;
        IS           *inner_subdomain_is, *outer_subdomain_is;
        /* Allow the user to request a decomposition DM by name */
        ierr = PetscStrncpy(ddm_name, "", 1024); CHKERRQ(ierr);
        ierr = PetscOptionsString("-pc_gasm_decomposition","Name of the DM defining the decomposition", "PCSetDM",ddm_name,ddm_name,1024,&flg); CHKERRQ(ierr);
        if (flg) {
          ierr = DMCreateDomainDecompositionDM(pc->dm, ddm_name, &ddm); CHKERRQ(ierr);
          if (!ddm) {
            SETERRQ1(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Uknown DM decomposition name %s", ddm_name);
          }
          ierr = PetscInfo(pc,"Using decomposition DM defined using options database\n");CHKERRQ(ierr);
          ierr = PCSetDM(pc,ddm); CHKERRQ(ierr);
        }
        ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);    CHKERRQ(ierr);
        if (num_subdomains) {
          ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
        }
        /* PCGASMSetSubdomains() took its own references; release ours. */
        for (d = 0; d < num_subdomains; ++d) {
          if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);       CHKERRQ(ierr);}
          if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);   CHKERRQ(ierr);}
          if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);   CHKERRQ(ierr);}
        }
        ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
      }
      if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
        osm->nmax = osm->n = 1;
        ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
        osm->N = size;
      }
    }
    if (!osm->iis){
      /*
       The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
       but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
       We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
       */
      ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
    }
    if (osm->N == PETSC_DECIDE) {
      PetscInt inwork[2], outwork[2];
      /* determine global number of subdomains and the max number of local subdomains */
      inwork[0] = inwork[1] = osm->n;
      /* PetscMaxSum_Op on MPIU_2INT: outwork[0] = max over ranks, outwork[1] = sum over ranks */
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->nmax = outwork[0];
      osm->N    = outwork[1];
    }

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    /* Sorted indices make the scatter construction and submatrix extraction deterministic. */
    if (osm->sort_indices) {
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
      }
    }
    /*
     Merge the ISs, create merged vectors and restrictions.
     */
    /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
    on = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      on += oni;
    }
    ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr);
    on = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
      ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
      on += oni;
    }
    /* gois takes ownership of oidx (PETSC_OWN_POINTER): do not free oidx here */
    ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(((PetscObject)pc)->comm, on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &gofirst, PETSC_NULL);CHKERRQ(ierr);
    ierr = ISCreateStride(((PetscObject)pc)->comm,on,gofirst,1, &goid);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
    ierr = VecDestroy(&x); CHKERRQ(ierr);
    ierr = ISDestroy(&gois); CHKERRQ(ierr);
    /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
    { PetscInt       ini;     /* Number of indices the i-th a local inner subdomain. */
      PetscInt       in;      /* Number of indices in the disjoint uniont of local inner subdomains. */
      PetscInt       *iidx;   /* Global indices in the merged local inner subdomain. */
      PetscInt       *ioidx;  /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      IS             giis;    /* IS for the disjoint union of inner subdomains. */
      IS             giois;   /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      /**/
      in = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        in += ini;
      }
      ierr = PetscMalloc(in*sizeof(PetscInt), &iidx); CHKERRQ(ierr);
      ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(osm->gx,&gofirst, PETSC_NULL); CHKERRQ(ierr);
      in = 0;
      on = 0;
      for (i=0; i<osm->n; i++) {
        const PetscInt *iidxi;        /* Global indices of the i-th local inner subdomain. */
        ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
        PetscInt       *ioidxi;       /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
        PetscInt       ioni;          /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
        PetscInt       k;
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
        ierr = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
        /* Locate each inner index within its outer subdomain; IS_GTOLM_DROP drops indices
           outside the outer subdomain, which is then detected as an error via ioni != ini. */
        ierr = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
        ioidxi = ioidx+in;
        ierr = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
        ierr = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
        if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
        /* Shift subdomain-local offsets to global offsets within gx/gy. */
        for (k = 0; k < ini; ++k) {
          ioidxi[k] += gofirst+on;
        }
        in += ini;
        on += oni;
      }
      ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
      ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
      ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
      ierr = VecDestroy(&y); CHKERRQ(ierr);
      ierr = ISDestroy(&giis);  CHKERRQ(ierr);
      ierr = ISDestroy(&giois); CHKERRQ(ierr);
    }
    ierr = ISDestroy(&goid);CHKERRQ(ierr);
    /* Create the subdomain work vectors: each osm->x[i]/osm->y[i] aliases a slice of gx/gy. */
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
    for (i=0, on=0; i<osm->n; ++i, on += oni) {
      PetscInt oNi;
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
    /* Create the local solvers */
    ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  }/*if (!pc->setupcalled)*/
  else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  if (size > 1) {
    ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  }
  else {
    ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  }
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over submatrices putting them into local ksps
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
507 
508 #undef __FUNCT__
509 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
510 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
511 {
512   PC_GASM         *osm = (PC_GASM*)pc->data;
513   PetscErrorCode ierr;
514   PetscInt       i;
515 
516   PetscFunctionBegin;
517   for (i=0; i<osm->n; i++) {
518     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
519   }
520   PetscFunctionReturn(0);
521 }
522 
523 #undef __FUNCT__
524 #define __FUNCT__ "PCApply_GASM"
525 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
526 {
527   PC_GASM         *osm = (PC_GASM*)pc->data;
528   PetscErrorCode ierr;
529   PetscInt       i;
530   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
531 
532   PetscFunctionBegin;
533   /*
534      Support for limiting the restriction or interpolation only to the inner
535      subdomain values (leaving the other values 0).
536   */
537   if (!(osm->type & PC_GASM_RESTRICT)) {
538     /* have to zero the work RHS since scatter may leave some slots empty */
539     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
540     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
541   }
542   else {
543     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
544   }
545   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
546   if (!(osm->type & PC_GASM_RESTRICT)) {
547     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
548   }
549   else {
550     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
551   }
552   /* do the subdomain solves */
553   for (i=0; i<osm->n; ++i) {
554     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
555   }
556   ierr = VecZeroEntries(y); CHKERRQ(ierr);
557   if (!(osm->type & PC_GASM_INTERPOLATE)) {
558     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
559     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
560   }
561   else {
562     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
563     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
564   }
565 }
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "PCApplyTranspose_GASM"
569 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
570 {
571   PC_GASM         *osm = (PC_GASM*)pc->data;
572   PetscErrorCode ierr;
573   PetscInt       i;
574   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
575 
576   PetscFunctionBegin;
577   /*
578      Support for limiting the restriction or interpolation to only local
579      subdomain values (leaving the other values 0).
580 
581      Note: these are reversed from the PCApply_GASM() because we are applying the
582      transpose of the three terms
583   */
584   if (!(osm->type & PC_GASM_INTERPOLATE)) {
585     /* have to zero the work RHS since scatter may leave some slots empty */
586     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
587     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
588   }
589   else {
590     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
591   }
592   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
593   if (!(osm->type & PC_GASM_INTERPOLATE)) {
594     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
595   }
596   else {
597     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
598   }
599   /* do the local solves */
600   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
601     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
602   }
603   ierr = VecZeroEntries(y); CHKERRQ(ierr);
604   if (!(osm->type & PC_GASM_RESTRICT)) {
605     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
606     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
607   }
608   else {
609     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
610     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
611   }
612 
613   PetscFunctionReturn(0);
614 }
615 
616 #undef __FUNCT__
617 #define __FUNCT__ "PCReset_GASM"
618 static PetscErrorCode PCReset_GASM(PC pc)
619 {
620   PC_GASM        *osm = (PC_GASM*)pc->data;
621   PetscErrorCode ierr;
622   PetscInt       i;
623 
624   PetscFunctionBegin;
625   if (osm->ksp) {
626     for (i=0; i<osm->n; i++) {
627       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
628     }
629   }
630   if (osm->pmat) {
631     if (osm->n > 0) {
632       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
633     }
634   }
635   if (osm->x) {
636     for (i=0; i<osm->n; i++) {
637       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
638       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
639     }
640   }
641   ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
642   ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);
643 
644   ierr = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
645   ierr = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
646   ierr = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
647   osm->ois = 0;
648   osm->iis = 0;
649   PetscFunctionReturn(0);
650 }
651 
652 #undef __FUNCT__
653 #define __FUNCT__ "PCDestroy_GASM"
654 static PetscErrorCode PCDestroy_GASM(PC pc)
655 {
656   PC_GASM         *osm = (PC_GASM*)pc->data;
657   PetscErrorCode ierr;
658   PetscInt       i;
659 
660   PetscFunctionBegin;
661   ierr = PCReset_GASM(pc);CHKERRQ(ierr);
662   if (osm->ksp) {
663     for (i=0; i<osm->n; i++) {
664       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
665     }
666     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
667   }
668   ierr = PetscFree(osm->x);CHKERRQ(ierr);
669   ierr = PetscFree(osm->y);CHKERRQ(ierr);
670   ierr = PetscFree(pc->data);CHKERRQ(ierr);
671   PetscFunctionReturn(0);
672 }
673 
674 #undef __FUNCT__
675 #define __FUNCT__ "PCSetFromOptions_GASM"
676 static PetscErrorCode PCSetFromOptions_GASM(PC pc) {
677   PC_GASM         *osm = (PC_GASM*)pc->data;
678   PetscErrorCode ierr;
679   PetscInt       blocks,ovl;
680   PetscBool      symset,flg;
681   PCGASMType      gasmtype;
682 
683   PetscFunctionBegin;
684   /* set the type to symmetric if matrix is symmetric */
685   if (!osm->type_set && pc->pmat) {
686     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
687     if (symset && flg) { osm->type = PC_GASM_BASIC; }
688   }
689   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
690     ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
691     osm->create_local = PETSC_TRUE;
692     ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,&flg);CHKERRQ(ierr);
693     if (!osm->create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains yet.");
694 
695     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr); }
696     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
697     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
698     flg  = PETSC_FALSE;
699     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
700     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
701   ierr = PetscOptionsTail();CHKERRQ(ierr);
702   PetscFunctionReturn(0);
703 }
704 
705 /*------------------------------------------------------------------------------------*/
706 
707 EXTERN_C_BEGIN
708 #undef __FUNCT__
709 #define __FUNCT__ "PCGASMSetSubdomains_GASM"
710 PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
711 {
712   PC_GASM         *osm = (PC_GASM*)pc->data;
713   PetscErrorCode ierr;
714   PetscInt       i;
715 
716   PetscFunctionBegin;
717   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
718   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
719 
720   if (!pc->setupcalled) {
721     osm->n       = n;
722     osm->ois     = 0;
723     osm->iis     = 0;
724     if (ois) {
725       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
726     }
727     if (iis) {
728       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
729     }
730     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
731     if (ois) {
732       ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
733       for (i=0; i<n; i++) { osm->ois[i] = ois[i]; }
734       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
735       osm->overlap = -1;
736       if (!iis) {
737         ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
738         for (i=0; i<n; i++) {
739           for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
740           osm->iis[i] = ois[i];
741         }
742       }
743     }
744     if (iis) {
745       ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
746       for (i=0; i<n; i++) { osm->iis[i] = iis[i]; }
747       if (!ois) {
748         ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
749         for (i=0; i<n; i++) {
750           for (i=0; i<n; i++) {
751             ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
752             osm->ois[i] = iis[i];
753           }
754         }
755         if (osm->overlap > 0) {
756           /* Extend the "overlapping" regions by a number of steps */
757           ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
758         }
759       }
760     }
761   }
762   PetscFunctionReturn(0);
763 }
764 EXTERN_C_END
765 
766 EXTERN_C_BEGIN
767 #undef __FUNCT__
768 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
769 PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local) {
770   PC_GASM         *osm = (PC_GASM*)pc->data;
771   PetscErrorCode ierr;
772   PetscMPIInt    rank,size;
773   PetscInt       n;
774   PetscInt       Nmin, Nmax;
775   PetscFunctionBegin;
776   if (!create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No suppor for autocreation of nonlocal subdomains.");
777   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
778   ierr = MPI_Allreduce(&N,&Nmin,1,MPI_INT,MPI_MIN,((PetscObject)pc)->comm); CHKERRQ(ierr);
779   ierr = MPI_Allreduce(&N,&Nmax,1,MPI_INT,MPI_MAX,((PetscObject)pc)->comm); CHKERRQ(ierr);
780   if (Nmin != Nmax)
781     SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);
782 
783   osm->create_local = create_local;
784   /*
785      Split the subdomains equally among all processors
786   */
787   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
788   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
789   n = N/size + ((N % size) > rank);
790   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
791   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
792   if (!pc->setupcalled) {
793     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
794     osm->N            = N;
795     osm->n            = n;
796     osm->nmax         = N/size + ((N%size)?1:0);
797     osm->ois           = 0;
798     osm->iis     = 0;
799   }
800   PetscFunctionReturn(0);
801 }
802 EXTERN_C_END
803 
804 EXTERN_C_BEGIN
805 #undef __FUNCT__
806 #define __FUNCT__ "PCGASMSetOverlap_GASM"
807 PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
808 {
809   PC_GASM *osm = (PC_GASM*)pc->data;
810 
811   PetscFunctionBegin;
812   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
813   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
814   if (!pc->setupcalled) {
815     osm->overlap = ovl;
816   }
817   PetscFunctionReturn(0);
818 }
819 EXTERN_C_END
820 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType_GASM"
/* Record the GASM restriction/interpolation variant chosen by the user. */
PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  /* remember the user chose this explicitly, so PCSetFromOptions_GASM() will not
     override it with PC_GASM_BASIC for symmetric problems */
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END
834 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices_GASM"
/* Record whether the subdomain index sets should be sorted during setup. */
PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
EXTERN_C_END
847 
848 EXTERN_C_BEGIN
849 #undef __FUNCT__
850 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
851 /*
852    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
853         In particular, it would upset the global subdomain number calculation.
854 */
855 PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
856 {
857   PC_GASM         *osm = (PC_GASM*)pc->data;
858   PetscErrorCode ierr;
859 
860   PetscFunctionBegin;
861   if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
862 
863   if (n) {
864     *n = osm->n;
865   }
866   if (first) {
867     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
868     *first -= osm->n;
869   }
870   if (ksp) {
871     /* Assume that local solves are now different; not necessarily
872        true, though!  This flag is used only for PCView_GASM() */
873     *ksp                   = osm->ksp;
874     osm->same_subdomain_solvers = PETSC_FALSE;
875   }
876   PetscFunctionReturn(0);
877 }/* PCGASMGetSubKSP_GASM() */
878 EXTERN_C_END
879 
880 
881 #undef __FUNCT__
882 #define __FUNCT__ "PCGASMSetSubdomains"
883 /*@C
884     PCGASMSetSubdomains - Sets the subdomains for this processor
885     for the additive Schwarz preconditioner.
886 
887     Collective on PC
888 
889     Input Parameters:
890 +   pc  - the preconditioner context
891 .   n   - the number of subdomains for this processor
892 .   iis - the index sets that define this processor's local inner subdomains
893          (or PETSC_NULL for PETSc to determine subdomains)
894 -   ois- the index sets that define this processor's local outer subdomains
895          (or PETSC_NULL to use the same as iis)
896 
897     Notes:
898     The IS indices use the parallel, global numbering of the vector entries.
899     Inner subdomains are those where the correction is applied.
900     Outer subdomains are those where the residual necessary to obtain the
901     corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
902     Both inner and outer subdomains can extend over several processors.
903     This processor's portion of a subdomain is known as a local subdomain.
904 
905     By default the GASM preconditioner uses 1 (local) subdomain per processor.
906     Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
907     all processors that PCGASM will create automatically, and to specify whether
908     they should be local or not.
909 
910 
911     Level: advanced
912 
913 .keywords: PC, GASM, set, subdomains, additive Schwarz
914 
915 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
916           PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
917 @*/
918 PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
919 {
920   PetscErrorCode ierr;
921 
922   PetscFunctionBegin;
923   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
924   ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
925   PetscFunctionReturn(0);
926 }
927 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
    Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
    communicator. Either all or no processors in the PC communicator must call this routine with
    the same N.  The subdomains will be created automatically during PCSetUp().

    Collective on PC

    Input Parameters:
+   pc           - the preconditioner context
.   N            - the total number of subdomains cumulative across all processors
-   create_local - whether the subdomains to be created are to be local

    Options Database Key:
    To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
+    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
-    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

    By default the GASM preconditioner uses 1 subdomain per processor.


    Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
    of subdomains per processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the type-specific implementation if one is registered (no-op otherwise) */
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
970 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <overlap> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
    PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatch to the type-specific implementation if one is registered (no-op otherwise) */
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1021 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatch to the type-specific implementation if one is registered (no-op otherwise) */
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1060 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatch to the type-specific implementation if one is registered (no-op otherwise) */
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1089 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSP contexts is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if the PC type does not provide this method */
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1131 
1132 /* -------------------------------------------------------------------------------------*/
1133 /*MC
1134    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1135            its own KSP object.
1136 
1137    Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets total number of local subdomains (known as blocks) to be distributed among processors
1139 .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1140 .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1141 .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1142 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1143 
1144      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1145       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1146       -pc_gasm_type basic to use the standard GASM.
1147 
1148    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1149      than one processor. Defaults to one block per processor.
1150 
1151      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1152         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1153 
1154      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
1155          and set the options directly on the resulting KSP object (you can access its PC
1156          with KSPGetPC())
1157 
1158 
1159    Level: beginner
1160 
1161    Concepts: additive Schwarz method
1162 
1163     References:
1164     An additive variant of the Schwarz alternating method for the case of many subregions
1165     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1166 
1167     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1168     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1169 
1170 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1171            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1172            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1173 
1174 M*/
1175 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
/*
   PCCreate_GASM - Creates and initializes the PC_GASM context, installs the
   GASM function table on the PC, and registers the PCGASMXXX_C methods that
   the public PCGASMSetXXX()/PCGASMGetSubKSP() wrappers dispatch to.
*/
PetscErrorCode  PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
  /* subdomain counts are decided later, in PCSetUp_GASM() or the PCGASMSet* routines */
  osm->N                 = PETSC_DECIDE;
  osm->n                 = PETSC_DECIDE;
  osm->nmax              = 0;
  osm->overlap           = 1;           /* default overlap; -1 later means "user supplied outer subdomains" */
  osm->ksp               = 0;
  osm->gorestriction     = 0;
  osm->girestriction     = 0;
  osm->gx                = 0;
  osm->gy                = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->ois               = 0;
  osm->iis               = 0;
  osm->pmat              = 0;
  osm->type              = PC_GASM_RESTRICT;  /* default variant; may be switched to BASIC for symmetric problems */
  osm->same_subdomain_solvers = PETSC_TRUE;
  osm->sort_indices           = PETSC_TRUE;

  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_GASM;
  pc->ops->applytranspose    = PCApplyTranspose_GASM;
  pc->ops->setup             = PCSetUp_GASM;
  pc->ops->reset             = PCReset_GASM;
  pc->ops->destroy           = PCDestroy_GASM;
  pc->ops->setfromoptions    = PCSetFromOptions_GASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
  pc->ops->view              = PCView_GASM;
  pc->ops->applyrichardson   = 0;

  /* register the dispatch targets for the public PCGASM wrappers */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM",
                    PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
                    PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
                    PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
                    PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
                    PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
                    PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
1230 
1231 
1232 #undef __FUNCT__
1233 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1234 /*@C
1235    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
1236    Schwarz preconditioner for any problem based on its matrix.
1237 
1238    Collective
1239 
1240    Input Parameters:
1241 +  A       - The global matrix operator
1242 .  overlap - amount of overlap in outer subdomains
1243 -  n       - the number of local subdomains
1244 
1245    Output Parameters:
1246 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1247 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1248 
1249    Level: advanced
1250 
1251    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1252          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1253          nonzero overlap with PCGASMSetOverlap()
1254 
1255     In the Fortran version you must provide the arrays iis[] and ois[] already allocated of length n.
1256 
1257 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1258 
1259 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1260 @*/
1261 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS* iis[], IS* ois[])
1262 {
1263   MatPartitioning           mpart;
1264   const char                *prefix;
1265   PetscErrorCode            (*f)(Mat,MatReuse,Mat*);
1266   PetscMPIInt               size;
1267   PetscInt                  i,j,rstart,rend,bs;
1268   PetscBool                 isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1269   Mat                       Ad = PETSC_NULL, adj;
1270   IS                        ispart,isnumb,*is;
1271   PetscErrorCode            ierr;
1272 
1273   PetscFunctionBegin;
1274   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1275   PetscValidPointer(iis,4);
1276   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1277 
1278   /* Get prefix, row distribution, and block size */
1279   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1280   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1281   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1282   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1283 
1284   /* Get diagonal block from matrix if possible */
1285   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1286   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1287   if (f) {
1288     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1289   } else if (size == 1) {
1290     Ad = A;
1291   }
1292   if (Ad) {
1293     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1294     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1295   }
1296   if (Ad && n > 1) {
1297     PetscBool  match,done;
1298     /* Try to setup a good matrix partitioning if available */
1299     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1300     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1301     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1302     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1303     if (!match) {
1304       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1305     }
1306     if (!match) { /* assume a "good" partitioner is available */
1307       PetscInt na;
1308       const PetscInt *ia,*ja;
1309       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1310       if (done) {
1311         /* Build adjacency matrix by hand. Unfortunately a call to
1312            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1313            remove the block-aij structure and we cannot expect
1314            MatPartitioning to split vertices as we need */
1315         PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1316         const PetscInt *row;
1317         nnz = 0;
1318         for (i=0; i<na; i++) { /* count number of nonzeros */
1319           len = ia[i+1] - ia[i];
1320           row = ja + ia[i];
1321           for (j=0; j<len; j++) {
1322             if (row[j] == i) { /* don't count diagonal */
1323               len--; break;
1324             }
1325           }
1326           nnz += len;
1327         }
1328         ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1329         ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1330         nnz    = 0;
1331         iia[0] = 0;
1332         for (i=0; i<na; i++) { /* fill adjacency */
1333           cnt = 0;
1334           len = ia[i+1] - ia[i];
1335           row = ja + ia[i];
1336           for (j=0; j<len; j++) {
1337             if (row[j] != i) { /* if not diagonal */
1338               jja[nnz+cnt++] = row[j];
1339             }
1340           }
1341           nnz += cnt;
1342           iia[i+1] = nnz;
1343         }
1344         /* Partitioning of the adjacency matrix */
1345         ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1346         ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1347         ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1348         ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1349         ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1350         ierr = MatDestroy(&adj);CHKERRQ(ierr);
1351         foundpart = PETSC_TRUE;
1352       }
1353       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1354     }
1355     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1356   }
1357   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1358   if (!foundpart) {
1359 
1360     /* Partitioning by contiguous chunks of rows */
1361 
1362     PetscInt mbs   = (rend-rstart)/bs;
1363     PetscInt start = rstart;
1364     for (i=0; i<n; i++) {
1365       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1366       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1367       start += count;
1368     }
1369 
1370   } else {
1371 
1372     /* Partitioning by adjacency of diagonal block  */
1373 
1374     const PetscInt *numbering;
1375     PetscInt       *count,nidx,*indices,*newidx,start=0;
1376     /* Get node count in each partition */
1377     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1378     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1379     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1380       for (i=0; i<n; i++) count[i] *= bs;
1381     }
1382     /* Build indices from node numbering */
1383     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1384     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1385     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1386     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1387     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1388     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1389     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1390       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1391       for (i=0; i<nidx; i++)
1392         for (j=0; j<bs; j++)
1393           newidx[i*bs+j] = indices[i]*bs + j;
1394       ierr = PetscFree(indices);CHKERRQ(ierr);
1395       nidx   *= bs;
1396       indices = newidx;
1397     }
1398     /* Shift to get global indices */
1399     for (i=0; i<nidx; i++) indices[i] += rstart;
1400 
1401     /* Build the index sets for each block */
1402     for (i=0; i<n; i++) {
1403       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1404       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1405       start += count[i];
1406     }
1407 
1408     ierr = PetscFree(count);
1409     ierr = PetscFree(indices);
1410     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1411     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1412   }
1413   *iis = is;
1414   if (!ois) PetscFunctionReturn(0);
1415   /*
1416    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1417    has been requested, copy the inner subdomains over so they can be modified.
1418    */
1419   ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr);
1420   for (i=0; i<n; ++i) {
1421     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1422       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1423       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1424     } else {
1425       ierr = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1426       (*ois)[i] = (*iis)[i];
1427     }
1428   }
1429   if (overlap > 0) {
1430     /* Extend the "overlapping" regions by a number of steps */
1431     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1432   }
1433   PetscFunctionReturn(0);
1434 }
1435 
1436 #undef __FUNCT__
1437 #define __FUNCT__ "PCGASMDestroySubdomains"
1438 /*@C
1439    PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
1441    called after setting subdomains with PCGASMSetSubdomains().
1442 
1443    Collective
1444 
1445    Input Parameters:
1446 +  n   - the number of index sets
1447 .  iis - the array of inner subdomains,
1448 -  ois - the array of outer subdomains, can be PETSC_NULL
1449 
1450    Level: intermediate
1451 
1452    Notes: this is merely a convenience subroutine that walks each list,
1453    destroys each IS on the list, and then frees the list.
1454 
1455 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1456 
1457 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1458 @*/
1459 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1460 {
1461   PetscInt       i;
1462   PetscErrorCode ierr;
1463   PetscFunctionBegin;
1464   if (n <= 0) PetscFunctionReturn(0);
1465   if (iis) {
1466     PetscValidPointer(iis,2);
1467     for (i=0; i<n; i++) {
1468       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1469     }
1470     ierr = PetscFree(iis);CHKERRQ(ierr);
1471   }
1472   if (ois) {
1473     for (i=0; i<n; i++) {
1474       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1475     }
1476     ierr = PetscFree(ois);CHKERRQ(ierr);
1477   }
1478   PetscFunctionReturn(0);
1479 }
1480 
1481 
/*
   PCGASMLocalSubdomainBounds2D - Statement macro.  Given a 2D subdomain with corners
   (xleft,ylow) (inclusive) and (xright,yhigh) (exclusive) on an M x N grid, and this
   process's ownership range [first,last) in the row-major global numbering, computes
   the bounding box (*xleft_loc,*ylow_loc)-(*xright_loc,*yhigh_loc) of the intersection
   of the subdomain with the local ownership range, and the number *n of grid points in
   that intersection.  The last five arguments are pointers (output parameters).
*/
#define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
{                                                                                                       \
 PetscInt first_row = first/M, last_row = last/M+1;                                                     \
  /*                                                                                                    \
   Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
   of the bounding box of the intersection of the subdomain with the local ownership range (local       \
   subdomain).                                                                                          \
   Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
   of the intersection.                                                                                 \
  */                                                                                                    \
  /* ylow_loc is the grid row containing the first element of the local subdomain */                    \
  *ylow_loc = PetscMax(first_row,ylow);                                                                    \
  /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
  *xleft_loc = *ylow_loc==first_row?PetscMax(first%M,xleft):xleft;                                                                            \
  /* yhigh_loc is the grid row above the last local subdomain element */                                                                    \
  *yhigh_loc = PetscMin(last_row,yhigh);                                                                                                     \
  /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */         \
  *xright_loc = *yhigh_loc==last_row?PetscMin(xright,last%M):xright;                                                                          \
  /* Now compute the size of the local subdomain n: full interior rows plus the partial first and last rows. */ \
  *n = 0;                                               \
  if (*ylow_loc < *yhigh_loc) {                           \
    PetscInt width = xright-xleft;                     \
    *n += width*(*yhigh_loc-*ylow_loc-1);                 \
    *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
    *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
  }\
}
1509 
1510 
1511 
1512 #undef __FUNCT__
1513 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1514 /*@
1515    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1516    preconditioner for a two-dimensional problem on a regular grid.
1517 
1518    Collective
1519 
1520    Input Parameters:
1521 +  M, N               - the global number of grid points in the x and y directions
1522 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1523 .  dof                - degrees of freedom per node
1524 -  overlap            - overlap in mesh lines
1525 
1526    Output Parameters:
1527 +  Nsub - the number of local subdomains created
1528 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1529 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1530 
1531 
1532    Level: advanced
1533 
1534 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1535 
1536 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1537           PCGASMSetOverlap()
1538 @*/
1539 PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
1540 {
1541   PetscErrorCode ierr;
1542   PetscMPIInt    size, rank;
1543   PetscInt       i, j;
1544   PetscInt       maxheight, maxwidth;
1545   PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
1546   PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
1547   PetscInt       x[2][2], y[2][2], n[2];
1548   PetscInt       first, last;
1549   PetscInt       nidx, *idx;
1550   PetscInt       ii,jj,s,q,d;
1551   PetscInt       k,kk;
1552   PetscMPIInt    color;
1553   MPI_Comm       comm, subcomm;
1554   IS             **xis = 0, **is = ois, **is_local = iis;
1555 
1556   PetscFunctionBegin;
1557   ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
1558   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1559   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1560   ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
1561   if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1562 	     "does not respect the number of degrees of freedom per grid point %D", first, last, dof);
1563 
1564   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1565   s = 0;
1566   ystart = 0;
1567   for (j=0; j<Ndomains; ++j) {
1568     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1569     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
1570     /* Vertical domain limits with an overlap. */
1571     ylow = PetscMax(ystart - overlap,0);
1572     yhigh = PetscMin(ystart + maxheight + overlap,N);
1573     xstart = 0;
1574     for (i=0; i<Mdomains; ++i) {
1575       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1576       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1577       /* Horizontal domain limits with an overlap. */
1578       xleft   = PetscMax(xstart - overlap,0);
1579       xright  = PetscMin(xstart + maxwidth + overlap,M);
1580       /*
1581 	 Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1582       */
1583       PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1584       if (nidx) {
1585         ++s;
1586       }
1587       xstart += maxwidth;
1588     }/* for (i = 0; i < Mdomains; ++i) */
1589     ystart += maxheight;
1590   }/* for (j = 0; j < Ndomains; ++j) */
1591   /* Now we can allocate the necessary number of ISs. */
1592   *nsub = s;
1593   ierr = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1594   ierr = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1595   s = 0;
1596   ystart = 0;
1597   for (j=0; j<Ndomains; ++j) {
1598     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1599     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
1600     /* Vertical domain limits with an overlap. */
1601     y[0][0] = PetscMax(ystart - overlap,0);
1602     y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1603     /* Vertical domain limits without an overlap. */
1604     y[1][0] = ystart;
1605     y[1][1] = PetscMin(ystart + maxheight,N);
1606     xstart = 0;
1607     for (i=0; i<Mdomains; ++i) {
1608       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1609       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1610       /* Horizontal domain limits with an overlap. */
1611       x[0][0]  = PetscMax(xstart - overlap,0);
1612       x[0][1]  = PetscMin(xstart + maxwidth + overlap,M);
1613       /* Horizontal domain limits without an overlap. */
1614       x[1][0] = xstart;
1615       x[1][1] = PetscMin(xstart+maxwidth,M);
1616       /*
1617 	 Determine whether this domain intersects this processor's ownership range of pc->pmat.
1618 	 Do this twice: first for the domains with overlaps, and once without.
1619 	 During the first pass create the subcommunicators, and use them on the second pass as well.
1620       */
1621       for (q = 0; q < 2; ++q) {
1622 	/*
1623 	  domain limits, (xleft, xright) and (ylow, yheigh) are adjusted
1624 	  according to whether the domain with an overlap or without is considered.
1625 	*/
1626 	xleft = x[q][0]; xright = x[q][1];
1627 	ylow  = y[q][0]; yhigh  = y[q][1];
1628         PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1629 	nidx *= dof;
1630         n[q] = nidx;
1631         /*
1632          Based on the counted number of indices in the local domain *with an overlap*,
1633          construct a subcommunicator of all the processors supporting this domain.
1634          Observe that a domain with an overlap might have nontrivial local support,
1635          while the domain without an overlap might not.  Hence, the decision to participate
1636          in the subcommunicator must be based on the domain with an overlap.
1637          */
1638 	if (q == 0) {
1639 	  if (nidx) {
1640 	    color = 1;
1641 	  } else {
1642 	    color = MPI_UNDEFINED;
1643 	  }
1644 	  ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
1645 	}
1646         /*
1647          Proceed only if the number of local indices *with an overlap* is nonzero.
1648          */
1649         if (n[0]) {
1650           if (q == 0) {
1651             xis = is;
1652           }
1653           if (q == 1) {
1654             /*
1655              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1656              Moreover, if the overlap is zero, the two ISs are identical.
1657              */
1658             if (overlap == 0) {
1659               (*is_local)[s] = (*is)[s];
1660               ierr = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
1661               continue;
1662             } else {
1663               xis = is_local;
1664               subcomm = ((PetscObject)(*is)[s])->comm;
1665             }
1666           }/* if (q == 1) */
1667           idx = PETSC_NULL;
1668 	  ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1669           if (nidx) {
1670             k    = 0;
1671             for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1672               PetscInt x0 = (jj==ylow_loc)?xleft_loc:xleft;
1673               PetscInt x1 = (jj==yhigh_loc-1)?xright_loc:xright;
1674               kk = dof*(M*jj + x0);
1675               for (ii=x0; ii<x1; ++ii) {
1676                 for (d = 0; d < dof; ++d) {
1677                   idx[k++] = kk++;
1678                 }
1679               }
1680             }
1681           }
1682 	  ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
1683 	}/* if (n[0]) */
1684       }/* for (q = 0; q < 2; ++q) */
1685       if (n[0]) {
1686         ++s;
1687       }
1688       xstart += maxwidth;
1689     }/* for (i = 0; i < Mdomains; ++i) */
1690     ystart += maxheight;
1691   }/* for (j = 0; j < Ndomains; ++j) */
1692   PetscFunctionReturn(0);
1693 }
1694 
1695 #undef __FUNCT__
1696 #define __FUNCT__ "PCGASMGetSubdomains"
1697 /*@C
1698     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1699     for the additive Schwarz preconditioner.
1700 
1701     Not Collective
1702 
1703     Input Parameter:
1704 .   pc - the preconditioner context
1705 
1706     Output Parameters:
1707 +   n   - the number of subdomains for this processor (default value = 1)
1708 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be PETSC_NULL)
1709 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be PETSC_NULL)
1710 
1711 
1712     Notes:
1713     The user is responsible for destroying the ISs and freeing the returned arrays.
1714     The IS numbering is in the parallel, global numbering of the vector.
1715 
1716     Level: advanced
1717 
1718 .keywords: PC, GASM, get, subdomains, additive Schwarz
1719 
1720 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1721           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1722 @*/
1723 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1724 {
1725   PC_GASM         *osm;
1726   PetscErrorCode ierr;
1727   PetscBool      match;
1728   PetscInt       i;
1729   PetscFunctionBegin;
1730   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1731   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1732   if (!match)
1733     SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1734   osm = (PC_GASM*)pc->data;
1735   if (n)  *n  = osm->n;
1736   if (iis) {
1737     ierr = PetscMalloc(osm->n*sizeof(IS), iis); CHKERRQ(ierr);
1738   }
1739   if (ois) {
1740     ierr = PetscMalloc(osm->n*sizeof(IS), ois); CHKERRQ(ierr);
1741   }
1742   if (iis || ois) {
1743     for (i = 0; i < osm->n; ++i) {
1744       if (iis) (*iis)[i] = osm->iis[i];
1745       if (ois) (*ois)[i] = osm->ois[i];
1746     }
1747   }
1748   PetscFunctionReturn(0);
1749 }
1750 
1751 #undef __FUNCT__
1752 #define __FUNCT__ "PCGASMGetSubmatrices"
1753 /*@C
1754     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1755     only) for the additive Schwarz preconditioner.
1756 
1757     Not Collective
1758 
1759     Input Parameter:
1760 .   pc - the preconditioner context
1761 
1762     Output Parameters:
1763 +   n   - the number of matrices for this processor (default value = 1)
1764 -   mat - the matrices
1765 
1766     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1767            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1768            subdomains were defined using PCGASMSetTotalSubdomains().
1769     Level: advanced
1770 
1771 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1772 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1774           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1775 @*/
1776 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1777 {
1778   PC_GASM         *osm;
1779   PetscErrorCode ierr;
1780   PetscBool      match;
1781 
1782   PetscFunctionBegin;
1783   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1784   PetscValidIntPointer(n,2);
1785   if (mat) PetscValidPointer(mat,3);
1786   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1787   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1788   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1789   osm = (PC_GASM*)pc->data;
1790   if (n)   *n   = osm->n;
1791   if (mat) *mat = osm->pmat;
1792 
1793   PetscFunctionReturn(0);
1794 }
1795