xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision 3bf036e263fd78807e2931ff42d9ddcd8aae3fd4)
1 /*
  This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3   In this version each processor may intersect multiple subdomains and any subdomain may
4   intersect multiple processors.  Intersections of subdomains with processors are called *local
5   subdomains*.
6 
7        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
8        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
10 */
11 #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
12 
13 typedef struct {
14   PetscInt   N,n,nmax;
15   PetscInt   overlap;             /* overlap requested by user */
16   KSP        *ksp;                /* linear solvers for each block */
17   Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; these alias pieces of the storage of the merged vectors above. */
19   VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
20   VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
21   IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
22   IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
23   Mat        *pmat;               /* subdomain block matrices */
24   PCGASMType type;                /* use reduced interpolation, restriction or both */
25   PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
26   PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are the same */
28   PetscBool  sort_indices;           /* flag to sort subdomain indices */
29 } PC_GASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCGASMSubdomainView_Private"
33 static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
34 {
35   PC_GASM        *osm  = (PC_GASM*)pc->data;
36   PetscInt       j,nidx;
37   const PetscInt *idx;
38   PetscViewer    sviewer;
39   char           *cidx;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBegin;
  if (i < 0 || i >= osm->n) SETERRQ2(((PetscObject)viewer)->comm, PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must be nonnegative and less than %D", i, osm->n);
44   /* Inner subdomains. */
45   ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
46   /*
47    No more than 15 characters per index plus a space.
48    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
49    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
   For nidx == 0, the whole string consists of 16 '\0' characters.
51    */
52   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
53   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
54   ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
55   for (j = 0; j < nidx; ++j) {
56     ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
57   }
58   ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
59   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
60   ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
61   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
62   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
63   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
64   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
65   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
66   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
67   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68   ierr = PetscFree(cidx);CHKERRQ(ierr);
69   /* Outer subdomains. */
70   ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
71   /*
72    No more than 15 characters per index plus a space.
73    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
74    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
   For nidx == 0, the whole string consists of 16 '\0' characters.
76    */
77   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
78   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
79   ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
80   for (j = 0; j < nidx; ++j) {
81     ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
82   }
  ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
85   ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
86   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
87   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
88   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
89   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
90   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
91   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
92   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
93   ierr = PetscFree(cidx);CHKERRQ(ierr);
94 
95   PetscFunctionReturn(0);
96 }
97 
98 #undef __FUNCT__
99 #define __FUNCT__ "PCGASMPrintSubdomains"
100 static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
101 {
102   PC_GASM        *osm  = (PC_GASM*)pc->data;
103   const char     *prefix;
104   char           fname[PETSC_MAX_PATH_LEN+1];
105   PetscInt       i, l, d, count, gcount, *permutation, *numbering;
106   PetscBool      found;
107   PetscViewer    viewer, sviewer = PETSC_NULL;
108   PetscErrorCode ierr;
109 
110   PetscFunctionBegin;
111   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
112   for (i = 0; i < osm->n; ++i) permutation[i] = i;
113   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
114   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
115   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
116   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
117   if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
118   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
119   /*
120    Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
121    the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
122   */
123   ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
124   l = 0;
125   for (count = 0; count < gcount; ++count) {
126     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
127     if (l<osm->n){
128       d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
129       if (numbering[d] == count) {
130         ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
131         ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
132         ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
133         ++l;
134       }
135     }
136     ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
137   }
138   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
139   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
140   PetscFunctionReturn(0);
141 }
142 
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "PCView_GASM"
146 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
147 {
148   PC_GASM        *osm = (PC_GASM*)pc->data;
149   const char     *prefix;
150   PetscErrorCode ierr;
151   PetscMPIInt    rank, size;
152   PetscInt       i,bsz;
153   PetscBool      iascii,view_subdomains=PETSC_FALSE;
154   PetscViewer    sviewer;
155   PetscInt       count, l, gcount, *numbering, *permutation;
156   char overlap[256]     = "user-defined overlap";
157   char gsubdomains[256] = "unknown total number of subdomains";
  char lsubdomains[256] = "unknown number of local subdomains";
159   char msubdomains[256] = "unknown max number of local subdomains";
160   PetscFunctionBegin;
161   ierr = MPI_Comm_size(((PetscObject)pc)->comm, &size);CHKERRQ(ierr);
162   ierr = MPI_Comm_rank(((PetscObject)pc)->comm, &rank);CHKERRQ(ierr);
163 
164 
165   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
166   for (i = 0; i < osm->n; ++i) permutation[i] = i;
167   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
168   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
169 
170   if (osm->overlap >= 0) {
171     ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
172   }
173   ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
174   if (osm->N > 0) {
    ierr = PetscSNPrintf(lsubdomains, sizeof(lsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
176   }
177   if (osm->nmax > 0){
178     ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
179   }
180 
181   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
182   ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,PETSC_NULL);CHKERRQ(ierr);
183 
184   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
185   if (iascii) {
186     /*
187      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
188      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
189      collectively on the comm.
190      */
191     ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
192     ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
193     ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
194     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
195     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
196     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
197     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
198     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
199     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr);
200     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
201     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
202     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
203     ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
204     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
205     ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
206     /* Make sure that everybody waits for the banner to be printed. */
207     ierr = MPI_Barrier(((PetscObject)viewer)->comm);CHKERRQ(ierr);
208     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
209     l = 0;
210     for (count = 0; count < gcount; ++count) {
211       PetscMPIInt srank, ssize;
212       if (l<osm->n){
213         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
214         if (numbering[d] == count) {
215           ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
216           ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
217           ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
218           ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
219           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d:%d] (subcomm [%d:%d]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr);
221           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
222           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
223           if (view_subdomains) {
224             ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
225           }
          if (!pc->setupcalled) {
            ierr = PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
          } else {
            ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
          }
232           ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
233           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
234           ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
235           ++l;
236         }
237       }
238       ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
239     }
240     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
241   } else {
242     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
243   }
244   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
245   PetscFunctionReturn(0);
246 }
247 
248 
249 
250 
251 
252 #undef __FUNCT__
253 #define __FUNCT__ "PCSetUp_GASM"
254 static PetscErrorCode PCSetUp_GASM(PC pc)
255 {
256   PC_GASM         *osm  = (PC_GASM*)pc->data;
257   PetscErrorCode ierr;
258   PetscBool      symset,flg;
259   PetscInt       i;
260   PetscMPIInt    rank, size;
261   MatReuse       scall = MAT_REUSE_MATRIX;
262   KSP            ksp;
263   PC             subpc;
264   const char     *prefix,*pprefix;
265   Vec            x,y;
266   PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
  const PetscInt *oidxi;    /* Indices of the i-th local outer subdomain.                          */
268   PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
269   PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
  IS             gois;      /* Disjoint union of the global indices of outer subdomains.          */
271   IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
272   PetscScalar    *gxarray, *gyarray;
273   PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy --
274                              over the disjoint union of outer subdomains. */
275   DM             *subdomain_dm = PETSC_NULL;
276 
277   PetscFunctionBegin;
278   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
279   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
280   if (!pc->setupcalled) {
281 
282     if (!osm->type_set) {
283       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
284       if (symset && flg) { osm->type = PC_GASM_BASIC; }
285     }
286 
287     /*
288      If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
     The total number of subdomains, osm->N, is not necessarily set; it might be PETSC_DECIDE, in which case it will be calculated from osm->n.
290      */
291     if (osm->n == PETSC_DECIDE) {
292       /* no subdomains given */
293       /* try pc->dm first */
294       if (pc->dm) {
295         char      ddm_name[1024];
296         DM        ddm;
297         PetscBool flg;
298         PetscInt     num_subdomains, d;
299         char         **subdomain_names;
300         IS           *inner_subdomain_is, *outer_subdomain_is;
301         /* Allow the user to request a decomposition DM by name */
302         ierr = PetscStrncpy(ddm_name, "", 1024);CHKERRQ(ierr);
303         ierr = PetscOptionsString("-pc_gasm_decomposition","Name of the DM defining the decomposition", "PCSetDM",ddm_name,ddm_name,1024,&flg);CHKERRQ(ierr);
304         if (flg) {
305           ierr = DMCreateDomainDecompositionDM(pc->dm, ddm_name, &ddm);CHKERRQ(ierr);
306           if (!ddm) {
            SETERRQ1(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Unknown DM decomposition name %s", ddm_name);
308           }
309           ierr = PetscInfo(pc,"Using decomposition DM defined using options database\n");CHKERRQ(ierr);
310           ierr = PCSetDM(pc,ddm);CHKERRQ(ierr);
311         }
312         ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
313         if (num_subdomains) {
314           ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
315         }
316         for (d = 0; d < num_subdomains; ++d) {
317           if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
318           if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
319           if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
320         }
321         ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
322         ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
323         ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
324       }
325       if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
326         osm->nmax = osm->n = 1;
327         ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
328         osm->N = size;
329       }
330     }
331     if (!osm->iis){
332       /*
333        The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
334        but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
335        We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
336        */
337       ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
338     }
339     if (osm->N == PETSC_DECIDE) {
340       struct {PetscInt max,sum;} inwork,outwork;
341       /* determine global number of subdomains and the max number of local subdomains */
342       inwork.max = osm->n;
343       inwork.sum = osm->n;
344       ierr = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
345       osm->nmax = outwork.max;
346       osm->N    = outwork.sum;
347     }
348 
349     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
350     flg  = PETSC_FALSE;
351     ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
352     if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }
353 
354     if (osm->sort_indices) {
355       for (i=0; i<osm->n; i++) {
356         ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
358       }
359     }
360     /*
361      Merge the ISs, create merged vectors and restrictions.
362      */
363     /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
364     on = 0;
365     for (i=0; i<osm->n; i++) {
366       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
367       on += oni;
368     }
369     ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr);
370     on = 0;
371     for (i=0; i<osm->n; i++) {
372       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
373       ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
374       ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
375       ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
376       on += oni;
377     }
378     ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
379     ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
380     ierr = VecCreateMPI(((PetscObject)pc)->comm, on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
381     ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
382     ierr = VecGetOwnershipRange(osm->gx, &gofirst, PETSC_NULL);CHKERRQ(ierr);
383     ierr = ISCreateStride(((PetscObject)pc)->comm,on,gofirst,1, &goid);CHKERRQ(ierr);
384     ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
385     ierr = VecDestroy(&x);CHKERRQ(ierr);
386     ierr = ISDestroy(&gois);CHKERRQ(ierr);
387     /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
    { PetscInt       ini;     /* Number of indices in the i-th local inner subdomain.               */
      PetscInt       in;      /* Number of indices in the disjoint union of local inner subdomains. */
390       PetscInt       *iidx;   /* Global indices in the merged local inner subdomain. */
391       PetscInt       *ioidx;  /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
392       IS             giis;    /* IS for the disjoint union of inner subdomains. */
393       IS             giois;   /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
394       /**/
395       in = 0;
396       for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        in += ini;
399       }
400       ierr = PetscMalloc(in*sizeof(PetscInt), &iidx);CHKERRQ(ierr);
401       ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr);
402       ierr = VecGetOwnershipRange(osm->gx,&gofirst, PETSC_NULL);CHKERRQ(ierr);
403       in = 0;
404       on = 0;
405       for (i=0; i<osm->n; i++) {
406         const PetscInt *iidxi;        /* Global indices of the i-th local inner subdomain. */
        ISLocalToGlobalMapping ltogi; /* Mapping for the i-th local outer subdomain, applied in reverse below to convert global indices to local ones. */
408         PetscInt       *ioidxi;       /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
409         PetscInt       ioni;          /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
410         PetscInt       k;
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
        ierr = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
        ioidxi = ioidx+in;
        ierr = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
        ierr = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
        if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
        for (k = 0; k < ini; ++k) {
          ioidxi[k] += gofirst+on;
        }
        in += ini;
        on += oni;
426       }
427       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
428       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
429       ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
430       ierr = VecDestroy(&y);CHKERRQ(ierr);
431       ierr = ISDestroy(&giis);CHKERRQ(ierr);
432       ierr = ISDestroy(&giois);CHKERRQ(ierr);
433     }
434     ierr = ISDestroy(&goid);CHKERRQ(ierr);
435     /* Create the subdomain work vectors. */
436     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
437     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
438     ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
439     ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
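    /* Each subdomain work vector x[i], y[i] created below aliases a contiguous chunk of the merged arrays gxarray/gyarray; no data is copied. */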
440     for (i=0, on=0; i<osm->n; ++i, on += oni) {
441       PetscInt oNi;
442       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
443       ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
444       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
445       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
446     }
447     ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
448     ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
449     /* Create the local solvers */
450     ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
451     for (i=0; i<osm->n; i++) { /* KSPs are local */
452       ierr = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
453       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
454       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
455       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
456       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
457       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
458       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
459       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
460       osm->ksp[i] = ksp;
461     }
462     scall = MAT_INITIAL_MATRIX;
463 
  } else { /* pc->setupcalled */
466     /*
467        Destroy the blocks from the previous iteration
468     */
469     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
470       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
471       scall = MAT_INITIAL_MATRIX;
472     }
473   }
474 
475   /*
476      Extract out the submatrices.
477   */
478   if (size > 1) {
479     ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  } else {
482     ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
483   }
484   if (scall == MAT_INITIAL_MATRIX) {
485     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
486     for (i=0; i<osm->n; i++) {
487       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
488       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
489     }
490   }
491 
492   /* Return control to the user so that the submatrices can be modified (e.g., to apply
493      different boundary conditions for the submatrices than for the global problem) */
494   ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
495 
496   /*
497      Loop over submatrices putting them into local ksps
498   */
499   for (i=0; i<osm->n; i++) {
500     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
501     if (!pc->setupcalled) {
502       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
503     }
504   }
505 
506   PetscFunctionReturn(0);
507 }
508 
509 #undef __FUNCT__
510 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
511 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
512 {
513   PC_GASM         *osm = (PC_GASM*)pc->data;
514   PetscErrorCode ierr;
515   PetscInt       i;
516 
517   PetscFunctionBegin;
518   for (i=0; i<osm->n; i++) {
519     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
520   }
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "PCApply_GASM"
526 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
527 {
528   PC_GASM         *osm = (PC_GASM*)pc->data;
529   PetscErrorCode ierr;
530   PetscInt       i;
531   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
532 
533   PetscFunctionBegin;
534   /*
535      Support for limiting the restriction or interpolation only to the inner
536      subdomain values (leaving the other values 0).
537   */
538   if (!(osm->type & PC_GASM_RESTRICT)) {
539     /* have to zero the work RHS since scatter may leave some slots empty */
540     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
541     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
544     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
545   }
546   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
547   if (!(osm->type & PC_GASM_RESTRICT)) {
548     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
551     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
552   }
553   /* do the subdomain solves */
554   for (i=0; i<osm->n; ++i) {
555     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
556   }
557   ierr = VecZeroEntries(y);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "PCApplyTranspose_GASM"
570 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
571 {
572   PC_GASM         *osm = (PC_GASM*)pc->data;
573   PetscErrorCode ierr;
574   PetscInt       i;
575   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
576 
577   PetscFunctionBegin;
578   /*
579      Support for limiting the restriction or interpolation to only local
580      subdomain values (leaving the other values 0).
581 
582      Note: these are reversed from the PCApply_GASM() because we are applying the
583      transpose of the three terms
584   */
585   if (!(osm->type & PC_GASM_INTERPOLATE)) {
586     /* have to zero the work RHS since scatter may leave some slots empty */
587     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
588     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
591     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
592   }
593   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
594   if (!(osm->type & PC_GASM_INTERPOLATE)) {
595     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
598     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
599   }
600   /* do the local solves */
601   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
602     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
603   }
604   ierr = VecZeroEntries(y);CHKERRQ(ierr);
605   if (!(osm->type & PC_GASM_RESTRICT)) {
606     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
607     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  } else {
610     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
611     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
612   }
613 
614   PetscFunctionReturn(0);
615 }
616 
617 #undef __FUNCT__
618 #define __FUNCT__ "PCReset_GASM"
619 static PetscErrorCode PCReset_GASM(PC pc)
620 {
621   PC_GASM        *osm = (PC_GASM*)pc->data;
622   PetscErrorCode ierr;
623   PetscInt       i;
624 
625   PetscFunctionBegin;
626   if (osm->ksp) {
627     for (i=0; i<osm->n; i++) {
628       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
629     }
630   }
631   if (osm->pmat) {
632     if (osm->n > 0) {
633       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
634     }
635   }
636   if (osm->x) {
637     for (i=0; i<osm->n; i++) {
638       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
639       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
640     }
641   }
642   ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
643   ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);
644 
645   ierr = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
646   ierr = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
647   ierr = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
648   osm->ois = 0;
649   osm->iis = 0;
650   PetscFunctionReturn(0);
651 }
652 
653 #undef __FUNCT__
654 #define __FUNCT__ "PCDestroy_GASM"
655 static PetscErrorCode PCDestroy_GASM(PC pc)
656 {
657   PC_GASM         *osm = (PC_GASM*)pc->data;
658   PetscErrorCode ierr;
659   PetscInt       i;
660 
661   PetscFunctionBegin;
662   ierr = PCReset_GASM(pc);CHKERRQ(ierr);
663   if (osm->ksp) {
664     for (i=0; i<osm->n; i++) {
665       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
666     }
667     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
668   }
669   ierr = PetscFree(osm->x);CHKERRQ(ierr);
670   ierr = PetscFree(osm->y);CHKERRQ(ierr);
671   ierr = PetscFree(pc->data);CHKERRQ(ierr);
672   PetscFunctionReturn(0);
673 }
674 
675 #undef __FUNCT__
676 #define __FUNCT__ "PCSetFromOptions_GASM"
677 static PetscErrorCode PCSetFromOptions_GASM(PC pc) {
678   PC_GASM         *osm = (PC_GASM*)pc->data;
679   PetscErrorCode ierr;
680   PetscInt       blocks,ovl;
681   PetscBool      symset,flg;
682   PCGASMType      gasmtype;
683 
684   PetscFunctionBegin;
685   /* set the type to symmetric if matrix is symmetric */
686   if (!osm->type_set && pc->pmat) {
687     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
688     if (symset && flg) { osm->type = PC_GASM_BASIC; }
689   }
690   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
    osm->create_local = PETSC_TRUE;
    ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,&flg);CHKERRQ(ierr);
    if (!osm->create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains yet.");
    ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr); }
697     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
698     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
699     flg  = PETSC_FALSE;
700     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
701     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
702   ierr = PetscOptionsTail();CHKERRQ(ierr);
703   PetscFunctionReturn(0);
704 }
705 
706 /*------------------------------------------------------------------------------------*/
707 
708 EXTERN_C_BEGIN
709 #undef __FUNCT__
710 #define __FUNCT__ "PCGASMSetSubdomains_GASM"
711 PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
712 {
713   PC_GASM         *osm = (PC_GASM*)pc->data;
714   PetscErrorCode ierr;
715   PetscInt       i;
716 
717   PetscFunctionBegin;
718   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
719   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
720 
721   if (!pc->setupcalled) {
722     osm->n       = n;
723     osm->ois     = 0;
724     osm->iis     = 0;
725     if (ois) {
726       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
727     }
728     if (iis) {
729       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
730     }
731     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
732     if (ois) {
733       ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
734       for (i=0; i<n; i++) { osm->ois[i] = ois[i]; }
735       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
736       osm->overlap = -1;
737       if (!iis) {
738         ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
        for (i=0; i<n; i++) {
          ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);
          osm->iis[i] = ois[i];
        }
743       }
744     }
745     if (iis) {
746       ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
747       for (i=0; i<n; i++) { osm->iis[i] = iis[i]; }
748       if (!ois) {
749         ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
        for (i=0; i<n; i++) {
          ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
          osm->ois[i] = iis[i];
        }
756         if (osm->overlap > 0) {
757           /* Extend the "overlapping" regions by a number of steps */
758           ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
759         }
760       }
761     }
762   }
763   PetscFunctionReturn(0);
764 }
765 EXTERN_C_END
766 
767 EXTERN_C_BEGIN
768 #undef __FUNCT__
769 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
770 PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local) {
771   PC_GASM         *osm = (PC_GASM*)pc->data;
772   PetscErrorCode ierr;
773   PetscMPIInt    rank,size;
774   PetscInt       n;
775   PetscInt       Nmin, Nmax;
776   PetscFunctionBegin;
  if (!create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains.");
778   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
779   ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,((PetscObject)pc)->comm);CHKERRQ(ierr);
780   ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
781   if (Nmin != Nmax)
782     SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);
783 
784   osm->create_local = create_local;
785   /*
786      Split the subdomains equally among all processors
787   */
788   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
789   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
790   n = N/size + ((N % size) > rank);
791   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
792   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
793   if (!pc->setupcalled) {
794     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
795     osm->N            = N;
796     osm->n            = n;
797     osm->nmax         = N/size + ((N%size)?1:0);
    osm->ois          = 0;
    osm->iis          = 0;
800   }
801   PetscFunctionReturn(0);
802 }
803 EXTERN_C_END
804 
805 EXTERN_C_BEGIN
806 #undef __FUNCT__
807 #define __FUNCT__ "PCGASMSetOverlap_GASM"
808 PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
809 {
810   PC_GASM *osm = (PC_GASM*)pc->data;
811 
812   PetscFunctionBegin;
813   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
814   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
815   if (!pc->setupcalled) {
816     osm->overlap = ovl;
817   }
818   PetscFunctionReturn(0);
819 }
820 EXTERN_C_END
821 
822 EXTERN_C_BEGIN
823 #undef __FUNCT__
824 #define __FUNCT__ "PCGASMSetType_GASM"
825 PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
826 {
827   PC_GASM *osm = (PC_GASM*)pc->data;
828 
829   PetscFunctionBegin;
830   osm->type     = type;
831   osm->type_set = PETSC_TRUE;
832   PetscFunctionReturn(0);
833 }
834 EXTERN_C_END
835 
836 EXTERN_C_BEGIN
837 #undef __FUNCT__
838 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
839 PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
840 {
841   PC_GASM *osm = (PC_GASM*)pc->data;
842 
843   PetscFunctionBegin;
844   osm->sort_indices = doSort;
845   PetscFunctionReturn(0);
846 }
847 EXTERN_C_END
848 
849 EXTERN_C_BEGIN
850 #undef __FUNCT__
851 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
852 /*
853    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
854         In particular, it would upset the global subdomain number calculation.
855 */
856 PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
857 {
858   PC_GASM         *osm = (PC_GASM*)pc->data;
859   PetscErrorCode ierr;
860 
861   PetscFunctionBegin;
  if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
863 
864   if (n) {
865     *n = osm->n;
866   }
867   if (first) {
868     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
869     *first -= osm->n;
870   }
871   if (ksp) {
872     /* Assume that local solves are now different; not necessarily
873        true, though!  This flag is used only for PCView_GASM() */
874     *ksp                   = osm->ksp;
875     osm->same_subdomain_solvers = PETSC_FALSE;
876   }
877   PetscFunctionReturn(0);
878 }/* PCGASMGetSubKSP_GASM() */
879 EXTERN_C_END
880 
881 
882 #undef __FUNCT__
883 #define __FUNCT__ "PCGASMSetSubdomains"
884 /*@C
885     PCGASMSetSubdomains - Sets the subdomains for this processor
886     for the additive Schwarz preconditioner.
887 
888     Collective on PC
889 
890     Input Parameters:
891 +   pc  - the preconditioner context
892 .   n   - the number of subdomains for this processor
893 .   iis - the index sets that define this processor's local inner subdomains
894          (or PETSC_NULL for PETSc to determine subdomains)
-   ois - the index sets that define this processor's local outer subdomains
896          (or PETSC_NULL to use the same as iis)
897 
898     Notes:
899     The IS indices use the parallel, global numbering of the vector entries.
900     Inner subdomains are those where the correction is applied.
901     Outer subdomains are those where the residual necessary to obtain the
902     corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
903     Both inner and outer subdomains can extend over several processors.
904     This processor's portion of a subdomain is known as a local subdomain.
905 
906     By default the GASM preconditioner uses 1 (local) subdomain per processor.
907     Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
908     all processors that PCGASM will create automatically, and to specify whether
909     they should be local or not.
910 
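    A minimal usage sketch (error checking elided); the index sets iis[] and
    ois[] are assumed to have been created beforehand, e.g., with
    ISCreateGeneral():
.vb
    IS iis[2], ois[2];
    ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
    ierr = PCGASMSetSubdomains(pc,2,iis,ois);CHKERRQ(ierr);
.ve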
911 
912     Level: advanced
913 
914 .keywords: PC, GASM, set, subdomains, additive Schwarz
915 
916 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
917           PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
918 @*/
919 PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
920 {
921   PetscErrorCode ierr;
922 
923   PetscFunctionBegin;
924   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
925   ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
926   PetscFunctionReturn(0);
927 }
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "PCGASMSetTotalSubdomains"
931 /*@C
932     PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
933     Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
934     communicator. Either all or no processors in the PC communicator must call this routine with
935     the same N.  The subdomains will be created automatically during PCSetUp().
936 
937     Collective on PC
938 
939     Input Parameters:
940 +   pc           - the preconditioner context
941 .   N            - the total number of subdomains cumulative across all processors
942 -   create_local - whether the subdomains to be created are to be local
943 
944     Options Database Key:
945     To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
946 +    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
947 -    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)
948 
949     By default the GASM preconditioner uses 1 subdomain per processor.
950 
951 
952     Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
953     of subdomains per processor.
954 
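    For example, a sketch that asks PCGASM to autocreate 16 local subdomains
    across the communicator (error checking elided):
.vb
    ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
    ierr = PCGASMSetTotalSubdomains(pc,16,PETSC_TRUE);CHKERRQ(ierr);
.ve
    which is equivalent to running with -pc_gasm_total_subdomains 16.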
955     Level: advanced
956 
957 .keywords: PC, GASM, set, total, global, subdomains, additive Schwarz
958 
959 .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
960           PCGASMCreateSubdomains2D()
961 @*/
962 PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
963 {
964   PetscErrorCode ierr;
965 
966   PetscFunctionBegin;
967   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
968   ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "PCGASMSetOverlap"
974 /*@
975     PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
976     additive Schwarz preconditioner.  Either all or no processors in the
977     PC communicator must call this routine.
978 
979     Logically Collective on PC
980 
981     Input Parameters:
982 +   pc  - the preconditioner context
983 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
984 
985     Options Database Key:
986 .   -pc_gasm_overlap <overlap> - Sets overlap
987 
988     Notes:
989     By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
991     PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).
992 
993     The overlap defaults to 1, so if one desires that no additional
994     overlap be computed beyond what may have been set with a call to
995     PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
996     must be set to be 0.  In particular, if one does not explicitly set
997     the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
999     variant that is equivalent to the block Jacobi preconditioner.
1000 
1001     Note that one can define initial index sets with any overlap via
1002     PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
1003     PETSc to extend that overlap further, if desired.
1004 
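    For example, a sketch that extends each automatically created subdomain
    by two layers of overlap (error checking elided):
.vb
    ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
    ierr = PCGASMSetOverlap(pc,2);CHKERRQ(ierr);
.ve
    which is equivalent to running with -pc_gasm_overlap 2.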
1005     Level: intermediate
1006 
1007 .keywords: PC, GASM, set, overlap
1008 
1009 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1010           PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
1011 @*/
1012 PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
1013 {
1014   PetscErrorCode ierr;
1015 
1016   PetscFunctionBegin;
1017   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1018   PetscValidLogicalCollectiveInt(pc,ovl,2);
1019   ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
1020   PetscFunctionReturn(0);
1021 }
1022 
1023 #undef __FUNCT__
1024 #define __FUNCT__ "PCGASMSetType"
1025 /*@
1026     PCGASMSetType - Sets the type of restriction and interpolation used
1027     for local problems in the additive Schwarz method.
1028 
1029     Logically Collective on PC
1030 
1031     Input Parameters:
1032 +   pc  - the preconditioner context
1033 -   type - variant of GASM, one of
1034 .vb
1035       PC_GASM_BASIC       - full interpolation and restriction
1036       PC_GASM_RESTRICT    - full restriction, local processor interpolation
1037       PC_GASM_INTERPOLATE - full interpolation, local processor restriction
1038       PC_GASM_NONE        - local processor restriction and interpolation
1039 .ve
1040 
1041     Options Database Key:
1042 .   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1043 
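    For example, a one-line sketch selecting the standard (non-restricted)
    variant (error checking elided):
.vb
    ierr = PCGASMSetType(pc,PC_GASM_BASIC);CHKERRQ(ierr);
.ve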
1044     Level: intermediate
1045 
1046 .keywords: PC, GASM, set, type
1047 
1048 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1049           PCGASMCreateSubdomains2D()
1050 @*/
1051 PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
1052 {
1053   PetscErrorCode ierr;
1054 
1055   PetscFunctionBegin;
1056   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1057   PetscValidLogicalCollectiveEnum(pc,type,2);
1058   ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
1059   PetscFunctionReturn(0);
1060 }
1061 
1062 #undef __FUNCT__
1063 #define __FUNCT__ "PCGASMSetSortIndices"
1064 /*@
1065     PCGASMSetSortIndices - Determines whether subdomain indices are sorted.
1066 
1067     Logically Collective on PC
1068 
1069     Input Parameters:
1070 +   pc  - the preconditioner context
1071 -   doSort - sort the subdomain indices
1072 
1073     Level: intermediate
1074 
1075 .keywords: PC, GASM, set, type
1076 
1077 .seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
1078           PCGASMCreateSubdomains2D()
1079 @*/
1080 PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool  doSort)
1081 {
1082   PetscErrorCode ierr;
1083 
1084   PetscFunctionBegin;
1085   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1086   PetscValidLogicalCollectiveBool(pc,doSort,2);
1087   ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
1088   PetscFunctionReturn(0);
1089 }
1090 
1091 #undef __FUNCT__
1092 #define __FUNCT__ "PCGASMGetSubKSP"
1093 /*@C
1094    PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
1095    this processor.
1096 
1097    Collective on PC iff first_local is requested
1098 
1099    Input Parameter:
1100 .  pc - the preconditioner context
1101 
1102    Output Parameters:
1103 +  n_local - the number of blocks on this processor or PETSC_NULL
1104 .  first_local - the global number of the first block on this processor or PETSC_NULL,
1105                  all processors must request or all must pass PETSC_NULL
1106 -  ksp - the array of KSP contexts
1107 
1108    Note:
   After PCGASMGetSubKSP() the array of KSP contexts should not be freed.
1110 
1111    Currently for some matrix implementations only 1 block per processor
1112    is supported.
1113 
1114    You must call KSPSetUp() before calling PCGASMGetSubKSP().
1115 
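   A sketch of setting options on each block solver directly (error checking
   elided; ksp is assumed to be the outer solver and pc its GASM
   preconditioner, and the choice of KSPGMRES/PCILU is purely illustrative):
.vb
   PetscInt n_local,first_local,i;
   KSP      *subksp;
   PC       subpc;
   ierr = KSPSetUp(ksp);CHKERRQ(ierr);
   ierr = PCGASMGetSubKSP(pc,&n_local,&first_local,&subksp);CHKERRQ(ierr);
   for (i=0; i<n_local; i++) {
     ierr = KSPSetType(subksp[i],KSPGMRES);CHKERRQ(ierr);
     ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
     ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
   }
.ve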
1116    Level: advanced
1117 
1118 .keywords: PC, GASM, additive Schwarz, get, sub, KSP, context
1119 
1120 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
1121           PCGASMCreateSubdomains2D(),
1122 @*/
1123 PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1124 {
1125   PetscErrorCode ierr;
1126 
1127   PetscFunctionBegin;
1128   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1129   ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
1130   PetscFunctionReturn(0);
1131 }
1132 
1133 /* -------------------------------------------------------------------------------------*/
1134 /*MC
1135    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1136            its own KSP object.
1137 
1138    Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets total number of local subdomains (known as blocks) to be distributed among processors
1140 .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1141 .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1142 .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1143 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1144 
     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors, you
      will get different convergence rates due to the default option of -pc_gasm_type restrict. Use
      -pc_gasm_type basic to use the standard GASM.
1148 
1149    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1150      than one processor. Defaults to one block per processor.
1151 
1152      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1153         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1154 
1155      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
1156          and set the options directly on the resulting KSP object (you can access its PC
1157          with KSPGetPC())
1158 
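     For example, a typical invocation (the executable name ./ex1 is a placeholder):
.vb
     mpiexec -n 4 ./ex1 -pc_type gasm -pc_gasm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu
.ve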
1159 
1160    Level: beginner
1161 
1162    Concepts: additive Schwarz method
1163 
1164     References:
1165     An additive variant of the Schwarz alternating method for the case of many subregions
1166     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1167 
1168     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1169     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1170 
1171 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1172            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1173            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1174 
1175 M*/
1176 
1177 EXTERN_C_BEGIN
1178 #undef __FUNCT__
1179 #define __FUNCT__ "PCCreate_GASM"
1180 PetscErrorCode  PCCreate_GASM(PC pc)
1181 {
1182   PetscErrorCode ierr;
1183   PC_GASM         *osm;
1184 
1185   PetscFunctionBegin;
1186   ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
1187   osm->N                 = PETSC_DECIDE;
1188   osm->n                 = PETSC_DECIDE;
1189   osm->nmax              = 0;
1190   osm->overlap           = 1;
1191   osm->ksp               = 0;
1192   osm->gorestriction     = 0;
1193   osm->girestriction     = 0;
1194   osm->gx                = 0;
1195   osm->gy                = 0;
1196   osm->x                 = 0;
1197   osm->y                 = 0;
1198   osm->ois               = 0;
1199   osm->iis               = 0;
1200   osm->pmat              = 0;
1201   osm->type              = PC_GASM_RESTRICT;
1202   osm->same_subdomain_solvers = PETSC_TRUE;
1203   osm->sort_indices           = PETSC_TRUE;
1204 
1205   pc->data                   = (void*)osm;
1206   pc->ops->apply             = PCApply_GASM;
1207   pc->ops->applytranspose    = PCApplyTranspose_GASM;
1208   pc->ops->setup             = PCSetUp_GASM;
1209   pc->ops->reset             = PCReset_GASM;
1210   pc->ops->destroy           = PCDestroy_GASM;
1211   pc->ops->setfromoptions    = PCSetFromOptions_GASM;
1212   pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
1213   pc->ops->view              = PCView_GASM;
1214   pc->ops->applyrichardson   = 0;
1215 
1216   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM",
1217                     PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
1218   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
1219                     PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
1220   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
1221                     PCGASMSetOverlap_GASM);CHKERRQ(ierr);
1222   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
1223                     PCGASMSetType_GASM);CHKERRQ(ierr);
1224   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
1225                     PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
1226   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
1227                     PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
1228   PetscFunctionReturn(0);
1229 }
1230 EXTERN_C_END
1231 
1232 
1233 #undef __FUNCT__
1234 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1235 /*@C
1236    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
1237    Schwarz preconditioner for any problem based on its matrix.
1238 
1239    Collective
1240 
1241    Input Parameters:
1242 +  A       - The global matrix operator
1243 .  overlap - amount of overlap in outer subdomains
1244 -  n       - the number of local subdomains
1245 
1246    Output Parameters:
1247 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1248 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1249 
1250    Level: advanced
1251 
1252    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1253          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1254          nonzero overlap with PCGASMSetOverlap()
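
     For example, a minimal calling-sequence sketch (an illustration only, assuming a Mat A and a
     PC pc of type PCGASM; error checking omitted, and the (inner,outer) argument order shown for
     PCGASMSetSubdomains() is an assumption to be checked against its man page):
.vb
      IS *iis,*ois;
      PCGASMCreateLocalSubdomains(A,1,4,&iis,&ois); /* 4 local subdomains with overlap 1 */
      PCGASMSetSubdomains(pc,4,iis,ois);
      /* ... after the solve ... */
      PCGASMDestroySubdomains(4,iis,ois);
.ve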
1255 
1256     In the Fortran version you must provide the output arrays iis[] and ois[] already allocated with length n.
1257 
1258 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1259 
1260 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1261 @*/
1262 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS* iis[], IS* ois[])
1263 {
1264   MatPartitioning           mpart;
1265   const char                *prefix;
1266   PetscErrorCode            (*f)(Mat,MatReuse,Mat*);
1267   PetscMPIInt               size;
1268   PetscInt                  i,j,rstart,rend,bs;
1269   PetscBool                 isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1270   Mat                       Ad = PETSC_NULL, adj;
1271   IS                        ispart,isnumb,*is;
1272   PetscErrorCode            ierr;
1273 
1274   PetscFunctionBegin;
1275   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1276   PetscValidPointer(iis,4);
1277   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1278 
1279   /* Get prefix, row distribution, and block size */
1280   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1281   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1282   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1283   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1284 
1285   /* Get diagonal block from matrix if possible */
1286   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1287   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1288   if (f) {
1289     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1290   } else if (size == 1) {
1291     Ad = A;
1292   }
1293   if (Ad) {
1294     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1295     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1296   }
1297   if (Ad && n > 1) {
1298     PetscBool  match,done;
1299     /* Try to setup a good matrix partitioning if available */
1300     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1301     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1302     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1303     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1304     if (!match) {
1305       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1306     }
1307     if (!match) { /* assume a "good" partitioner is available */
1308       PetscInt na;
1309       const PetscInt *ia,*ja;
1310       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1311       if (done) {
1312         /* Build adjacency matrix by hand. Unfortunately a call to
1313            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1314            remove the block-aij structure and we cannot expect
1315            MatPartitioning to split vertices as we need */
1316         PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1317         const PetscInt *row;
1318         nnz = 0;
1319         for (i=0; i<na; i++) { /* count number of nonzeros */
1320           len = ia[i+1] - ia[i];
1321           row = ja + ia[i];
1322           for (j=0; j<len; j++) {
1323             if (row[j] == i) { /* don't count diagonal */
1324               len--; break;
1325             }
1326           }
1327           nnz += len;
1328         }
1329         ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1330         ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1331         nnz    = 0;
1332         iia[0] = 0;
1333         for (i=0; i<na; i++) { /* fill adjacency */
1334           cnt = 0;
1335           len = ia[i+1] - ia[i];
1336           row = ja + ia[i];
1337           for (j=0; j<len; j++) {
1338             if (row[j] != i) { /* if not diagonal */
1339               jja[nnz+cnt++] = row[j];
1340             }
1341           }
1342           nnz += cnt;
1343           iia[i+1] = nnz;
1344         }
1345         /* Partitioning of the adjacency matrix */
1346         ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1347         ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1348         ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1349         ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1350         ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1351         ierr = MatDestroy(&adj);CHKERRQ(ierr);
1352         foundpart = PETSC_TRUE;
1353       }
1354       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1355     }
1356     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1357   }
1358   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1359   if (!foundpart) {
1360 
1361     /* Partitioning by contiguous chunks of rows */
1362 
1363     PetscInt mbs   = (rend-rstart)/bs;
1364     PetscInt start = rstart;
1365     for (i=0; i<n; i++) {
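      /* distribute blocks as evenly as possible: the first (mbs % n) subdomains get one extra block row */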
1366       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1367       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1368       start += count;
1369     }
1370 
1371   } else {
1372 
1373     /* Partitioning by adjacency of diagonal block  */
1374 
1375     const PetscInt *numbering;
1376     PetscInt       *count,nidx,*indices,*newidx,start=0;
1377     /* Get node count in each partition */
1378     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1379     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1380     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1381       for (i=0; i<n; i++) count[i] *= bs;
1382     }
1383     /* Build indices from node numbering */
1384     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1385     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1386     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1387     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1388     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1389     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1390     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1391       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1392       for (i=0; i<nidx; i++)
1393         for (j=0; j<bs; j++)
1394           newidx[i*bs+j] = indices[i]*bs + j;
1395       ierr = PetscFree(indices);CHKERRQ(ierr);
1396       nidx   *= bs;
1397       indices = newidx;
1398     }
1399     /* Shift to get global indices */
1400     for (i=0; i<nidx; i++) indices[i] += rstart;
1401 
1402     /* Build the index sets for each block */
1403     for (i=0; i<n; i++) {
1404       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1405       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1406       start += count[i];
1407     }
1408 
1409     ierr = PetscFree(count);CHKERRQ(ierr);
1410     ierr = PetscFree(indices);CHKERRQ(ierr);
1411     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1412     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1413   }
1414   *iis = is;
1415   if (!ois) PetscFunctionReturn(0);
1416   /*
1417    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1418    has been requested, copy the inner subdomains over so they can be modified.
1419    */
1420   ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr);
1421   for (i=0; i<n; ++i) {
1422     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1423       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1424       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1425     } else {
1426       ierr = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1427       (*ois)[i] = (*iis)[i];
1428     }
1429   }
1430   if (overlap > 0) {
1431     /* Extend the "overlapping" regions by a number of steps */
1432     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1433   }
1434   PetscFunctionReturn(0);
1435 }
1436 
1437 #undef __FUNCT__
1438 #define __FUNCT__ "PCGASMDestroySubdomains"
1439 /*@C
1440    PCGASMDestroySubdomains - Destroys the index sets created with
1441    PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
1442    called after setting subdomains with PCGASMSetSubdomains().
1443 
1444    Collective
1445 
1446    Input Parameters:
1447 +  n   - the number of index sets
1448 .  iis - the array of inner subdomains, can be PETSC_NULL
1449 -  ois - the array of outer subdomains, can be PETSC_NULL
1450 
1451    Level: intermediate
1452 
1453    Notes: this is merely a convenience subroutine that walks each list,
1454    destroys each IS on the list, and then frees the list.
1455 
1456 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1457 
1458 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1459 @*/
1460 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1461 {
1462   PetscInt       i;
1463   PetscErrorCode ierr;
1464   PetscFunctionBegin;
1465   if (n <= 0) PetscFunctionReturn(0);
1466   if (iis) {
1467     PetscValidPointer(iis,2);
1468     for (i=0; i<n; i++) {
1469       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1470     }
1471     ierr = PetscFree(iis);CHKERRQ(ierr);
1472   }
1473   if (ois) {
1474     for (i=0; i<n; i++) {
1475       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1476     }
1477     ierr = PetscFree(ois);CHKERRQ(ierr);
1478   }
1479   PetscFunctionReturn(0);
1480 }
1481 
1482 
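/*
   PCGASMLocalSubdomainBounds2D - a helper macro for PCGASMCreateSubdomains2D() below.
   Inputs: the global grid dimensions M,N; a subdomain's bounding box [xleft,xright) x [ylow,yhigh)
   in grid coordinates; and this processor's ownership range [first,last) of matrix rows.
   Outputs (all passed by pointer): the bounding box (xleft_loc,ylow_loc)-(xright_loc,yhigh_loc) of
   the intersection of the subdomain with the ownership range, and n, the number of locally owned
   subdomain indices.
*/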
1483 #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
1484 {                                                                                                       \
1485  PetscInt first_row = first/M, last_row = last/M+1;                                                     \
1486   /*                                                                                                    \
1487    Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
1488    of the bounding box of the intersection of the subdomain with the local ownership range (local       \
1489    subdomain).                                                                                          \
1490    Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
1491    of the intersection.                                                                                 \
1492   */                                                                                                    \
1493   /* ylow_loc is the grid row containing the first element of the local subdomain */                    \
1494   *ylow_loc = PetscMax(first_row,ylow);                                                                    \
1495   /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1496   *xleft_loc = *ylow_loc==first_row?PetscMax(first%M,xleft):xleft;                                                                            \
1497   /* yhigh_loc is the grid row above the last local subdomain element */                                                                    \
1498   *yhigh_loc = PetscMin(last_row,yhigh);                                                                                                     \
1499   /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */         \
1500   *xright_loc = *yhigh_loc==last_row?PetscMin(xright,last%M):xright;                                                                          \
1501   /* Now compute the size of the local subdomain n. */ \
1502   *n = 0;                                               \
1503   if (*ylow_loc < *yhigh_loc) {                           \
1504     PetscInt width = xright-xleft;                     \
1505     *n += width*(*yhigh_loc-*ylow_loc-1);                 \
1506     *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
1507     *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
1508   }\
1509 }
1510 
1511 
1512 
1513 #undef __FUNCT__
1514 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1515 /*@
1516    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1517    preconditioner for a two-dimensional problem on a regular grid.
1518 
1519    Collective
1520 
1521    Input Parameters:
1522 +  M, N               - the global number of grid points in the x and y directions
1523 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1524 .  dof                - degrees of freedom per node
1525 -  overlap            - overlap in mesh lines
1526 
1527    Output Parameters:
1528 +  nsub - the number of local subdomains created
1529 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1530 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1531 
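   For example, a minimal sketch (an illustration only, assuming a PC pc of type PCGASM for a problem
   with one degree of freedom per node on an M x N grid; error checking omitted):
.vb
      PetscInt nsub;
      IS       *iis,*ois;
      PCGASMCreateSubdomains2D(pc,M,N,2,2,1,1,&nsub,&iis,&ois); /* 2x2 subdomains, overlap 1 */
      PCGASMSetSubdomains(pc,nsub,iis,ois);                     /* (inner,outer) order assumed; see PCGASMSetSubdomains() */
.ve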
1532 
1533    Level: advanced
1534 
1535 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1536 
1537 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1538           PCGASMSetOverlap()
1539 @*/
1540 PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
1541 {
1542   PetscErrorCode ierr;
1543   PetscMPIInt    size, rank;
1544   PetscInt       i, j;
1545   PetscInt       maxheight, maxwidth;
1546   PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
1547   PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
1548   PetscInt       x[2][2], y[2][2], n[2];
1549   PetscInt       first, last;
1550   PetscInt       nidx, *idx;
1551   PetscInt       ii,jj,s,q,d;
1552   PetscInt       k,kk;
1553   PetscMPIInt    color;
1554   MPI_Comm       comm, subcomm;
1555   IS             **xis = 0, **is = ois, **is_local = iis;
1556 
1557   PetscFunctionBegin;
1558   ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
1559   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1560   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1561   ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
1562   if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1563 	     "does not respect the number of degrees of freedom per grid point %D", first, last, dof);
1564 
1565   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1566   s = 0;
1567   ystart = 0;
1568   for (j=0; j<Ndomains; ++j) {
1569     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1570     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1571     /* Vertical domain limits with an overlap. */
1572     ylow = PetscMax(ystart - overlap,0);
1573     yhigh = PetscMin(ystart + maxheight + overlap,N);
1574     xstart = 0;
1575     for (i=0; i<Mdomains; ++i) {
1576       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1577       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1578       /* Horizontal domain limits with an overlap. */
1579       xleft   = PetscMax(xstart - overlap,0);
1580       xright  = PetscMin(xstart + maxwidth + overlap,M);
1581       /*
1582 	 Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1583       */
1584       PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1585       if (nidx) {
1586         ++s;
1587       }
1588       xstart += maxwidth;
1589     }/* for (i = 0; i < Mdomains; ++i) */
1590     ystart += maxheight;
1591   }/* for (j = 0; j < Ndomains; ++j) */
1592   /* Now we can allocate the necessary number of ISs. */
1593   *nsub = s;
1594   ierr = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1595   ierr = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1596   s = 0;
1597   ystart = 0;
1598   for (j=0; j<Ndomains; ++j) {
1599     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1600     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1601     /* Vertical domain limits with an overlap. */
1602     y[0][0] = PetscMax(ystart - overlap,0);
1603     y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1604     /* Vertical domain limits without an overlap. */
1605     y[1][0] = ystart;
1606     y[1][1] = PetscMin(ystart + maxheight,N);
1607     xstart = 0;
1608     for (i=0; i<Mdomains; ++i) {
1609       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1610       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1611       /* Horizontal domain limits with an overlap. */
1612       x[0][0]  = PetscMax(xstart - overlap,0);
1613       x[0][1]  = PetscMin(xstart + maxwidth + overlap,M);
1614       /* Horizontal domain limits without an overlap. */
1615       x[1][0] = xstart;
1616       x[1][1] = PetscMin(xstart+maxwidth,M);
1617       /*
1618 	 Determine whether this domain intersects this processor's ownership range of pc->pmat.
1619 	 Do this twice: first for the domains with overlaps, and once without.
1620 	 During the first pass create the subcommunicators, and use them on the second pass as well.
1621       */
1622       for (q = 0; q < 2; ++q) {
1623 	/*
1624 	  The domain limits (xleft, xright) and (ylow, yhigh) are adjusted
1625 	  depending on whether the domain is considered with or without an overlap.
1626 	*/
1627 	xleft = x[q][0]; xright = x[q][1];
1628 	ylow  = y[q][0]; yhigh  = y[q][1];
1629         PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1630 	nidx *= dof;
1631         n[q] = nidx;
1632         /*
1633          Based on the counted number of indices in the local domain *with an overlap*,
1634          construct a subcommunicator of all the processors supporting this domain.
1635          Observe that a domain with an overlap might have nontrivial local support,
1636          while the domain without an overlap might not.  Hence, the decision to participate
1637          in the subcommunicator must be based on the domain with an overlap.
1638          */
1639 	if (q == 0) {
1640 	  if (nidx) {
1641 	    color = 1;
1642 	  } else {
1643 	    color = MPI_UNDEFINED;
1644 	  }
1645 	  ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
1646 	}
1647         /*
1648          Proceed only if the number of local indices *with an overlap* is nonzero.
1649          */
1650         if (n[0]) {
1651           if (q == 0) {
1652             xis = is;
1653           }
1654           if (q == 1) {
1655             /*
1656              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1657              Moreover, if the overlap is zero, the two ISs are identical.
1658              */
1659             if (overlap == 0) {
1660               (*is_local)[s] = (*is)[s];
1661               ierr = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
1662               continue;
1663             } else {
1664               xis = is_local;
1665               subcomm = ((PetscObject)(*is)[s])->comm;
1666             }
1667           }/* if (q == 1) */
1668           idx = PETSC_NULL;
1669 	  ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1670           if (nidx) {
1671             k    = 0;
1672             for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1673               PetscInt x0 = (jj==ylow_loc)?xleft_loc:xleft;
1674               PetscInt x1 = (jj==yhigh_loc-1)?xright_loc:xright;
1675               kk = dof*(M*jj + x0);
1676               for (ii=x0; ii<x1; ++ii) {
1677                 for (d = 0; d < dof; ++d) {
1678                   idx[k++] = kk++;
1679                 }
1680               }
1681             }
1682           }
1683 	  ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
1684 	}/* if (n[0]) */
1685       }/* for (q = 0; q < 2; ++q) */
1686       if (n[0]) {
1687         ++s;
1688       }
1689       xstart += maxwidth;
1690     }/* for (i = 0; i < Mdomains; ++i) */
1691     ystart += maxheight;
1692   }/* for (j = 0; j < Ndomains; ++j) */
1693   PetscFunctionReturn(0);
1694 }
1695 
1696 #undef __FUNCT__
1697 #define __FUNCT__ "PCGASMGetSubdomains"
1698 /*@C
1699     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1700     for the additive Schwarz preconditioner.
1701 
1702     Not Collective
1703 
1704     Input Parameter:
1705 .   pc - the preconditioner context
1706 
1707     Output Parameters:
1708 +   n   - the number of subdomains for this processor (default value = 1)
1709 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be PETSC_NULL)
1710 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be PETSC_NULL)
1711 
1712 
1713     Notes:
1714     The user is responsible for destroying the ISs and freeing the returned arrays.
1715     The IS numbering is in the parallel, global numbering of the vector.
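
    For example, a minimal sketch (an illustration only; error checking omitted, and see the Notes
    above for the caller's responsibility to free the returned arrays):
.vb
      PetscInt n;
      IS       *iis,*ois;
      PCGASMGetSubdomains(pc,&n,&iis,&ois);
      /* ... examine iis[i] and ois[i] ... */
      PetscFree(iis);
      PetscFree(ois);
.ve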
1716 
1717     Level: advanced
1718 
1719 .keywords: PC, GASM, get, subdomains, additive Schwarz
1720 
1721 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1722           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1723 @*/
1724 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1725 {
1726   PC_GASM         *osm;
1727   PetscErrorCode ierr;
1728   PetscBool      match;
1729   PetscInt       i;
1730   PetscFunctionBegin;
1731   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1732   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1733   if (!match)
1734     SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1735   osm = (PC_GASM*)pc->data;
1736   if (n)  *n  = osm->n;
1737   if (iis) {
1738     ierr = PetscMalloc(osm->n*sizeof(IS), iis);CHKERRQ(ierr);
1739   }
1740   if (ois) {
1741     ierr = PetscMalloc(osm->n*sizeof(IS), ois);CHKERRQ(ierr);
1742   }
1743   if (iis || ois) {
1744     for (i = 0; i < osm->n; ++i) {
1745       if (iis) (*iis)[i] = osm->iis[i];
1746       if (ois) (*ois)[i] = osm->ois[i];
1747     }
1748   }
1749   PetscFunctionReturn(0);
1750 }
1751 
1752 #undef __FUNCT__
1753 #define __FUNCT__ "PCGASMGetSubmatrices"
1754 /*@C
1755     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1756     only) for the additive Schwarz preconditioner.
1757 
1758     Not Collective
1759 
1760     Input Parameter:
1761 .   pc - the preconditioner context
1762 
1763     Output Parameters:
1764 +   n   - the number of matrices for this processor (default value = 1)
1765 -   mat - the matrices
1766 
1767     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1768            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1769            subdomains were defined using PCGASMSetTotalSubdomains().
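
    A minimal sketch (an illustration only; valid after KSPSetUp() or PCSetUp(), with error checking omitted):
.vb
      PetscInt n;
      Mat      *mat;
      PCGASMGetSubmatrices(pc,&n,&mat); /* mat[i] is the submatrix for the i-th local subdomain */
.ve
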
1770     Level: advanced
1771 
1772 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1773 
1774 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1775           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1776 @*/
1777 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1778 {
1779   PC_GASM         *osm;
1780   PetscErrorCode ierr;
1781   PetscBool      match;
1782 
1783   PetscFunctionBegin;
1784   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1785   PetscValidIntPointer(n,2);
1786   if (mat) PetscValidPointer(mat,3);
1787   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1788   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1789   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1790   osm = (PC_GASM*)pc->data;
1791   if (n)   *n   = osm->n;
1792   if (mat) *mat = osm->pmat;
1793 
1794   PetscFunctionReturn(0);
1795 }
1796