xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision 047240e14af00aad1ef65e96f6fface8924f7f7e)
1 /*
  This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3   In this version each processor may intersect multiple subdomains and any subdomain may
4   intersect multiple processors.  Intersections of subdomains with processors are called *local
5   subdomains*.
6 
7        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
8        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
10 */
11 #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
12 
13 typedef struct {
14   PetscInt   N,n,nmax;
15   PetscInt   overlap;             /* overlap requested by user */
16   KSP        *ksp;                /* linear solvers for each block */
17   Vec        gx,gy;               /* Merged work vectors */
18   Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
19   VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
20   VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
21   IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
22   IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
23   Mat        *pmat;               /* subdomain block matrices */
24   PCGASMType type;                /* use reduced interpolation, restriction or both */
25   PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
26   PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
27   PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are same */
28   PetscBool  sort_indices;           /* flag to sort subdomain indices */
29 } PC_GASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCGASMSubdomainView_Private"
33 static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
34 {
35   PC_GASM        *osm = (PC_GASM*)pc->data;
36   PetscInt       j,nidx;
37   const PetscInt *idx;
38   PetscViewer    sviewer;
39   char           *cidx;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBegin;
43   if (i < 0 || i > osm->n) SETERRQ2(((PetscObject)viewer)->comm, PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must nonnegative and less than %D", i, osm->n);
44   /* Inner subdomains. */
45   ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
46   /*
47    No more than 15 characters per index plus a space.
48    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
49    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
50    For nidx == 0, the whole string 16 '\0'.
51    */
52   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
53   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
54   ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
55   for (j = 0; j < nidx; ++j) {
56     ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
57   }
58   ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
59   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
60   ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
61   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
62   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
63   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
64   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
65   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
66   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
67   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68   ierr = PetscFree(cidx);CHKERRQ(ierr);
69   /* Outer subdomains. */
70   ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
71   /*
72    No more than 15 characters per index plus a space.
73    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
74    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
75    For nidx == 0, the whole string 16 '\0'.
76    */
77   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
78   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
79   ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
80   for (j = 0; j < nidx; ++j) {
81     ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
82   }
83   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
84   ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
85   ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
86   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
87   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
88   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
89   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
90   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
91   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
92   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
93   ierr = PetscFree(cidx);CHKERRQ(ierr);
94   PetscFunctionReturn(0);
95 }
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "PCGASMPrintSubdomains"
99 static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
100 {
101   PC_GASM        *osm = (PC_GASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscInt       i, l, d, count, gcount, *permutation, *numbering;
105   PetscBool      found;
106   PetscViewer    viewer, sviewer = PETSC_NULL;
107   PetscErrorCode ierr;
108 
109   PetscFunctionBegin;
110   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
111   for (i = 0; i < osm->n; ++i) permutation[i] = i;
112   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
113   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
114   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
115   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
116   if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
117   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
118   /*
119    Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
120    the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
121   */
122   ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
123   l    = 0;
124   for (count = 0; count < gcount; ++count) {
125     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
126     if (l<osm->n) {
127       d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
128       if (numbering[d] == count) {
129         ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
130         ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
131         ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
132         ++l;
133       }
134     }
135     ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
136   }
137   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
138   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
139   PetscFunctionReturn(0);
140 }
141 
142 
143 #undef __FUNCT__
144 #define __FUNCT__ "PCView_GASM"
145 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
146 {
147   PC_GASM        *osm = (PC_GASM*)pc->data;
148   const char     *prefix;
149   PetscErrorCode ierr;
150   PetscMPIInt    rank, size;
151   PetscInt       i,bsz;
152   PetscBool      iascii,view_subdomains=PETSC_FALSE;
153   PetscViewer    sviewer;
154   PetscInt       count, l, gcount, *numbering, *permutation;
155   char           overlap[256]     = "user-defined overlap";
156   char           gsubdomains[256] = "unknown total number of subdomains";
157   char           lsubdomains[256] = "unknown number of local  subdomains";
158   char           msubdomains[256] = "unknown max number of local subdomains";
159 
160   PetscFunctionBegin;
161   ierr = MPI_Comm_size(((PetscObject)pc)->comm, &size);CHKERRQ(ierr);
162   ierr = MPI_Comm_rank(((PetscObject)pc)->comm, &rank);CHKERRQ(ierr);
163   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
164   for (i = 0; i < osm->n; ++i) permutation[i] = i;
165   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
166   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
167 
168   if (osm->overlap >= 0) {
169     ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
170   }
171   ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
172   if (osm->N > 0) {
173     ierr = PetscSNPrintf(lsubdomains, sizeof(gsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
174   }
175   if (osm->nmax > 0) {
176     ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
177   }
178 
179   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
180   ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,PETSC_NULL);CHKERRQ(ierr);
181 
182   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
183   if (iascii) {
184     /*
185      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
186      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
187      collectively on the comm.
188      */
189     ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
190     ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
191     ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
192     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
193     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
194     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
195     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
196     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
197     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr);
198     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
199     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
200     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
201     ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
202     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
203     ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
204     /* Make sure that everybody waits for the banner to be printed. */
205     ierr = MPI_Barrier(((PetscObject)viewer)->comm);CHKERRQ(ierr);
206     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
207     l = 0;
208     for (count = 0; count < gcount; ++count) {
209       PetscMPIInt srank, ssize;
210       if (l<osm->n) {
211         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
212         if (numbering[d] == count) {
213           ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
214           ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
215           ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
216           ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
217           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
218           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%D:%D] (subcomm [%D:%D]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr);
219           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
220           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
221           if (view_subdomains) {
222             ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
223           }
224           if (!pc->setupcalled) {
225             PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
226           } else {
227             ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
228           }
229           ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
230           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
231           ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
232           ++l;
233         }
234       }
235       ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
236     }
237     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
238   }
239   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
240   PetscFunctionReturn(0);
241 }
242 
243 
244 
245 
246 
247 #undef __FUNCT__
248 #define __FUNCT__ "PCSetUp_GASM"
249 static PetscErrorCode PCSetUp_GASM(PC pc)
250 {
251   PC_GASM        *osm = (PC_GASM*)pc->data;
252   PetscErrorCode ierr;
253   PetscBool      symset,flg;
254   PetscInt       i;
255   PetscMPIInt    rank, size;
256   MatReuse       scall = MAT_REUSE_MATRIX;
257   KSP            ksp;
258   PC             subpc;
259   const char     *prefix,*pprefix;
260   Vec            x,y;
261   PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
262   const PetscInt *oidxi;    /* Indices from the i-th subdomain local outer subdomain.             */
263   PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
264   PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
265   IS             gois;      /* Disjoint union the global indices of outer subdomains.             */
266   IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
267   PetscScalar    *gxarray, *gyarray;
268   PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
269   DM             *subdomain_dm = PETSC_NULL;
270 
271   PetscFunctionBegin;
272   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
273   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
274   if (!pc->setupcalled) {
275 
276     if (!osm->type_set) {
277       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
278       if (symset && flg) osm->type = PC_GASM_BASIC;
279     }
280 
281     /*
282      If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
283      The total number of subdomains, osm->N is not necessarily set, might be PETSC_DECIDE, and then will have to be calculated from osm->n.
284      */
285     if (osm->n == PETSC_DECIDE) {
286       /* no subdomains given */
287       /* try pc->dm first */
288       if (pc->dm) {
289         char      ddm_name[1024];
290         DM        ddm;
291         PetscBool flg;
292         PetscInt  num_subdomains, d;
293         char      **subdomain_names;
294         IS        *inner_subdomain_is, *outer_subdomain_is;
295         /* Allow the user to request a decomposition DM by name */
296         ierr = PetscStrncpy(ddm_name, "", 1024);CHKERRQ(ierr);
297         ierr = PetscOptionsString("-pc_gasm_decomposition","Name of the DM defining the decomposition", "PCSetDM",ddm_name,ddm_name,1024,&flg);CHKERRQ(ierr);
298         if (flg) {
299           ierr = DMCreateDomainDecompositionDM(pc->dm, ddm_name, &ddm);CHKERRQ(ierr);
300           if (!ddm) SETERRQ1(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Uknown DM decomposition name %s", ddm_name);
301           ierr = PetscInfo(pc,"Using decomposition DM defined using options database\n");CHKERRQ(ierr);
302           ierr = PCSetDM(pc,ddm);CHKERRQ(ierr);
303         }
304         ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
305         if (num_subdomains) {
306           ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
307         }
308         for (d = 0; d < num_subdomains; ++d) {
309           if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
310           if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
311           if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
312         }
313         ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
314         ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
315         ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
316       }
317       if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
318         osm->nmax = osm->n = 1;
319         ierr      = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
320         osm->N    = size;
321       }
322     }
323     if (!osm->iis) {
324       /*
325        The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
326        but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
327        We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
328        */
329       ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
330     }
331     if (osm->N == PETSC_DECIDE) {
332       struct {PetscInt max,sum;} inwork,outwork;
333       /* determine global number of subdomains and the max number of local subdomains */
334       inwork.max = osm->n;
335       inwork.sum = osm->n;
336       ierr       = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
337       osm->nmax  = outwork.max;
338       osm->N     = outwork.sum;
339     }
340 
341     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
342     flg  = PETSC_FALSE;
343     ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
344     if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }
345 
346     if (osm->sort_indices) {
347       for (i=0; i<osm->n; i++) {
348         ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
349         ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
350       }
351     }
352     /*
353      Merge the ISs, create merged vectors and restrictions.
354      */
355     /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
356     on = 0;
357     for (i=0; i<osm->n; i++) {
358       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
359       on  += oni;
360     }
361     ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr);
362     on   = 0;
363     for (i=0; i<osm->n; i++) {
364       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
365       ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
366       ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
367       ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
368       on  += oni;
369     }
370     ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
371     ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
372     ierr = VecCreateMPI(((PetscObject)pc)->comm, on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
373     ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
374     ierr = VecGetOwnershipRange(osm->gx, &gofirst, PETSC_NULL);CHKERRQ(ierr);
375     ierr = ISCreateStride(((PetscObject)pc)->comm,on,gofirst,1, &goid);CHKERRQ(ierr);
376     ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
377     ierr = VecDestroy(&x);CHKERRQ(ierr);
378     ierr = ISDestroy(&gois);CHKERRQ(ierr);
379 
380     /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
381     {
382       PetscInt ini;           /* Number of indices the i-th a local inner subdomain. */
383       PetscInt in;            /* Number of indices in the disjoint uniont of local inner subdomains. */
384       PetscInt *iidx;         /* Global indices in the merged local inner subdomain. */
385       PetscInt *ioidx;        /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
386       IS       giis;          /* IS for the disjoint union of inner subdomains. */
387       IS       giois;         /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
388       /**/
389       in = 0;
390       for (i=0; i<osm->n; i++) {
391         ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
392         in  += ini;
393       }
394       ierr = PetscMalloc(in*sizeof(PetscInt), &iidx);CHKERRQ(ierr);
395       ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr);
396       ierr = VecGetOwnershipRange(osm->gx,&gofirst, PETSC_NULL);CHKERRQ(ierr);
397       in   = 0;
398       on   = 0;
399       for (i=0; i<osm->n; i++) {
400         const PetscInt         *iidxi; /* Global indices of the i-th local inner subdomain. */
401         ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
402         PetscInt               *ioidxi; /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
403         PetscInt               ioni;  /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
404         PetscInt               k;
405         ierr   = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
406         ierr   = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
407         ierr   = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
408         ierr   = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
409         ierr   = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
410         ioidxi = ioidx+in;
411         ierr   = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
412         ierr   = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
413         ierr   = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
414         if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
415         for (k = 0; k < ini; ++k) ioidxi[k] += gofirst+on;
416         in += ini;
417         on += oni;
418       }
419       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
420       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
421       ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
422       ierr = VecDestroy(&y);CHKERRQ(ierr);
423       ierr = ISDestroy(&giis);CHKERRQ(ierr);
424       ierr = ISDestroy(&giois);CHKERRQ(ierr);
425     }
426     ierr = ISDestroy(&goid);CHKERRQ(ierr);
427 
428     /* Create the subdomain work vectors. */
429     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
430     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
431     ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
432     ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
433     for (i=0, on=0; i<osm->n; ++i, on += oni) {
434       PetscInt oNi;
435       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
436       ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
437       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
438       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
439     }
440     ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
441     ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
442     /* Create the local solvers */
443     ierr = PetscMalloc(osm->n*sizeof(KSP*),&osm->ksp);CHKERRQ(ierr);
444     for (i=0; i<osm->n; i++) { /* KSPs are local */
445       ierr        = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
446       ierr        = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
447       ierr        = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
448       ierr        = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
449       ierr        = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
450       ierr        = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
451       ierr        = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
452       ierr        = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
453       osm->ksp[i] = ksp;
454     }
455     scall = MAT_INITIAL_MATRIX;
456 
457   } else { /*if (!pc->setupcalled)*/
458     /*
459        Destroy the blocks from the previous iteration
460     */
461     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
462       ierr  = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
463       scall = MAT_INITIAL_MATRIX;
464     }
465   }
466 
467   /*
468      Extract out the submatrices.
469   */
470   if (size > 1) {
471     ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
472   } else {
473     ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
474   }
475   if (scall == MAT_INITIAL_MATRIX) {
476     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
477     for (i=0; i<osm->n; i++) {
478       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
479       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
480     }
481   }
482 
483   /* Return control to the user so that the submatrices can be modified (e.g., to apply
484      different boundary conditions for the submatrices than for the global problem) */
485   ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
486 
487   /*
488      Loop over submatrices putting them into local ksps
489   */
490   for (i=0; i<osm->n; i++) {
491     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
492     if (!pc->setupcalled) {
493       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
494     }
495   }
496   PetscFunctionReturn(0);
497 }
498 
499 #undef __FUNCT__
500 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
501 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
502 {
503   PC_GASM        *osm = (PC_GASM*)pc->data;
504   PetscErrorCode ierr;
505   PetscInt       i;
506 
507   PetscFunctionBegin;
508   for (i=0; i<osm->n; i++) {
509     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
510   }
511   PetscFunctionReturn(0);
512 }
513 
514 #undef __FUNCT__
515 #define __FUNCT__ "PCApply_GASM"
516 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
517 {
518   PC_GASM        *osm = (PC_GASM*)pc->data;
519   PetscErrorCode ierr;
520   PetscInt       i;
521   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
522 
523   PetscFunctionBegin;
524   /*
525      Support for limiting the restriction or interpolation only to the inner
526      subdomain values (leaving the other values 0).
527   */
528   if (!(osm->type & PC_GASM_RESTRICT)) {
529     /* have to zero the work RHS since scatter may leave some slots empty */
530     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
531     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
532   } else {
533     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
534   }
535   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
536   if (!(osm->type & PC_GASM_RESTRICT)) {
537     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
538   } else {
539     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
540   }
541   /* do the subdomain solves */
542   for (i=0; i<osm->n; ++i) {
543     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
544   }
545   ierr = VecZeroEntries(y);CHKERRQ(ierr);
546   if (!(osm->type & PC_GASM_INTERPOLATE)) {
547     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
548     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
549   } else {
550     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
551     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
552   }
553 }
554 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_GASM"
/*
   PCApplyTranspose_GASM - Applies the transpose of the GASM preconditioner:
   restricts x onto the (disjoint union of) subdomains, runs the transposed
   subdomain solves, and accumulates the results back into y.
*/
static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_GASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* Zero the merged solution vector while the forward scatter is in flight;
     the local solves below write into pieces of gy (via the osm->y aliases). */
  ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
  /* The End call must use the same scatter object as the matching Begin above. */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  /* Zero y first: overlapping subdomain contributions are summed with ADD_VALUES. */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_RESTRICT)) {
    ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
599 
#undef __FUNCT__
#define __FUNCT__ "PCReset_GASM"
/*
   PCReset_GASM - Releases everything built during setup (subdomain matrices,
   split and merged work vectors, scatters, and subdomain index sets) so the PC
   can be set up again; the subdomain KSP objects are reset but kept alive.
*/
static PetscErrorCode PCReset_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    /* Reset, not destroy: the solvers are reused on the next PCSetUp(). */
    for (i=0; i<osm->n; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n > 0) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->x) {
    /* y[i] is destroyed under the x check — assumes x and y are always
       allocated together; TODO confirm in PCSetUp_GASM(). */
    for (i=0; i<osm->n; i++) {
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);

  ierr     = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
  ierr     = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
  /* NOTE(review): ois/iis are passed here in the opposite order from the calls
     in PCGASMSetSubdomains_GASM(); both arrays are destroyed either way —
     confirm against the PCGASMDestroySubdomains() signature. */
  ierr     = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
  osm->ois = 0;
  osm->iis = 0;
  PetscFunctionReturn(0);
}
635 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GASM"
/*
   PCDestroy_GASM - Destroys the GASM context: resets first (freeing all
   setup-time objects), then destroys the subdomain KSPs and frees the arrays
   owned by the PC_GASM structure itself.
*/
static PetscErrorCode PCDestroy_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_GASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  /* The Vec entries of x/y were already destroyed in PCReset_GASM();
     only the pointer arrays remain to be freed here. */
  ierr = PetscFree(osm->x);CHKERRQ(ierr);
  ierr = PetscFree(osm->y);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
657 
658 #undef __FUNCT__
659 #define __FUNCT__ "PCSetFromOptions_GASM"
660 static PetscErrorCode PCSetFromOptions_GASM(PC pc)
661 {
662   PC_GASM        *osm = (PC_GASM*)pc->data;
663   PetscErrorCode ierr;
664   PetscInt       blocks,ovl;
665   PetscBool      symset,flg;
666   PCGASMType     gasmtype;
667 
668   PetscFunctionBegin;
669   /* set the type to symmetric if matrix is symmetric */
670   if (!osm->type_set && pc->pmat) {
671     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
672     if (symset && flg) osm->type = PC_GASM_BASIC;
673   }
674   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
675   ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
676 
677   osm->create_local = PETSC_TRUE;
678 
679   ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,&flg);CHKERRQ(ierr);
680   if (!osm->create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains yet.");
681 
682   if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr); }
683   ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
684   if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
685   flg  = PETSC_FALSE;
686   ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
687   if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
688   ierr = PetscOptionsTail();CHKERRQ(ierr);
689   PetscFunctionReturn(0);
690 }
691 
692 /*------------------------------------------------------------------------------------*/
693 
694 EXTERN_C_BEGIN
695 #undef __FUNCT__
696 #define __FUNCT__ "PCGASMSetSubdomains_GASM"
697 PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
698 {
699   PC_GASM        *osm = (PC_GASM*)pc->data;
700   PetscErrorCode ierr;
701   PetscInt       i;
702 
703   PetscFunctionBegin;
704   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
705   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
706 
707   if (!pc->setupcalled) {
708     osm->n   = n;
709     osm->ois = 0;
710     osm->iis = 0;
711     if (ois) {
712       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
713     }
714     if (iis) {
715       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
716     }
717     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
718     if (ois) {
719       ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
720       for (i=0; i<n; i++) osm->ois[i] = ois[i];
721       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
722       osm->overlap = -1;
723       if (!iis) {
724         ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
725         for (i=0; i<n; i++) {
726           for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
727           osm->iis[i] = ois[i];
728         }
729       }
730     }
731     if (iis) {
732       ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
733       for (i=0; i<n; i++) osm->iis[i] = iis[i];
734       if (!ois) {
735         ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
736         for (i=0; i<n; i++) {
737           for (i=0; i<n; i++) {
738             ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
739             osm->ois[i] = iis[i];
740           }
741         }
742         if (osm->overlap > 0) {
743           /* Extend the "overlapping" regions by a number of steps */
744           ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
745         }
746       }
747     }
748   }
749   PetscFunctionReturn(0);
750 }
751 EXTERN_C_END
752 
753 EXTERN_C_BEGIN
754 #undef __FUNCT__
755 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
756 PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
757 {
758   PC_GASM        *osm = (PC_GASM*)pc->data;
759   PetscErrorCode ierr;
760   PetscMPIInt    rank,size;
761   PetscInt       n;
762   PetscInt       Nmin, Nmax;
763 
764   PetscFunctionBegin;
765   if (!create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No suppor for autocreation of nonlocal subdomains.");
766   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
767   ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,((PetscObject)pc)->comm);CHKERRQ(ierr);
768   ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
769   if (Nmin != Nmax) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);
770 
771   osm->create_local = create_local;
772   /*
773      Split the subdomains equally among all processors
774   */
775   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
776   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
777   n    = N/size + ((N % size) > rank);
778   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
779   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
780   if (!pc->setupcalled) {
781     ierr      = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
782     osm->N    = N;
783     osm->n    = n;
784     osm->nmax = N/size + ((N%size) ? 1 : 0);
785     osm->ois  = 0;
786     osm->iis  = 0;
787   }
788   PetscFunctionReturn(0);
789 }
790 EXTERN_C_END
791 
792 EXTERN_C_BEGIN
793 #undef __FUNCT__
794 #define __FUNCT__ "PCGASMSetOverlap_GASM"
795 PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
796 {
797   PC_GASM *osm = (PC_GASM*)pc->data;
798 
799   PetscFunctionBegin;
800   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
801   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
802   if (!pc->setupcalled) osm->overlap = ovl;
803   PetscFunctionReturn(0);
804 }
805 EXTERN_C_END
806 
807 EXTERN_C_BEGIN
808 #undef __FUNCT__
809 #define __FUNCT__ "PCGASMSetType_GASM"
810 PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
811 {
812   PC_GASM *osm = (PC_GASM*)pc->data;
813 
814   PetscFunctionBegin;
815   osm->type     = type;
816   osm->type_set = PETSC_TRUE;
817   PetscFunctionReturn(0);
818 }
819 EXTERN_C_END
820 
821 EXTERN_C_BEGIN
822 #undef __FUNCT__
823 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
824 PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
825 {
826   PC_GASM *osm = (PC_GASM*)pc->data;
827 
828   PetscFunctionBegin;
829   osm->sort_indices = doSort;
830   PetscFunctionReturn(0);
831 }
832 EXTERN_C_END
833 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP_GASM"
/*
   PCGASMGetSubKSP_GASM - Implementation of PCGASMGetSubKSP(): returns the local
   subdomain count, the global number of this rank's first subdomain, and the
   array of subdomain solvers.

   FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
        In particular, it would upset the global subdomain number calculation.
*/
PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n) *n = osm->n;
  if (first) {
    /* Inclusive prefix sum of the per-rank subdomain counts; subtracting our
       own count turns it into the global index of our first subdomain.
       Collective: either all ranks request 'first' or none may. */
    ierr    = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    *first -= osm->n;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true, though!  This flag is used only for PCView_GASM() */
    *ksp                        = osm->ksp;
    osm->same_subdomain_solvers = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
} /* PCGASMGetSubKSP_GASM() */
EXTERN_C_END
863 
864 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains"
/*@C
    PCGASMSetSubdomains - Sets the subdomains for this processor
    for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
.   n   - the number of subdomains for this processor
.   iis - the index sets that define this processor's local inner subdomains
         (or PETSC_NULL for PETSc to determine subdomains)
-   ois - the index sets that define this processor's local outer subdomains
         (or PETSC_NULL to use the same as iis)

    Notes:
    The IS indices use the parallel, global numbering of the vector entries.
    Inner subdomains are those where the correction is applied.
    Outer subdomains are those where the residual necessary to obtain the
    corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
    Both inner and outer subdomains can extend over several processors.
    This processor's portion of a subdomain is known as a local subdomain.

    By default the GASM preconditioner uses 1 (local) subdomain per processor.
    Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
    all processors that PCGASM will create automatically, and to specify whether
    they should be local or not.


    Level: advanced

.keywords: PC, GASM, set, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Forward to the type-specific implementation, if one is composed (no-op otherwise). */
  ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
911 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
    Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
    communicator. Either all or no processors in the PC communicator must call this routine with
    the same N.  The subdomains will be created automatically during PCSetUp().

    Collective on PC

    Input Parameters:
+   pc           - the preconditioner context
.   N            - the total number of subdomains cumulative across all processors
-   create_local - whether the subdomains to be created are to be local

    Options Database Key:
    To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
+    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
-    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.


    Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
    of subdomains per processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Forward to the type-specific implementation, if one is composed (no-op otherwise). */
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
954 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <overlap> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
    PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Forward to the type-specific implementation, if one is composed (no-op otherwise). */
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1005 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Forward to the type-specific implementation, if one is composed (no-op otherwise). */
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1044 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Forward to the type-specific implementation, if one is composed (no-op otherwise). */
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1073 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSPes is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (not Try): it is an error to call this on a non-GASM PC. */
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1115 
1116 /* -------------------------------------------------------------------------------------*/
1117 /*MC
1118    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1119            its own KSP object.
1120 
1121    Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets total number of local subdomains (known as blocks) to be distributed among processors
1123 .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1124 .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1125 .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1126 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1127 
1128      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1129       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1130       -pc_gasm_type basic to use the standard GASM.
1131 
1132    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1133      than one processor. Defaults to one block per processor.
1134 
1135      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1136         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1137 
1138      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
1139          and set the options directly on the resulting KSP object (you can access its PC
1140          with KSPGetPC())
1141 
1142 
1143    Level: beginner
1144 
1145    Concepts: additive Schwarz method
1146 
1147     References:
1148     An additive variant of the Schwarz alternating method for the case of many subregions
1149     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1150 
1151     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1152     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1153 
1154 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1155            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1156            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1157 
1158 M*/
1159 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
/*
   PCCreate_GASM - Creates the PC_GASM context, fills in the default parameter
   values, installs the PC function table, and composes the PCGASMSetXXX()/
   PCGASMGetSubKSP() methods so the generic wrappers can dispatch to them.
*/
PetscErrorCode  PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM        *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);

  /* Defaults: subdomain counts are decided later (in PCSetUp_GASM() or via the
     setters); all arrays and objects start empty and are created during setup. */
  osm->N                      = PETSC_DECIDE;
  osm->n                      = PETSC_DECIDE;
  osm->nmax                   = 0;
  osm->overlap                = 1;
  osm->ksp                    = 0;
  osm->gorestriction          = 0;
  osm->girestriction          = 0;
  osm->gx                     = 0;
  osm->gy                     = 0;
  osm->x                      = 0;
  osm->y                      = 0;
  osm->ois                    = 0;
  osm->iis                    = 0;
  osm->pmat                   = 0;
  osm->type                   = PC_GASM_RESTRICT;
  osm->same_subdomain_solvers = PETSC_TRUE;
  osm->sort_indices           = PETSC_TRUE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_GASM;
  pc->ops->applytranspose  = PCApplyTranspose_GASM;
  pc->ops->setup           = PCSetUp_GASM;
  pc->ops->reset           = PCReset_GASM;
  pc->ops->destroy         = PCDestroy_GASM;
  pc->ops->setfromoptions  = PCSetFromOptions_GASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
  pc->ops->view            = PCView_GASM;
  pc->ops->applyrichardson = 0;

  /* Compose the type-specific methods that the PCGASMSetXXX() wrappers invoke
     via PetscTryMethod()/PetscUseMethod(). */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM",
                                           PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
                                           PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
                                           PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
                                           PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
                                           PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
                                           PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
1215 
1216 
1217 #undef __FUNCT__
1218 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1219 /*@C
1220    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
1221    Schwarz preconditioner for a any problem based on its matrix.
1222 
1223    Collective
1224 
1225    Input Parameters:
1226 +  A       - The global matrix operator
1227 .  overlap - amount of overlap in outer subdomains
1228 -  n       - the number of local subdomains
1229 
1230    Output Parameters:
1231 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1232 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1233 
1234    Level: advanced
1235 
1236    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1237          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1238          nonzero overlap with PCGASMSetOverlap()
1239 
1240     In the Fortran version you must provide the array outis[] already allocated of length n.
1241 
1242 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1243 
1244 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1245 @*/
1246 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS *iis[], IS *ois[])
1247 {
1248   MatPartitioning mpart;
1249   const char      *prefix;
1250   PetscErrorCode  (*f)(Mat,MatReuse,Mat*);
1251   PetscMPIInt     size;
1252   PetscInt        i,j,rstart,rend,bs;
1253   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1254   Mat             Ad     = PETSC_NULL, adj;
1255   IS              ispart,isnumb,*is;
1256   PetscErrorCode  ierr;
1257 
1258   PetscFunctionBegin;
1259   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1260   PetscValidPointer(iis,4);
1261   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1262 
1263   /* Get prefix, row distribution, and block size */
1264   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1265   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1266   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1267   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1268 
1269   /* Get diagonal block from matrix if possible */
1270   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1271   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1272   if (f) {
1273     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1274   } else if (size == 1) {
1275     Ad = A;
1276   }
1277   if (Ad) {
1278     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1279     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1280   }
1281   if (Ad && n > 1) {
1282     PetscBool  match,done;
1283     /* Try to setup a good matrix partitioning if available */
1284     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1285     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1286     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1287     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1288     if (!match) {
1289       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1290     }
1291     if (!match) { /* assume a "good" partitioner is available */
1292       PetscInt       na;
1293       const PetscInt *ia,*ja;
1294       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1295       if (done) {
1296         /* Build adjacency matrix by hand. Unfortunately a call to
1297            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1298            remove the block-aij structure and we cannot expect
1299            MatPartitioning to split vertices as we need */
1300         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1301         const PetscInt *row;
1302         nnz = 0;
1303         for (i=0; i<na; i++) { /* count number of nonzeros */
1304           len = ia[i+1] - ia[i];
1305           row = ja + ia[i];
1306           for (j=0; j<len; j++) {
1307             if (row[j] == i) { /* don't count diagonal */
1308               len--; break;
1309             }
1310           }
1311           nnz += len;
1312         }
1313         ierr   = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1314         ierr   = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1315         nnz    = 0;
1316         iia[0] = 0;
1317         for (i=0; i<na; i++) { /* fill adjacency */
1318           cnt = 0;
1319           len = ia[i+1] - ia[i];
1320           row = ja + ia[i];
1321           for (j=0; j<len; j++) {
1322             if (row[j] != i) jja[nnz+cnt++] = row[j]; /* if not diagonal */
1323           }
1324           nnz += cnt;
1325           iia[i+1] = nnz;
1326         }
1327         /* Partitioning of the adjacency matrix */
1328         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1329         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1330         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1331         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1332         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1333         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1334         foundpart = PETSC_TRUE;
1335       }
1336       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1337     }
1338     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1339   }
1340   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1341   if (!foundpart) {
1342 
1343     /* Partitioning by contiguous chunks of rows */
1344 
1345     PetscInt mbs   = (rend-rstart)/bs;
1346     PetscInt start = rstart;
1347     for (i=0; i<n; i++) {
1348       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1349       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1350       start += count;
1351     }
1352 
1353   } else {
1354 
1355     /* Partitioning by adjacency of diagonal block  */
1356 
1357     const PetscInt *numbering;
1358     PetscInt       *count,nidx,*indices,*newidx,start=0;
1359     /* Get node count in each partition */
1360     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1361     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1362     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1363       for (i=0; i<n; i++) count[i] *= bs;
1364     }
1365     /* Build indices from node numbering */
1366     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1367     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1368     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1369     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1370     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1371     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1372     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1373       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1374       for (i=0; i<nidx; i++) {
1375         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1376       }
1377       ierr    = PetscFree(indices);CHKERRQ(ierr);
1378       nidx   *= bs;
1379       indices = newidx;
1380     }
1381     /* Shift to get global indices */
1382     for (i=0; i<nidx; i++) indices[i] += rstart;
1383 
1384     /* Build the index sets for each block */
1385     for (i=0; i<n; i++) {
1386       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1387       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1388       start += count[i];
1389     }
1390 
1391     ierr = PetscFree(count);
1392     ierr = PetscFree(indices);
1393     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1394     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1395   }
1396   *iis = is;
1397   if (!ois) PetscFunctionReturn(0);
1398   /*
1399    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1400    has been requested, copy the inner subdomains over so they can be modified.
1401    */
1402   ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr);
1403   for (i=0; i<n; ++i) {
1404     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1405       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1406       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1407     } else {
1408       ierr      = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1409       (*ois)[i] = (*iis)[i];
1410     }
1411   }
1412   if (overlap > 0) {
1413     /* Extend the "overlapping" regions by a number of steps */
1414     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1415   }
1416   PetscFunctionReturn(0);
1417 }
1418 
1419 #undef __FUNCT__
1420 #define __FUNCT__ "PCGASMDestroySubdomains"
1421 /*@C
   PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
   called after setting subdomains with PCGASMSetSubdomains().
1425 
1426    Collective
1427 
1428    Input Parameters:
1429 +  n   - the number of index sets
1430 .  iis - the array of inner subdomains,
1431 -  ois - the array of outer subdomains, can be PETSC_NULL
1432 
1433    Level: intermediate
1434 
1435    Notes: this is merely a convenience subroutine that walks each list,
1436    destroys each IS on the list, and then frees the list.
1437 
1438 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1439 
1440 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1441 @*/
1442 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1443 {
1444   PetscInt       i;
1445   PetscErrorCode ierr;
1446 
1447   PetscFunctionBegin;
1448   if (n <= 0) PetscFunctionReturn(0);
1449   if (iis) {
1450     PetscValidPointer(iis,2);
1451     for (i=0; i<n; i++) {
1452       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1453     }
1454     ierr = PetscFree(iis);CHKERRQ(ierr);
1455   }
1456   if (ois) {
1457     for (i=0; i<n; i++) {
1458       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1459     }
1460     ierr = PetscFree(ois);CHKERRQ(ierr);
1461   }
1462   PetscFunctionReturn(0);
1463 }
1464 
1465 
/*
  PCGASMLocalSubdomainBounds2D - Computes (into the pointer arguments xleft_loc, ylow_loc,
  xright_loc, yhigh_loc) the corners of the bounding box of the intersection of the 2D
  subdomain (xleft,ylow)-(xright,yhigh) with this processor's ownership range [first,last)
  of an M-by-N grid, and (into n) the number of grid points in that intersection.

  Wrapped in do { ... } while (0) so that the expansion is a single statement: the callers
  invoke the macro followed by a semicolon, and the previous bare-brace form would break
  inside an unbraced if/else body.
*/
#define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
  do {                                                                                                  \
    PetscInt first_row = first/M, last_row = last/M+1;                                                  \
    /* ylow_loc is the grid row containing the first element of the local subdomain */                  \
    *ylow_loc = PetscMax(first_row,ylow);                                                               \
    /* xleft_loc is the offset of the first element of the local subdomain within its grid row          \
       (might actually be outside the local subdomain) */                                               \
    *xleft_loc = *ylow_loc==first_row ? PetscMax(first%M,xleft) : xleft;                                \
    /* yhigh_loc is the grid row above the last local subdomain element */                              \
    *yhigh_loc = PetscMin(last_row,yhigh);                                                              \
    /* xright_loc is the offset of the end of the local subdomain within its grid row                   \
       (might actually be outside the local subdomain) */                                               \
    *xright_loc = *yhigh_loc==last_row ? PetscMin(xright,last%M) : xright;                              \
    /* Size n of the local subdomain: full interior rows, plus the partial last row,                    \
       minus the part of the first row preceding xleft_loc. */                                          \
    *n = 0;                                                                                             \
    if (*ylow_loc < *yhigh_loc) {                                                                       \
      PetscInt width = xright-xleft;                                                                    \
      *n += width*(*yhigh_loc-*ylow_loc-1);                                                             \
      *n += PetscMin(PetscMax(*xright_loc-xleft,0),width);                                              \
      *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width);                                              \
    }                                                                                                   \
  } while (0)
1493 
1494 
1495 
1496 #undef __FUNCT__
1497 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1498 /*@
1499    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1500    preconditioner for a two-dimensional problem on a regular grid.
1501 
1502    Collective
1503 
1504    Input Parameters:
1505 +  M, N               - the global number of grid points in the x and y directions
1506 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1507 .  dof                - degrees of freedom per node
1508 -  overlap            - overlap in mesh lines
1509 
1510    Output Parameters:
1511 +  Nsub - the number of local subdomains created
1512 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1513 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1514 
1515 
1516    Level: advanced
1517 
1518 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1519 
1520 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1521           PCGASMSetOverlap()
1522 @*/
PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
{
  PetscErrorCode ierr;
  PetscMPIInt    size, rank;
  PetscInt       i, j;                  /* subdomain indices in the x and y directions */
  PetscInt       maxheight, maxwidth;   /* extent of the current subdomain in grid points */
  PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
  PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
  PetscInt       x[2][2], y[2][2], n[2]; /* index [0]: limits/size with overlap; [1]: without overlap */
  PetscInt       first, last;           /* this processor's ownership range of pc->pmat rows */
  PetscInt       nidx, *idx;            /* count and list of global indices for the current IS */
  PetscInt       ii,jj,s,q,d;           /* s counts locally-supported subdomains; q selects overlap/no-overlap pass */
  PetscInt       k,kk;
  PetscMPIInt    color;
  MPI_Comm       comm, subcomm;
  IS             **xis = 0, **is = ois, **is_local = iis; /* xis points at whichever output array the current pass fills */

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
  /* The ownership range must fall on grid-point boundaries: each grid point carries dof matrix rows. */
  if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
                                      "does not respect the number of degrees of freedom per grid point %D", first, last, dof);

  /* First pass: determine the number of domains with nonzero intersections with the local ownership range. */
  s      = 0;
  ystart = 0;
  for (j=0; j<Ndomains; ++j) {
    maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
    /* Vertical domain limits with an overlap. */
    ylow   = PetscMax(ystart - overlap,0);
    yhigh  = PetscMin(ystart + maxheight + overlap,N);
    xstart = 0;
    for (i=0; i<Mdomains; ++i) {
      maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
      if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
      /* Horizontal domain limits with an overlap. */
      xleft  = PetscMax(xstart - overlap,0);
      xright = PetscMin(xstart + maxwidth + overlap,M);
      /*
         Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
      */
      PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
      if (nidx) ++s; /* nonzero local intersection: this subdomain counts toward *nsub */
      xstart += maxwidth;
    } /* for (i = 0; i < Mdomains; ++i) */
    ystart += maxheight;
  } /* for (j = 0; j < Ndomains; ++j) */

  /* Now we can allocate the necessary number of ISs. */
  *nsub  = s;
  ierr   = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr);
  ierr   = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
  /* Second pass: repeat the sweep, this time actually building the index sets. */
  s      = 0;
  ystart = 0;
  for (j=0; j<Ndomains; ++j) {
    maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
    /* Vertical domain limits with an overlap. */
    y[0][0] = PetscMax(ystart - overlap,0);
    y[0][1] = PetscMin(ystart + maxheight + overlap,N);
    /* Vertical domain limits without an overlap. */
    y[1][0] = ystart;
    y[1][1] = PetscMin(ystart + maxheight,N);
    xstart  = 0;
    for (i=0; i<Mdomains; ++i) {
      maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
      if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
      /* Horizontal domain limits with an overlap. */
      x[0][0] = PetscMax(xstart - overlap,0);
      x[0][1] = PetscMin(xstart + maxwidth + overlap,M);
      /* Horizontal domain limits without an overlap. */
      x[1][0] = xstart;
      x[1][1] = PetscMin(xstart+maxwidth,M);
      /*
         Determine whether this domain intersects this processor's ownership range of pc->pmat.
         Do this twice: first for the domains with overlaps, and once without.
         During the first pass create the subcommunicators, and use them on the second pass as well.
      */
      for (q = 0; q < 2; ++q) {
        /*
          domain limits, (xleft, xright) and (ylow, yhigh) are adjusted
          according to whether the domain with an overlap or without is considered.
        */
        xleft = x[q][0]; xright = x[q][1];
        ylow  = y[q][0]; yhigh  = y[q][1];
        PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
        nidx *= dof; /* convert from grid points to matrix indices */
        n[q]  = nidx;
        /*
         Based on the counted number of indices in the local domain *with an overlap*,
         construct a subcommunicator of all the processors supporting this domain.
         Observe that a domain with an overlap might have nontrivial local support,
         while the domain without an overlap might not.  Hence, the decision to participate
         in the subcommunicator must be based on the domain with an overlap.
         */
        if (q == 0) {
          if (nidx) color = 1;
          else color = MPI_UNDEFINED;

          /* NOTE(review): subcomm is not explicitly freed here; presumably its lifetime is
             tied to the IS created on it below -- confirm against ISCreateGeneral semantics. */
          ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
        }
        /*
         Proceed only if the number of local indices *with an overlap* is nonzero.
         */
        if (n[0]) {
          if (q == 0) xis = is;
          if (q == 1) {
            /*
             The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
             Moreover, if the overlap is zero, the two ISs are identical.
             */
            if (overlap == 0) {
              (*is_local)[s] = (*is)[s];
              ierr           = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
              continue;
            } else {
              xis     = is_local;
              subcomm = ((PetscObject)(*is)[s])->comm;
            }
          } /* if (q == 1) */
          idx  = PETSC_NULL;
          ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
          if (nidx) {
            /* Enumerate the global matrix indices of the local piece of this subdomain,
               row by row; the first and last grid rows may be partial. */
            k = 0;
            for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
              PetscInt x0 = (jj==ylow_loc) ? xleft_loc : xleft;
              PetscInt x1 = (jj==yhigh_loc-1) ? xright_loc : xright;
              kk = dof*(M*jj + x0); /* global index of the first dof at grid point (jj,x0) */
              for (ii=x0; ii<x1; ++ii) {
                for (d = 0; d < dof; ++d) {
                  idx[k++] = kk++;
                }
              }
            }
          }
          /* The IS takes ownership of idx (PETSC_OWN_POINTER). */
          ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
        }/* if (n[0]) */
      }/* for (q = 0; q < 2; ++q) */
      if (n[0]) ++s;
      xstart += maxwidth;
    } /* for (i = 0; i < Mdomains; ++i) */
    ystart += maxheight;
  } /* for (j = 0; j < Ndomains; ++j) */
  PetscFunctionReturn(0);
}
1671 
1672 #undef __FUNCT__
1673 #define __FUNCT__ "PCGASMGetSubdomains"
1674 /*@C
1675     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1676     for the additive Schwarz preconditioner.
1677 
1678     Not Collective
1679 
1680     Input Parameter:
1681 .   pc - the preconditioner context
1682 
1683     Output Parameters:
1684 +   n   - the number of subdomains for this processor (default value = 1)
1685 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be PETSC_NULL)
1686 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be PETSC_NULL)
1687 
1688 
1689     Notes:
1690     The user is responsible for destroying the ISs and freeing the returned arrays.
1691     The IS numbering is in the parallel, global numbering of the vector.
1692 
1693     Level: advanced
1694 
1695 .keywords: PC, GASM, get, subdomains, additive Schwarz
1696 
1697 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1698           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1699 @*/
1700 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1701 {
1702   PC_GASM        *osm;
1703   PetscErrorCode ierr;
1704   PetscBool      match;
1705   PetscInt       i;
1706 
1707   PetscFunctionBegin;
1708   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1709   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1710   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1711   osm = (PC_GASM*)pc->data;
1712   if (n) *n = osm->n;
1713   if (iis) {
1714     ierr = PetscMalloc(osm->n*sizeof(IS), iis);CHKERRQ(ierr);
1715   }
1716   if (ois) {
1717     ierr = PetscMalloc(osm->n*sizeof(IS), ois);CHKERRQ(ierr);
1718   }
1719   if (iis || ois) {
1720     for (i = 0; i < osm->n; ++i) {
1721       if (iis) (*iis)[i] = osm->iis[i];
1722       if (ois) (*ois)[i] = osm->ois[i];
1723     }
1724   }
1725   PetscFunctionReturn(0);
1726 }
1727 
1728 #undef __FUNCT__
1729 #define __FUNCT__ "PCGASMGetSubmatrices"
1730 /*@C
1731     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1732     only) for the additive Schwarz preconditioner.
1733 
1734     Not Collective
1735 
1736     Input Parameter:
1737 .   pc - the preconditioner context
1738 
1739     Output Parameters:
1740 +   n   - the number of matrices for this processor (default value = 1)
1741 -   mat - the matrices
1742 
1743     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1744            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1745            subdomains were defined using PCGASMSetTotalSubdomains().
1746     Level: advanced
1747 
1748 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1749 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1751           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1752 @*/
1753 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1754 {
1755   PC_GASM        *osm;
1756   PetscErrorCode ierr;
1757   PetscBool      match;
1758 
1759   PetscFunctionBegin;
1760   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1761   PetscValidIntPointer(n,2);
1762   if (mat) PetscValidPointer(mat,3);
1763   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1764   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1765   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1766   osm = (PC_GASM*)pc->data;
1767   if (n) *n = osm->n;
1768   if (mat) *mat = osm->pmat;
1769   PetscFunctionReturn(0);
1770 }
1771