xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision a6dfd86ebdf75fa024070af26d51b62fd16650a3)
1 /*
  This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3   In this version each processor may intersect multiple subdomains and any subdomain may
4   intersect multiple processors.  Intersections of subdomains with processors are called *local
5   subdomains*.
6 
7        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
8        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
10 */
11 #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
12 
/* Private context for the GASM preconditioner; one per PC of type PCGASM. */
typedef struct {
  PetscInt   N,n,nmax;            /* total/local/max-per-process subdomain counts; see the header comment above */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
  VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
  IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
  IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType type;                /* use reduced interpolation, restriction or both */
  PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
  PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;           /* flag to sort subdomain indices */
} PC_GASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCGASMSubdomainView_Private"
33 static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
34 {
35   PC_GASM        *osm  = (PC_GASM*)pc->data;
36   PetscInt       j,nidx;
37   const PetscInt *idx;
38   PetscViewer    sviewer;
39   char           *cidx;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBegin;
43   if (i < 0 || i > osm->n) SETERRQ2(((PetscObject)viewer)->comm, PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must nonnegative and less than %D", i, osm->n);
44   /* Inner subdomains. */
45   ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
46   /*
47    No more than 15 characters per index plus a space.
48    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
49    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
50    For nidx == 0, the whole string 16 '\0'.
51    */
52   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
53   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
54   ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
55   for (j = 0; j < nidx; ++j) {
56     ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
57   }
58   ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
59   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
60   ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
61   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
62   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
63   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
64   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
65   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
66   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
67   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68   ierr = PetscFree(cidx);CHKERRQ(ierr);
69   /* Outer subdomains. */
70   ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
71   /*
72    No more than 15 characters per index plus a space.
73    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
74    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
75    For nidx == 0, the whole string 16 '\0'.
76    */
77   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
78   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
79   ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
80   for (j = 0; j < nidx; ++j) {
81     ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
82   }
83   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
84   ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
85   ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
86   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
87   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
88   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
89   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
90   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
91   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
92   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
93   ierr = PetscFree(cidx);CHKERRQ(ierr);
94 
95   PetscFunctionReturn(0);
96 }
97 
#undef __FUNCT__
#define __FUNCT__ "PCGASMPrintSubdomains"
/*
   PCGASMPrintSubdomains - Prints the inner and outer index sets of all subdomains,
   one subdomain at a time in a globally-consistent order.  Collective on the PC's
   communicator (note the MPI_Barrier inside the loop).  Output goes to the file
   named by the -pc_gasm_print_subdomains option, or to stdout if no name is given.
*/
static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
{
  PC_GASM        *osm  = (PC_GASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscInt       i, l, d, count, gcount, *permutation, *numbering;
  PetscBool      found;
  PetscViewer    viewer, sviewer = PETSC_NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Compute a global numbering of all subdomains (gcount of them) and a permutation
     that lists the locally-supported subdomains in increasing global order. */
  ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
  for (i = 0; i < osm->n; ++i) permutation[i] = i;
  ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
  if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
  ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
  /*
   Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
   the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
  */
  ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
  l = 0;
  for (count = 0; count < gcount; ++count) {
    /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
    if (l<osm->n) {
      d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
      if (numbering[d] == count) {
        ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ++l;
      }
    }
    /* All ranks synchronize after each global subdomain so the output stays ordered. */
    ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
142 
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "PCView_GASM"
146 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
147 {
148   PC_GASM        *osm = (PC_GASM*)pc->data;
149   const char     *prefix;
150   PetscErrorCode ierr;
151   PetscMPIInt    rank, size;
152   PetscInt       i,bsz;
153   PetscBool      iascii,view_subdomains=PETSC_FALSE;
154   PetscViewer    sviewer;
155   PetscInt       count, l, gcount, *numbering, *permutation;
156   char overlap[256]     = "user-defined overlap";
157   char gsubdomains[256] = "unknown total number of subdomains";
158   char lsubdomains[256] = "unknown number of local  subdomains";
159   char msubdomains[256] = "unknown max number of local subdomains";
160   PetscFunctionBegin;
161   ierr = MPI_Comm_size(((PetscObject)pc)->comm, &size);CHKERRQ(ierr);
162   ierr = MPI_Comm_rank(((PetscObject)pc)->comm, &rank);CHKERRQ(ierr);
163 
164 
165   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
166   for (i = 0; i < osm->n; ++i) permutation[i] = i;
167   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
168   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
169 
170   if (osm->overlap >= 0) {
171     ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
172   }
173   ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
174   if (osm->N > 0) {
175     ierr = PetscSNPrintf(lsubdomains, sizeof(gsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
176   }
177   if (osm->nmax > 0) {
178     ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
179   }
180 
181   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
182   ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,PETSC_NULL);CHKERRQ(ierr);
183 
184   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
185   if (iascii) {
186     /*
187      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
188      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
189      collectively on the comm.
190      */
191     ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
192     ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
193     ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
194     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
195     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
196     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
197     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
198     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
199     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr);
200     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
201     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
202     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
203     ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
204     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
205     ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
206     /* Make sure that everybody waits for the banner to be printed. */
207     ierr = MPI_Barrier(((PetscObject)viewer)->comm);CHKERRQ(ierr);
208     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
209     l = 0;
210     for (count = 0; count < gcount; ++count) {
211       PetscMPIInt srank, ssize;
212       if (l<osm->n) {
213         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
214         if (numbering[d] == count) {
215           ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
216           ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
217           ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
218           ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
219           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
220           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%D:%D] (subcomm [%D:%D]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr);
221           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
222           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
223           if (view_subdomains) {
224             ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
225           }
226           if (!pc->setupcalled) {
227             PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
228           }
229           else {
230             ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
231           }
232           ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
233           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
234           ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
235           ++l;
236         }
237       }
238       ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
239     }
240     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
241   } else {
242     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
243   }
244   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
245   PetscFunctionReturn(0);
246 }
247 
248 
249 
250 
251 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
/*
   PCSetUp_GASM - Sets up the GASM preconditioner.

   On first call (pc->setupcalled false) this:
     1. Determines the subdomains (from a DM decomposition, from user-set values,
        or one per process) and the global/max subdomain counts;
     2. Merges the outer subdomain ISs into one disjoint union, creates the merged
        work vectors osm->gx/gy and the restriction scatter osm->gorestriction;
     3. Builds the inner-subdomain restriction osm->girestriction by mapping inner
        indices into the disjoint union of outer subdomains;
     4. Creates the per-subdomain work vectors osm->x/y aliasing gx/gy storage,
        and one KSP per locally-supported subdomain.
   On every call it (re)extracts the subdomain submatrices and hands them to the
   subdomain KSPs.  Collective on the PC's communicator.
*/
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM         *osm  = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i;
  PetscMPIInt    rank, size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            x,y;
  PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
  const PetscInt *oidxi;    /* Indices from the i-th subdomain local outer subdomain.             */
  PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
  PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
  IS             gois;      /* Disjoint union the global indices of outer subdomains.             */
  IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
  PetscScalar    *gxarray, *gyarray;
  PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy --
                             over the disjoint union of outer subdomains. */
  DM             *subdomain_dm = PETSC_NULL;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  if (!pc->setupcalled) {

    /* If the user did not pick a GASM type, use BASIC when the matrix is known symmetric. */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_GASM_BASIC; }
    }

    /*
     If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
     The total number of subdomains, osm->N is not necessarily set, might be PETSC_DECIDE, and then will have to be calculated from osm->n.
     */
    if (osm->n == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first */
      if (pc->dm) {
        char      ddm_name[1024];
        DM        ddm;
        PetscBool flg;
        PetscInt     num_subdomains, d;
        char         **subdomain_names;
        IS           *inner_subdomain_is, *outer_subdomain_is;
        /* Allow the user to request a decomposition DM by name */
        ierr = PetscStrncpy(ddm_name, "", 1024);CHKERRQ(ierr);
        ierr = PetscOptionsString("-pc_gasm_decomposition","Name of the DM defining the decomposition", "PCSetDM",ddm_name,ddm_name,1024,&flg);CHKERRQ(ierr);
        if (flg) {
          ierr = DMCreateDomainDecompositionDM(pc->dm, ddm_name, &ddm);CHKERRQ(ierr);
          if (!ddm) {
            SETERRQ1(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Uknown DM decomposition name %s", ddm_name);
          }
          ierr = PetscInfo(pc,"Using decomposition DM defined using options database\n");CHKERRQ(ierr);
          ierr = PCSetDM(pc,ddm);CHKERRQ(ierr);
        }
        ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
        if (num_subdomains) {
          ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
        }
        /* PCGASMSetSubdomains() took its own references; release the DM's. */
        for (d = 0; d < num_subdomains; ++d) {
          if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
          if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
          if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
      }
      if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
        osm->nmax = osm->n = 1;
        ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
        osm->N = size;
      }
    }
    if (!osm->iis) {
      /*
       The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
       but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
       We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
       */
      ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
    }
    if (osm->N == PETSC_DECIDE) {
      struct {PetscInt max,sum;} inwork,outwork;
      /* determine global number of subdomains and the max number of local subdomains */
      inwork.max = osm->n;
      inwork.sum = osm->n;
      ierr = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->nmax = outwork.max;
      osm->N    = outwork.sum;
    }

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    /* Sorting makes the subsequent global-to-local mapping and submatrix extraction well-defined. */
    if (osm->sort_indices) {
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
      }
    }
    /*
     Merge the ISs, create merged vectors and restrictions.
     */
    /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
    on = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      on += oni;
    }
    ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr);
    /* Concatenate the outer-subdomain index lists into oidx. */
    on = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
      ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
      on += oni;
    }
    /* gois owns oidx from here on (PETSC_OWN_POINTER). */
    ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(((PetscObject)pc)->comm, on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &gofirst, PETSC_NULL);CHKERRQ(ierr);
    ierr = ISCreateStride(((PetscObject)pc)->comm,on,gofirst,1, &goid);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = ISDestroy(&gois);CHKERRQ(ierr);
    /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
    { PetscInt       ini;     /* Number of indices the i-th a local inner subdomain. */
      PetscInt       in;      /* Number of indices in the disjoint uniont of local inner subdomains. */
      PetscInt       *iidx;   /* Global indices in the merged local inner subdomain. */
      PetscInt       *ioidx;  /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      IS             giis;    /* IS for the disjoint union of inner subdomains. */
      IS             giois;   /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      /**/
      in = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        in += ini;
      }
      ierr = PetscMalloc(in*sizeof(PetscInt), &iidx);CHKERRQ(ierr);
      ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(osm->gx,&gofirst, PETSC_NULL);CHKERRQ(ierr);
      in = 0;
      on = 0;
      for (i=0; i<osm->n; i++) {
        const PetscInt *iidxi;        /* Global indices of the i-th local inner subdomain. */
        ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
        PetscInt       *ioidxi;       /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
        PetscInt       ioni;          /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
        PetscInt       k;
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
        ierr = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
        ioidxi = ioidx+in;
        /* Translate the inner indices into positions within the i-th outer subdomain;
           any inner index outside the outer subdomain is dropped and caught below. */
        ierr = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
        ierr = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
        if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
        /* Shift into the global index space of the merged work vectors. */
        for (k = 0; k < ini; ++k) {
          ioidxi[k] += gofirst+on;
        }
        in += ini;
        on += oni;
      }
      ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
      ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
      ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
      ierr = VecDestroy(&y);CHKERRQ(ierr);
      ierr = ISDestroy(&giis);CHKERRQ(ierr);
      ierr = ISDestroy(&giois);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&goid);CHKERRQ(ierr);
    /* Create the subdomain work vectors: osm->x[i]/osm->y[i] alias consecutive
       slices of the merged vectors' storage (gxarray/gyarray). */
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
    for (i=0, on=0; i<osm->n; ++i, on += oni) {
      PetscInt oNi;
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
    /* Create the local solvers */
    ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  }/*if (!pc->setupcalled)*/
  else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  if (size > 1) {
    ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  }
  else {
    ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  }
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over submatrices putting them into local ksps
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
508 
509 #undef __FUNCT__
510 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
511 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
512 {
513   PC_GASM         *osm = (PC_GASM*)pc->data;
514   PetscErrorCode ierr;
515   PetscInt       i;
516 
517   PetscFunctionBegin;
518   for (i=0; i<osm->n; i++) {
519     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
520   }
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "PCApply_GASM"
526 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
527 {
528   PC_GASM         *osm = (PC_GASM*)pc->data;
529   PetscErrorCode ierr;
530   PetscInt       i;
531   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
532 
533   PetscFunctionBegin;
534   /*
535      Support for limiting the restriction or interpolation only to the inner
536      subdomain values (leaving the other values 0).
537   */
538   if (!(osm->type & PC_GASM_RESTRICT)) {
539     /* have to zero the work RHS since scatter may leave some slots empty */
540     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
541     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
542   }
543   else {
544     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
545   }
546   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
547   if (!(osm->type & PC_GASM_RESTRICT)) {
548     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
549   }
550   else {
551     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
552   }
553   /* do the subdomain solves */
554   for (i=0; i<osm->n; ++i) {
555     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
556   }
557   ierr = VecZeroEntries(y);CHKERRQ(ierr);
558   if (!(osm->type & PC_GASM_INTERPOLATE)) {
559     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
560     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
561   }
562   else {
563     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
564     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
565   }
566 }
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "PCApplyTranspose_GASM"
570 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
571 {
572   PC_GASM         *osm = (PC_GASM*)pc->data;
573   PetscErrorCode ierr;
574   PetscInt       i;
575   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
576 
577   PetscFunctionBegin;
578   /*
579      Support for limiting the restriction or interpolation to only local
580      subdomain values (leaving the other values 0).
581 
582      Note: these are reversed from the PCApply_GASM() because we are applying the
583      transpose of the three terms
584   */
585   if (!(osm->type & PC_GASM_INTERPOLATE)) {
586     /* have to zero the work RHS since scatter may leave some slots empty */
587     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
588     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
589   }
590   else {
591     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
592   }
593   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
594   if (!(osm->type & PC_GASM_INTERPOLATE)) {
595     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
596   }
597   else {
598     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
599   }
600   /* do the local solves */
601   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
602     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
603   }
604   ierr = VecZeroEntries(y);CHKERRQ(ierr);
605   if (!(osm->type & PC_GASM_RESTRICT)) {
606     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
607     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
608   }
609   else {
610     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
611     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
612   }
613 
614   PetscFunctionReturn(0);
615 }
616 
617 #undef __FUNCT__
618 #define __FUNCT__ "PCReset_GASM"
619 static PetscErrorCode PCReset_GASM(PC pc)
620 {
621   PC_GASM        *osm = (PC_GASM*)pc->data;
622   PetscErrorCode ierr;
623   PetscInt       i;
624 
625   PetscFunctionBegin;
626   if (osm->ksp) {
627     for (i=0; i<osm->n; i++) {
628       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
629     }
630   }
631   if (osm->pmat) {
632     if (osm->n > 0) {
633       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
634     }
635   }
636   if (osm->x) {
637     for (i=0; i<osm->n; i++) {
638       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
639       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
640     }
641   }
642   ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
643   ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);
644 
645   ierr = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
646   ierr = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
647   ierr = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
648   osm->ois = 0;
649   osm->iis = 0;
650   PetscFunctionReturn(0);
651 }
652 
653 #undef __FUNCT__
654 #define __FUNCT__ "PCDestroy_GASM"
655 static PetscErrorCode PCDestroy_GASM(PC pc)
656 {
657   PC_GASM         *osm = (PC_GASM*)pc->data;
658   PetscErrorCode ierr;
659   PetscInt       i;
660 
661   PetscFunctionBegin;
662   ierr = PCReset_GASM(pc);CHKERRQ(ierr);
663   if (osm->ksp) {
664     for (i=0; i<osm->n; i++) {
665       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
666     }
667     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
668   }
669   ierr = PetscFree(osm->x);CHKERRQ(ierr);
670   ierr = PetscFree(osm->y);CHKERRQ(ierr);
671   ierr = PetscFree(pc->data);CHKERRQ(ierr);
672   PetscFunctionReturn(0);
673 }
674 
675 #undef __FUNCT__
676 #define __FUNCT__ "PCSetFromOptions_GASM"
677 static PetscErrorCode PCSetFromOptions_GASM(PC pc)
678 {
679   PC_GASM         *osm = (PC_GASM*)pc->data;
680   PetscErrorCode ierr;
681   PetscInt       blocks,ovl;
682   PetscBool      symset,flg;
683   PCGASMType      gasmtype;
684 
685   PetscFunctionBegin;
686   /* set the type to symmetric if matrix is symmetric */
687   if (!osm->type_set && pc->pmat) {
688     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
689     if (symset && flg) { osm->type = PC_GASM_BASIC; }
690   }
691   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
692     ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
693     osm->create_local = PETSC_TRUE;
694     ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,&flg);CHKERRQ(ierr);
695     if (!osm->create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains yet.");
696 
697     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr); }
698     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
699     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
700     flg  = PETSC_FALSE;
701     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
702     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
703   ierr = PetscOptionsTail();CHKERRQ(ierr);
704   PetscFunctionReturn(0);
705 }
706 
707 /*------------------------------------------------------------------------------------*/
708 
709 EXTERN_C_BEGIN
710 #undef __FUNCT__
711 #define __FUNCT__ "PCGASMSetSubdomains_GASM"
712 PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
713 {
714   PC_GASM         *osm = (PC_GASM*)pc->data;
715   PetscErrorCode ierr;
716   PetscInt       i;
717 
718   PetscFunctionBegin;
719   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
720   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
721 
722   if (!pc->setupcalled) {
723     osm->n       = n;
724     osm->ois     = 0;
725     osm->iis     = 0;
726     if (ois) {
727       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
728     }
729     if (iis) {
730       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
731     }
732     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
733     if (ois) {
734       ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
735       for (i=0; i<n; i++) { osm->ois[i] = ois[i]; }
736       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
737       osm->overlap = -1;
738       if (!iis) {
739         ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
740         for (i=0; i<n; i++) {
741           for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
742           osm->iis[i] = ois[i];
743         }
744       }
745     }
746     if (iis) {
747       ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
748       for (i=0; i<n; i++) { osm->iis[i] = iis[i]; }
749       if (!ois) {
750         ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
751         for (i=0; i<n; i++) {
752           for (i=0; i<n; i++) {
753             ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
754             osm->ois[i] = iis[i];
755           }
756         }
757         if (osm->overlap > 0) {
758           /* Extend the "overlapping" regions by a number of steps */
759           ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
760         }
761       }
762     }
763   }
764   PetscFunctionReturn(0);
765 }
766 EXTERN_C_END
767 
768 EXTERN_C_BEGIN
769 #undef __FUNCT__
770 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
771 PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
772 {
773   PC_GASM         *osm = (PC_GASM*)pc->data;
774   PetscErrorCode ierr;
775   PetscMPIInt    rank,size;
776   PetscInt       n;
777   PetscInt       Nmin, Nmax;
778   PetscFunctionBegin;
779   if (!create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No suppor for autocreation of nonlocal subdomains.");
780   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
781   ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,((PetscObject)pc)->comm);CHKERRQ(ierr);
782   ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
783   if (Nmin != Nmax)
784     SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);
785 
786   osm->create_local = create_local;
787   /*
788      Split the subdomains equally among all processors
789   */
790   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
791   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
792   n = N/size + ((N % size) > rank);
793   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
794   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
795   if (!pc->setupcalled) {
796     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
797     osm->N            = N;
798     osm->n            = n;
799     osm->nmax         = N/size + ((N%size)?1:0);
800     osm->ois           = 0;
801     osm->iis     = 0;
802   }
803   PetscFunctionReturn(0);
804 }
805 EXTERN_C_END
806 
807 EXTERN_C_BEGIN
808 #undef __FUNCT__
809 #define __FUNCT__ "PCGASMSetOverlap_GASM"
810 PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
811 {
812   PC_GASM *osm = (PC_GASM*)pc->data;
813 
814   PetscFunctionBegin;
815   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
816   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
817   if (!pc->setupcalled) {
818     osm->overlap = ovl;
819   }
820   PetscFunctionReturn(0);
821 }
822 EXTERN_C_END
823 
824 EXTERN_C_BEGIN
825 #undef __FUNCT__
826 #define __FUNCT__ "PCGASMSetType_GASM"
827 PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
828 {
829   PC_GASM *osm = (PC_GASM*)pc->data;
830 
831   PetscFunctionBegin;
832   osm->type     = type;
833   osm->type_set = PETSC_TRUE;
834   PetscFunctionReturn(0);
835 }
836 EXTERN_C_END
837 
838 EXTERN_C_BEGIN
839 #undef __FUNCT__
840 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
841 PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
842 {
843   PC_GASM *osm = (PC_GASM*)pc->data;
844 
845   PetscFunctionBegin;
846   osm->sort_indices = doSort;
847   PetscFunctionReturn(0);
848 }
849 EXTERN_C_END
850 
851 EXTERN_C_BEGIN
852 #undef __FUNCT__
853 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
854 /*
855    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
856         In particular, it would upset the global subdomain number calculation.
857 */
858 PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
859 {
860   PC_GASM         *osm = (PC_GASM*)pc->data;
861   PetscErrorCode ierr;
862 
863   PetscFunctionBegin;
864   if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
865 
866   if (n) {
867     *n = osm->n;
868   }
869   if (first) {
870     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
871     *first -= osm->n;
872   }
873   if (ksp) {
874     /* Assume that local solves are now different; not necessarily
875        true, though!  This flag is used only for PCView_GASM() */
876     *ksp                   = osm->ksp;
877     osm->same_subdomain_solvers = PETSC_FALSE;
878   }
879   PetscFunctionReturn(0);
880 }/* PCGASMGetSubKSP_GASM() */
881 EXTERN_C_END
882 
883 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains"
/*@C
    PCGASMSetSubdomains - Sets the subdomains for this processor
    for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
.   n   - the number of subdomains for this processor
.   iis - the index sets that define this processor's local inner subdomains
         (or PETSC_NULL for PETSc to determine subdomains)
-   ois - the index sets that define this processor's local outer subdomains
         (or PETSC_NULL to use the same as iis)

    Notes:
    The IS indices use the parallel, global numbering of the vector entries.
    Inner subdomains are those where the correction is applied.
    Outer subdomains are those where the residual necessary to obtain the
    corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
    Both inner and outer subdomains can extend over several processors.
    This processor's portion of a subdomain is known as a local subdomain.

    By default the GASM preconditioner uses 1 (local) subdomain per processor.
    Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
    all processors that PCGASM will create automatically, and to specify whether
    they should be local or not.

    This routine must be called before PCSetUp(); the GASM implementation
    raises an error if the subdomains are changed afterwards.

    Level: advanced

.keywords: PC, GASM, set, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
930 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
    Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
    communicator. Either all or no processors in the PC communicator must call this routine with
    the same N.  The subdomains will be created automatically during PCSetUp().

    Collective on PC

    Input Parameters:
+   pc           - the preconditioner context
.   N            - the total number of subdomains cumulative across all processors
-   create_local - whether the subdomains to be created are to be local

    Options Database Key:
    To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
+    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
-    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.

    Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
    of subdomains per processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
973 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <overlap> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
    PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1024 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Notes:
    Setting the type explicitly (here or via the options database) prevents
    PCGASM from switching to PC_GASM_BASIC for symmetric problems.

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1063 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Notes:
    Sorting is enabled by default; this routine allows it to be turned off.

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1092 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSP objects is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1134 
1135 /* -------------------------------------------------------------------------------------*/
1136 /*MC
1137    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1138            its own KSP object.
1139 
1140    Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets total number of local subdomains (known as blocks) to be distributed among processors
1142 .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1143 .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1144 .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1145 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1146 
1147      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1148       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1149       -pc_gasm_type basic to use the standard GASM.
1150 
1151    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1152      than one processor. Defaults to one block per processor.
1153 
1154      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1155         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1156 
1157      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
1158          and set the options directly on the resulting KSP object (you can access its PC
1159          with KSPGetPC())
1160 
1161 
1162    Level: beginner
1163 
1164    Concepts: additive Schwarz method
1165 
1166     References:
1167     An additive variant of the Schwarz alternating method for the case of many subregions
1168     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1169 
1170     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1171     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1172 
1173 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1174            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1175            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1176 
1177 M*/
1178 
1179 EXTERN_C_BEGIN
1180 #undef __FUNCT__
1181 #define __FUNCT__ "PCCreate_GASM"
1182 PetscErrorCode  PCCreate_GASM(PC pc)
1183 {
1184   PetscErrorCode ierr;
1185   PC_GASM         *osm;
1186 
1187   PetscFunctionBegin;
1188   ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
1189   osm->N                 = PETSC_DECIDE;
1190   osm->n                 = PETSC_DECIDE;
1191   osm->nmax              = 0;
1192   osm->overlap           = 1;
1193   osm->ksp               = 0;
1194   osm->gorestriction     = 0;
1195   osm->girestriction     = 0;
1196   osm->gx                = 0;
1197   osm->gy                = 0;
1198   osm->x                 = 0;
1199   osm->y                 = 0;
1200   osm->ois               = 0;
1201   osm->iis               = 0;
1202   osm->pmat              = 0;
1203   osm->type              = PC_GASM_RESTRICT;
1204   osm->same_subdomain_solvers = PETSC_TRUE;
1205   osm->sort_indices           = PETSC_TRUE;
1206 
1207   pc->data                   = (void*)osm;
1208   pc->ops->apply             = PCApply_GASM;
1209   pc->ops->applytranspose    = PCApplyTranspose_GASM;
1210   pc->ops->setup             = PCSetUp_GASM;
1211   pc->ops->reset             = PCReset_GASM;
1212   pc->ops->destroy           = PCDestroy_GASM;
1213   pc->ops->setfromoptions    = PCSetFromOptions_GASM;
1214   pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
1215   pc->ops->view              = PCView_GASM;
1216   pc->ops->applyrichardson   = 0;
1217 
1218   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM",
1219                     PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
1220   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
1221                     PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
1222   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
1223                     PCGASMSetOverlap_GASM);CHKERRQ(ierr);
1224   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
1225                     PCGASMSetType_GASM);CHKERRQ(ierr);
1226   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
1227                     PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
1228   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
1229                     PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
1230   PetscFunctionReturn(0);
1231 }
1232 EXTERN_C_END
1233 
1234 
1235 #undef __FUNCT__
1236 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1237 /*@C
1238    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
1239    Schwarz preconditioner for a any problem based on its matrix.
1240 
1241    Collective
1242 
1243    Input Parameters:
1244 +  A       - The global matrix operator
1245 .  overlap - amount of overlap in outer subdomains
1246 -  n       - the number of local subdomains
1247 
1248    Output Parameters:
1249 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1250 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1251 
1252    Level: advanced
1253 
1254    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1255          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1256          nonzero overlap with PCGASMSetOverlap()
1257 
1258     In the Fortran version you must provide the array outis[] already allocated of length n.
1259 
1260 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1261 
1262 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1263 @*/
1264 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS* iis[], IS* ois[])
1265 {
1266   MatPartitioning           mpart;
1267   const char                *prefix;
1268   PetscErrorCode            (*f)(Mat,MatReuse,Mat*);
1269   PetscMPIInt               size;
1270   PetscInt                  i,j,rstart,rend,bs;
1271   PetscBool                 isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1272   Mat                       Ad = PETSC_NULL, adj;
1273   IS                        ispart,isnumb,*is;
1274   PetscErrorCode            ierr;
1275 
1276   PetscFunctionBegin;
1277   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1278   PetscValidPointer(iis,4);
1279   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1280 
1281   /* Get prefix, row distribution, and block size */
1282   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1283   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1284   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1285   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1286 
1287   /* Get diagonal block from matrix if possible */
1288   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1289   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1290   if (f) {
1291     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1292   } else if (size == 1) {
1293     Ad = A;
1294   }
1295   if (Ad) {
1296     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1297     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1298   }
1299   if (Ad && n > 1) {
1300     PetscBool  match,done;
1301     /* Try to setup a good matrix partitioning if available */
1302     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1303     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1304     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1305     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1306     if (!match) {
1307       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1308     }
1309     if (!match) { /* assume a "good" partitioner is available */
1310       PetscInt na;
1311       const PetscInt *ia,*ja;
1312       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1313       if (done) {
1314         /* Build adjacency matrix by hand. Unfortunately a call to
1315            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1316            remove the block-aij structure and we cannot expect
1317            MatPartitioning to split vertices as we need */
1318         PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1319         const PetscInt *row;
1320         nnz = 0;
1321         for (i=0; i<na; i++) { /* count number of nonzeros */
1322           len = ia[i+1] - ia[i];
1323           row = ja + ia[i];
1324           for (j=0; j<len; j++) {
1325             if (row[j] == i) { /* don't count diagonal */
1326               len--; break;
1327             }
1328           }
1329           nnz += len;
1330         }
1331         ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1332         ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1333         nnz    = 0;
1334         iia[0] = 0;
1335         for (i=0; i<na; i++) { /* fill adjacency */
1336           cnt = 0;
1337           len = ia[i+1] - ia[i];
1338           row = ja + ia[i];
1339           for (j=0; j<len; j++) {
1340             if (row[j] != i) { /* if not diagonal */
1341               jja[nnz+cnt++] = row[j];
1342             }
1343           }
1344           nnz += cnt;
1345           iia[i+1] = nnz;
1346         }
1347         /* Partitioning of the adjacency matrix */
1348         ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1349         ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1350         ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1351         ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1352         ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1353         ierr = MatDestroy(&adj);CHKERRQ(ierr);
1354         foundpart = PETSC_TRUE;
1355       }
1356       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1357     }
1358     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1359   }
1360   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1361   if (!foundpart) {
1362 
1363     /* Partitioning by contiguous chunks of rows */
1364 
1365     PetscInt mbs   = (rend-rstart)/bs;
1366     PetscInt start = rstart;
1367     for (i=0; i<n; i++) {
1368       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1369       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1370       start += count;
1371     }
1372 
1373   } else {
1374 
1375     /* Partitioning by adjacency of diagonal block  */
1376 
1377     const PetscInt *numbering;
1378     PetscInt       *count,nidx,*indices,*newidx,start=0;
1379     /* Get node count in each partition */
1380     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1381     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1382     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1383       for (i=0; i<n; i++) count[i] *= bs;
1384     }
1385     /* Build indices from node numbering */
1386     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1387     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1388     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1389     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1390     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1391     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1392     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1393       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1394       for (i=0; i<nidx; i++)
1395         for (j=0; j<bs; j++)
1396           newidx[i*bs+j] = indices[i]*bs + j;
1397       ierr = PetscFree(indices);CHKERRQ(ierr);
1398       nidx   *= bs;
1399       indices = newidx;
1400     }
1401     /* Shift to get global indices */
1402     for (i=0; i<nidx; i++) indices[i] += rstart;
1403 
1404     /* Build the index sets for each block */
1405     for (i=0; i<n; i++) {
1406       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1407       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1408       start += count[i];
1409     }
1410 
1411     ierr = PetscFree(count);
1412     ierr = PetscFree(indices);
1413     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1414     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1415   }
1416   *iis = is;
1417   if (!ois) PetscFunctionReturn(0);
1418   /*
1419    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1420    has been requested, copy the inner subdomains over so they can be modified.
1421    */
1422   ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr);
1423   for (i=0; i<n; ++i) {
1424     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1425       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1426       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1427     } else {
1428       ierr = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1429       (*ois)[i] = (*iis)[i];
1430     }
1431   }
1432   if (overlap > 0) {
1433     /* Extend the "overlapping" regions by a number of steps */
1434     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1435   }
1436   PetscFunctionReturn(0);
1437 }
1438 
1439 #undef __FUNCT__
1440 #define __FUNCT__ "PCGASMDestroySubdomains"
1441 /*@C
1442    PCGASMDestroySubdomains - Destroys the index sets created with
1443    PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D. Should be
1444    called after setting subdomains with PCGASMSetSubdomains().
1445 
1446    Collective
1447 
1448    Input Parameters:
1449 +  n   - the number of index sets
1450 .  iis - the array of inner subdomains,
1451 -  ois - the array of outer subdomains, can be PETSC_NULL
1452 
1453    Level: intermediate
1454 
1455    Notes: this is merely a convenience subroutine that walks each list,
1456    destroys each IS on the list, and then frees the list.
1457 
1458 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1459 
1460 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1461 @*/
1462 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1463 {
1464   PetscInt       i;
1465   PetscErrorCode ierr;
1466   PetscFunctionBegin;
1467   if (n <= 0) PetscFunctionReturn(0);
1468   if (iis) {
1469     PetscValidPointer(iis,2);
1470     for (i=0; i<n; i++) {
1471       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1472     }
1473     ierr = PetscFree(iis);CHKERRQ(ierr);
1474   }
1475   if (ois) {
1476     for (i=0; i<n; i++) {
1477       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1478     }
1479     ierr = PetscFree(ois);CHKERRQ(ierr);
1480   }
1481   PetscFunctionReturn(0);
1482 }
1483 
1484 
/*
   PCGASMLocalSubdomainBounds2D - Computes the intersection of the rectangular subdomain
   [xleft,xright) x [ylow,yhigh) of a row-major M x N grid with this processor's ownership
   range [first,last) of grid points.  Outputs (through the pointer arguments) the bounding
   rows ylow_loc/yhigh_loc, the bounding offsets xleft_loc/xright_loc within the first and
   last intersected rows, and the number n of subdomain points owned locally.
   Wrapped in do { ... } while (0) so the expansion is a single statement and is safe to
   use in unbraced if/else branches.
*/
#define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
do {                                                                                                    \
  PetscInt first_row = first/M, last_row = last/M+1;                                                    \
  /*                                                                                                    \
   Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
   of the bounding box of the intersection of the subdomain with the local ownership range (local       \
   subdomain).                                                                                          \
   Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
   of the intersection.                                                                                 \
  */                                                                                                    \
  /* ylow_loc is the grid row containing the first element of the local subdomain */                    \
  *ylow_loc = PetscMax(first_row,ylow);                                                                    \
  /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
  *xleft_loc = *ylow_loc==first_row?PetscMax(first%M,xleft):xleft;                                                                            \
  /* yhigh_loc is the grid row above the last local subdomain element */                                                                    \
  *yhigh_loc = PetscMin(last_row,yhigh);                                                                                                     \
  /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */      \
  *xright_loc = *yhigh_loc==last_row?PetscMin(xright,last%M):xright;                                                                          \
  /* Now compute the size of the local subdomain n. */ \
  *n = 0;                                               \
  if (*ylow_loc < *yhigh_loc) {                           \
    PetscInt width = xright-xleft;                     \
    *n += width*(*yhigh_loc-*ylow_loc-1);                 \
    *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
    *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
  }                                                      \
} while (0)
1512 
1513 
1514 
1515 #undef __FUNCT__
1516 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1517 /*@
1518    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1519    preconditioner for a two-dimensional problem on a regular grid.
1520 
1521    Collective
1522 
1523    Input Parameters:
1524 +  M, N               - the global number of grid points in the x and y directions
1525 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1526 .  dof                - degrees of freedom per node
1527 -  overlap            - overlap in mesh lines
1528 
1529    Output Parameters:
1530 +  Nsub - the number of local subdomains created
1531 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1532 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1533 
1534 
1535    Level: advanced
1536 
1537 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1538 
1539 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1540           PCGASMSetOverlap()
1541 @*/
1542 PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
1543 {
1544   PetscErrorCode ierr;
1545   PetscMPIInt    size, rank;
1546   PetscInt       i, j;
1547   PetscInt       maxheight, maxwidth;
1548   PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
1549   PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
1550   PetscInt       x[2][2], y[2][2], n[2];
1551   PetscInt       first, last;
1552   PetscInt       nidx, *idx;
1553   PetscInt       ii,jj,s,q,d;
1554   PetscInt       k,kk;
1555   PetscMPIInt    color;
1556   MPI_Comm       comm, subcomm;
1557   IS             **xis = 0, **is = ois, **is_local = iis;
1558 
1559   PetscFunctionBegin;
1560   ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
1561   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1562   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1563   ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
1564   if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1565              "does not respect the number of degrees of freedom per grid point %D", first, last, dof);
1566 
1567   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1568   s = 0;
1569   ystart = 0;
1570   for (j=0; j<Ndomains; ++j) {
1571     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1572     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
1573     /* Vertical domain limits with an overlap. */
1574     ylow = PetscMax(ystart - overlap,0);
1575     yhigh = PetscMin(ystart + maxheight + overlap,N);
1576     xstart = 0;
1577     for (i=0; i<Mdomains; ++i) {
1578       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1579       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1580       /* Horizontal domain limits with an overlap. */
1581       xleft   = PetscMax(xstart - overlap,0);
1582       xright  = PetscMin(xstart + maxwidth + overlap,M);
1583       /*
1584          Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1585       */
1586       PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1587       if (nidx) {
1588         ++s;
1589       }
1590       xstart += maxwidth;
1591     }/* for (i = 0; i < Mdomains; ++i) */
1592     ystart += maxheight;
1593   }/* for (j = 0; j < Ndomains; ++j) */
1594   /* Now we can allocate the necessary number of ISs. */
1595   *nsub = s;
1596   ierr = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1597   ierr = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1598   s = 0;
1599   ystart = 0;
1600   for (j=0; j<Ndomains; ++j) {
1601     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1602     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
1603     /* Vertical domain limits with an overlap. */
1604     y[0][0] = PetscMax(ystart - overlap,0);
1605     y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1606     /* Vertical domain limits without an overlap. */
1607     y[1][0] = ystart;
1608     y[1][1] = PetscMin(ystart + maxheight,N);
1609     xstart = 0;
1610     for (i=0; i<Mdomains; ++i) {
1611       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1612       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1613       /* Horizontal domain limits with an overlap. */
1614       x[0][0]  = PetscMax(xstart - overlap,0);
1615       x[0][1]  = PetscMin(xstart + maxwidth + overlap,M);
1616       /* Horizontal domain limits without an overlap. */
1617       x[1][0] = xstart;
1618       x[1][1] = PetscMin(xstart+maxwidth,M);
1619       /*
1620          Determine whether this domain intersects this processor's ownership range of pc->pmat.
1621          Do this twice: first for the domains with overlaps, and once without.
1622          During the first pass create the subcommunicators, and use them on the second pass as well.
1623       */
1624       for (q = 0; q < 2; ++q) {
1625         /*
1626           domain limits, (xleft, xright) and (ylow, yheigh) are adjusted
1627           according to whether the domain with an overlap or without is considered.
1628         */
1629         xleft = x[q][0]; xright = x[q][1];
1630         ylow  = y[q][0]; yhigh  = y[q][1];
1631         PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1632         nidx *= dof;
1633         n[q] = nidx;
1634         /*
1635          Based on the counted number of indices in the local domain *with an overlap*,
1636          construct a subcommunicator of all the processors supporting this domain.
1637          Observe that a domain with an overlap might have nontrivial local support,
1638          while the domain without an overlap might not.  Hence, the decision to participate
1639          in the subcommunicator must be based on the domain with an overlap.
1640          */
1641         if (q == 0) {
1642           if (nidx) {
1643             color = 1;
1644           } else {
1645             color = MPI_UNDEFINED;
1646           }
1647           ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
1648         }
1649         /*
1650          Proceed only if the number of local indices *with an overlap* is nonzero.
1651          */
1652         if (n[0]) {
1653           if (q == 0) {
1654             xis = is;
1655           }
1656           if (q == 1) {
1657             /*
1658              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1659              Moreover, if the overlap is zero, the two ISs are identical.
1660              */
1661             if (overlap == 0) {
1662               (*is_local)[s] = (*is)[s];
1663               ierr = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
1664               continue;
1665             } else {
1666               xis = is_local;
1667               subcomm = ((PetscObject)(*is)[s])->comm;
1668             }
1669           }/* if (q == 1) */
1670           idx = PETSC_NULL;
1671           ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1672           if (nidx) {
1673             k    = 0;
1674             for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1675               PetscInt x0 = (jj==ylow_loc)?xleft_loc:xleft;
1676               PetscInt x1 = (jj==yhigh_loc-1)?xright_loc:xright;
1677               kk = dof*(M*jj + x0);
1678               for (ii=x0; ii<x1; ++ii) {
1679                 for (d = 0; d < dof; ++d) {
1680                   idx[k++] = kk++;
1681                 }
1682               }
1683             }
1684           }
1685           ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
1686         }/* if (n[0]) */
1687       }/* for (q = 0; q < 2; ++q) */
1688       if (n[0]) {
1689         ++s;
1690       }
1691       xstart += maxwidth;
1692     }/* for (i = 0; i < Mdomains; ++i) */
1693     ystart += maxheight;
1694   }/* for (j = 0; j < Ndomains; ++j) */
1695   PetscFunctionReturn(0);
1696 }
1697 
1698 #undef __FUNCT__
1699 #define __FUNCT__ "PCGASMGetSubdomains"
1700 /*@C
1701     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1702     for the additive Schwarz preconditioner.
1703 
1704     Not Collective
1705 
1706     Input Parameter:
1707 .   pc - the preconditioner context
1708 
1709     Output Parameters:
1710 +   n   - the number of subdomains for this processor (default value = 1)
1711 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be PETSC_NULL)
1712 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be PETSC_NULL)
1713 
1714 
1715     Notes:
1716     The user is responsible for destroying the ISs and freeing the returned arrays.
1717     The IS numbering is in the parallel, global numbering of the vector.
1718 
1719     Level: advanced
1720 
1721 .keywords: PC, GASM, get, subdomains, additive Schwarz
1722 
1723 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1724           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1725 @*/
1726 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1727 {
1728   PC_GASM         *osm;
1729   PetscErrorCode ierr;
1730   PetscBool      match;
1731   PetscInt       i;
1732   PetscFunctionBegin;
1733   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1734   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1735   if (!match)
1736     SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1737   osm = (PC_GASM*)pc->data;
1738   if (n)  *n  = osm->n;
1739   if (iis) {
1740     ierr = PetscMalloc(osm->n*sizeof(IS), iis);CHKERRQ(ierr);
1741   }
1742   if (ois) {
1743     ierr = PetscMalloc(osm->n*sizeof(IS), ois);CHKERRQ(ierr);
1744   }
1745   if (iis || ois) {
1746     for (i = 0; i < osm->n; ++i) {
1747       if (iis) (*iis)[i] = osm->iis[i];
1748       if (ois) (*ois)[i] = osm->ois[i];
1749     }
1750   }
1751   PetscFunctionReturn(0);
1752 }
1753 
1754 #undef __FUNCT__
1755 #define __FUNCT__ "PCGASMGetSubmatrices"
1756 /*@C
1757     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1758     only) for the additive Schwarz preconditioner.
1759 
1760     Not Collective
1761 
1762     Input Parameter:
1763 .   pc - the preconditioner context
1764 
1765     Output Parameters:
1766 +   n   - the number of matrices for this processor (default value = 1)
1767 -   mat - the matrices
1768 
1769     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1770            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1771            subdomains were defined using PCGASMSetTotalSubdomains().
1772     Level: advanced
1773 
1774 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1775 
1776 .seealso: PCGASMSetTotalSubdomain(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1777           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1778 @*/
1779 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1780 {
1781   PC_GASM         *osm;
1782   PetscErrorCode ierr;
1783   PetscBool      match;
1784 
1785   PetscFunctionBegin;
1786   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1787   PetscValidIntPointer(n,2);
1788   if (mat) PetscValidPointer(mat,3);
1789   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1790   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1791   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1792   osm = (PC_GASM*)pc->data;
1793   if (n)   *n   = osm->n;
1794   if (mat) *mat = osm->pmat;
1795 
1796   PetscFunctionReturn(0);
1797 }
1798