xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision 009bbdc485cd9ad46be9940d3549e2dde9cdc322)
1 /*
2   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3   In this version each processor may intersect multiple subdomains and any subdomain may
4   intersect multiple processors.  Intersections of subdomains with processors are called *local
5   subdomains*.
6 
7        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
8        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
10 */
11 #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
12 
13 typedef struct {
14   PetscInt   N,n,nmax;
15   PetscInt   overlap;             /* overlap requested by user */
16   KSP        *ksp;                /* linear solvers for each block */
17   Vec        gx,gy;               /* Merged work vectors */
18   Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
19   VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
20   VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
21   IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
22   IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
23   Mat        *pmat;               /* subdomain block matrices */
24   PCGASMType type;                /* use reduced interpolation, restriction or both */
25   PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
26   PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
27   PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are same */
28   PetscBool  sort_indices;           /* flag to sort subdomain indices */
29 } PC_GASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCGASMSubdomainView_Private"
33 static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
34 {
35   PC_GASM        *osm  = (PC_GASM*)pc->data;
36   PetscInt       j,nidx;
37   const PetscInt *idx;
38   PetscViewer    sviewer;
39   char           *cidx;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBegin;
43   if (i < 0 || i > osm->n) SETERRQ2(((PetscObject)viewer)->comm, PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must nonnegative and less than %D", i, osm->n);
44   /* Inner subdomains. */
45   ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
46   /*
47    No more than 15 characters per index plus a space.
48    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
49    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
50    For nidx == 0, the whole string 16 '\0'.
51    */
52   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
53   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
54   ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
55   for (j = 0; j < nidx; ++j) {
56     ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
57   }
58   ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
59   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
60   ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
61   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
62   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
63   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
64   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
65   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
66   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
67   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68   ierr = PetscFree(cidx);CHKERRQ(ierr);
69   /* Outer subdomains. */
70   ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
71   /*
72    No more than 15 characters per index plus a space.
73    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
74    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
75    For nidx == 0, the whole string 16 '\0'.
76    */
77   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
78   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
79   ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
80   for (j = 0; j < nidx; ++j) {
81     ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
82   }
83   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
84   ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
85   ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
86   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
87   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
88   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
89   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
90   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
91   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
92   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
93   ierr = PetscFree(cidx);CHKERRQ(ierr);
94   PetscFunctionReturn(0);
95 }
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "PCGASMPrintSubdomains"
99 static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
100 {
101   PC_GASM        *osm  = (PC_GASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscInt       i, l, d, count, gcount, *permutation, *numbering;
105   PetscBool      found;
106   PetscViewer    viewer, sviewer = PETSC_NULL;
107   PetscErrorCode ierr;
108 
109   PetscFunctionBegin;
110   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
111   for (i = 0; i < osm->n; ++i) permutation[i] = i;
112   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
113   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
114   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
115   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
116   if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
117   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
118   /*
119    Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
120    the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
121   */
122   ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
123   l = 0;
124   for (count = 0; count < gcount; ++count) {
125     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
126     if (l<osm->n) {
127       d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
128       if (numbering[d] == count) {
129         ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
130         ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
131         ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
132         ++l;
133       }
134     }
135     ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
136   }
137   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
138   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
139   PetscFunctionReturn(0);
140 }
141 
142 
143 #undef __FUNCT__
144 #define __FUNCT__ "PCView_GASM"
145 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
146 {
147   PC_GASM        *osm = (PC_GASM*)pc->data;
148   const char     *prefix;
149   PetscErrorCode ierr;
150   PetscMPIInt    rank, size;
151   PetscInt       i,bsz;
152   PetscBool      iascii,view_subdomains=PETSC_FALSE;
153   PetscViewer    sviewer;
154   PetscInt       count, l, gcount, *numbering, *permutation;
155   char           overlap[256]     = "user-defined overlap";
156   char           gsubdomains[256] = "unknown total number of subdomains";
157   char           lsubdomains[256] = "unknown number of local  subdomains";
158   char           msubdomains[256] = "unknown max number of local subdomains";
159 
160   PetscFunctionBegin;
161   ierr = MPI_Comm_size(((PetscObject)pc)->comm, &size);CHKERRQ(ierr);
162   ierr = MPI_Comm_rank(((PetscObject)pc)->comm, &rank);CHKERRQ(ierr);
163   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
164   for (i = 0; i < osm->n; ++i) permutation[i] = i;
165   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
166   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
167 
168   if (osm->overlap >= 0) {
169     ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
170   }
171   ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
172   if (osm->N > 0) {
173     ierr = PetscSNPrintf(lsubdomains, sizeof(gsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
174   }
175   if (osm->nmax > 0) {
176     ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
177   }
178 
179   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
180   ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,PETSC_NULL);CHKERRQ(ierr);
181 
182   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
183   if (iascii) {
184     /*
185      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
186      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
187      collectively on the comm.
188      */
189     ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
190     ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
191     ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
192     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
193     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
194     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
195     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
196     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
197     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr);
198     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
199     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
200     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
201     ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
202     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
203     ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
204     /* Make sure that everybody waits for the banner to be printed. */
205     ierr = MPI_Barrier(((PetscObject)viewer)->comm);CHKERRQ(ierr);
206     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
207     l = 0;
208     for (count = 0; count < gcount; ++count) {
209       PetscMPIInt srank, ssize;
210       if (l<osm->n) {
211         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
212         if (numbering[d] == count) {
213           ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
214           ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
215           ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
216           ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
217           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
218           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%D:%D] (subcomm [%D:%D]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr);
219           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
220           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
221           if (view_subdomains) {
222             ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
223           }
224           if (!pc->setupcalled) {
225             PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
226           }
227           else {
228             ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
229           }
230           ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
231           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
232           ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
233           ++l;
234         }
235       }
236       ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
237     }
238     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
239   }
240   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
241   PetscFunctionReturn(0);
242 }
243 
244 
245 
246 
247 
248 #undef __FUNCT__
249 #define __FUNCT__ "PCSetUp_GASM"
250 static PetscErrorCode PCSetUp_GASM(PC pc)
251 {
252   PC_GASM         *osm  = (PC_GASM*)pc->data;
253   PetscErrorCode ierr;
254   PetscBool      symset,flg;
255   PetscInt       i;
256   PetscMPIInt    rank, size;
257   MatReuse       scall = MAT_REUSE_MATRIX;
258   KSP            ksp;
259   PC             subpc;
260   const char     *prefix,*pprefix;
261   Vec            x,y;
262   PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
263   const PetscInt *oidxi;    /* Indices from the i-th subdomain local outer subdomain.             */
264   PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
265   PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
266   IS             gois;      /* Disjoint union the global indices of outer subdomains.             */
267   IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
268   PetscScalar    *gxarray, *gyarray;
269   PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy --
270                              over the disjoint union of outer subdomains. */
271   DM             *subdomain_dm = PETSC_NULL;
272 
273   PetscFunctionBegin;
274   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
275   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
276   if (!pc->setupcalled) {
277 
278     if (!osm->type_set) {
279       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
280       if (symset && flg) { osm->type = PC_GASM_BASIC; }
281     }
282 
283     /*
284      If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
285      The total number of subdomains, osm->N is not necessarily set, might be PETSC_DECIDE, and then will have to be calculated from osm->n.
286      */
287     if (osm->n == PETSC_DECIDE) {
288       /* no subdomains given */
289       /* try pc->dm first */
290       if (pc->dm) {
291         char      ddm_name[1024];
292         DM        ddm;
293         PetscBool flg;
294         PetscInt     num_subdomains, d;
295         char         **subdomain_names;
296         IS           *inner_subdomain_is, *outer_subdomain_is;
297         /* Allow the user to request a decomposition DM by name */
298         ierr = PetscStrncpy(ddm_name, "", 1024);CHKERRQ(ierr);
299         ierr = PetscOptionsString("-pc_gasm_decomposition","Name of the DM defining the decomposition", "PCSetDM",ddm_name,ddm_name,1024,&flg);CHKERRQ(ierr);
300         if (flg) {
301           ierr = DMCreateDomainDecompositionDM(pc->dm, ddm_name, &ddm);CHKERRQ(ierr);
302           if (!ddm) SETERRQ1(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Uknown DM decomposition name %s", ddm_name);
303           ierr = PetscInfo(pc,"Using decomposition DM defined using options database\n");CHKERRQ(ierr);
304           ierr = PCSetDM(pc,ddm);CHKERRQ(ierr);
305         }
306         ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
307         if (num_subdomains) {
308           ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
309         }
310         for (d = 0; d < num_subdomains; ++d) {
311           if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
312           if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
313           if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
314         }
315         ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
316         ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
317         ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
318       }
319       if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
320         osm->nmax = osm->n = 1;
321         ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
322         osm->N = size;
323       }
324     }
325     if (!osm->iis) {
326       /*
327        The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
328        but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
329        We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
330        */
331       ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
332     }
333     if (osm->N == PETSC_DECIDE) {
334       struct {PetscInt max,sum;} inwork,outwork;
335       /* determine global number of subdomains and the max number of local subdomains */
336       inwork.max = osm->n;
337       inwork.sum = osm->n;
338       ierr = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
339       osm->nmax = outwork.max;
340       osm->N    = outwork.sum;
341     }
342 
343     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
344     flg  = PETSC_FALSE;
345     ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
346     if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }
347 
348     if (osm->sort_indices) {
349       for (i=0; i<osm->n; i++) {
350         ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
351         ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
352       }
353     }
354     /*
355      Merge the ISs, create merged vectors and restrictions.
356      */
357     /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
358     on = 0;
359     for (i=0; i<osm->n; i++) {
360       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
361       on += oni;
362     }
363     ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr);
364     on = 0;
365     for (i=0; i<osm->n; i++) {
366       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
367       ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
368       ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
369       ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
370       on += oni;
371     }
372     ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
373     ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
374     ierr = VecCreateMPI(((PetscObject)pc)->comm, on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
375     ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
376     ierr = VecGetOwnershipRange(osm->gx, &gofirst, PETSC_NULL);CHKERRQ(ierr);
377     ierr = ISCreateStride(((PetscObject)pc)->comm,on,gofirst,1, &goid);CHKERRQ(ierr);
378     ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
379     ierr = VecDestroy(&x);CHKERRQ(ierr);
380     ierr = ISDestroy(&gois);CHKERRQ(ierr);
381     /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
382     { PetscInt       ini;     /* Number of indices the i-th a local inner subdomain. */
383       PetscInt       in;      /* Number of indices in the disjoint uniont of local inner subdomains. */
384       PetscInt       *iidx;   /* Global indices in the merged local inner subdomain. */
385       PetscInt       *ioidx;  /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
386       IS             giis;    /* IS for the disjoint union of inner subdomains. */
387       IS             giois;   /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
388       /**/
389       in = 0;
390       for (i=0; i<osm->n; i++) {
391         ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
392         in += ini;
393       }
394       ierr = PetscMalloc(in*sizeof(PetscInt), &iidx);CHKERRQ(ierr);
395       ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr);
396       ierr = VecGetOwnershipRange(osm->gx,&gofirst, PETSC_NULL);CHKERRQ(ierr);
397       in = 0;
398       on = 0;
399       for (i=0; i<osm->n; i++) {
400         const PetscInt *iidxi;        /* Global indices of the i-th local inner subdomain. */
401         ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
402         PetscInt       *ioidxi;       /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
403         PetscInt       ioni;          /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
404         PetscInt       k;
405         ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
406         ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
407         ierr = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
408         ierr = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
409         ierr = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
410         ioidxi = ioidx+in;
411         ierr = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
412         ierr = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
413         ierr = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
414         if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
415         for (k = 0; k < ini; ++k) {
416           ioidxi[k] += gofirst+on;
417         }
418         in += ini;
419         on += oni;
420       }
421       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
422       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
423       ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
424       ierr = VecDestroy(&y);CHKERRQ(ierr);
425       ierr = ISDestroy(&giis);CHKERRQ(ierr);
426       ierr = ISDestroy(&giois);CHKERRQ(ierr);
427     }
428     ierr = ISDestroy(&goid);CHKERRQ(ierr);
429     /* Create the subdomain work vectors. */
430     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
431     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
432     ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
433     ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
434     for (i=0, on=0; i<osm->n; ++i, on += oni) {
435       PetscInt oNi;
436       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
437       ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
438       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
439       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
440     }
441     ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
442     ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
443     /* Create the local solvers */
444     ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
445     for (i=0; i<osm->n; i++) { /* KSPs are local */
446       ierr = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
447       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
448       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
449       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
450       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
451       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
452       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
453       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
454       osm->ksp[i] = ksp;
455     }
456     scall = MAT_INITIAL_MATRIX;
457 
458   }/*if (!pc->setupcalled)*/
459   else {
460     /*
461        Destroy the blocks from the previous iteration
462     */
463     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
464       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
465       scall = MAT_INITIAL_MATRIX;
466     }
467   }
468 
469   /*
470      Extract out the submatrices.
471   */
472   if (size > 1) {
473     ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
474   }
475   else {
476     ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
477   }
478   if (scall == MAT_INITIAL_MATRIX) {
479     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
480     for (i=0; i<osm->n; i++) {
481       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
482       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
483     }
484   }
485 
486   /* Return control to the user so that the submatrices can be modified (e.g., to apply
487      different boundary conditions for the submatrices than for the global problem) */
488   ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
489 
490   /*
491      Loop over submatrices putting them into local ksps
492   */
493   for (i=0; i<osm->n; i++) {
494     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
495     if (!pc->setupcalled) {
496       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
497     }
498   }
499   PetscFunctionReturn(0);
500 }
501 
502 #undef __FUNCT__
503 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
504 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
505 {
506   PC_GASM         *osm = (PC_GASM*)pc->data;
507   PetscErrorCode ierr;
508   PetscInt       i;
509 
510   PetscFunctionBegin;
511   for (i=0; i<osm->n; i++) {
512     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
513   }
514   PetscFunctionReturn(0);
515 }
516 
517 #undef __FUNCT__
518 #define __FUNCT__ "PCApply_GASM"
519 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
520 {
521   PC_GASM         *osm = (PC_GASM*)pc->data;
522   PetscErrorCode ierr;
523   PetscInt       i;
524   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
525 
526   PetscFunctionBegin;
527   /*
528      Support for limiting the restriction or interpolation only to the inner
529      subdomain values (leaving the other values 0).
530   */
531   if (!(osm->type & PC_GASM_RESTRICT)) {
532     /* have to zero the work RHS since scatter may leave some slots empty */
533     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
534     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
535   }
536   else {
537     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
538   }
539   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
540   if (!(osm->type & PC_GASM_RESTRICT)) {
541     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
542   }
543   else {
544     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
545   }
546   /* do the subdomain solves */
547   for (i=0; i<osm->n; ++i) {
548     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
549   }
550   ierr = VecZeroEntries(y);CHKERRQ(ierr);
551   if (!(osm->type & PC_GASM_INTERPOLATE)) {
552     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
553     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
554   }
555   else {
556     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
557     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
558   }
559 }
560 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_GASM"
/*
   PCApplyTranspose_GASM - Applies the transpose of the GASM preconditioner:
   scatter x onto the subdomains, run KSPSolveTranspose() on each local
   subdomain block, and accumulate the corrections back into y.
*/
static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM         *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_GASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  else {
    ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* gy is zeroed between VecScatterBegin() and VecScatterEnd() so this local
     work can overlap with the communication started above. */
  ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  else {
    ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  /* y is accumulated with ADD_VALUES below, so it must start from zero. */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_RESTRICT)) {
    ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  else {
    ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
608 
609 #undef __FUNCT__
610 #define __FUNCT__ "PCReset_GASM"
611 static PetscErrorCode PCReset_GASM(PC pc)
612 {
613   PC_GASM        *osm = (PC_GASM*)pc->data;
614   PetscErrorCode ierr;
615   PetscInt       i;
616 
617   PetscFunctionBegin;
618   if (osm->ksp) {
619     for (i=0; i<osm->n; i++) {
620       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
621     }
622   }
623   if (osm->pmat) {
624     if (osm->n > 0) {
625       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
626     }
627   }
628   if (osm->x) {
629     for (i=0; i<osm->n; i++) {
630       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
631       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
632     }
633   }
634   ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
635   ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);
636 
637   ierr = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
638   ierr = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
639   ierr = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
640   osm->ois = 0;
641   osm->iis = 0;
642   PetscFunctionReturn(0);
643 }
644 
645 #undef __FUNCT__
646 #define __FUNCT__ "PCDestroy_GASM"
647 static PetscErrorCode PCDestroy_GASM(PC pc)
648 {
649   PC_GASM         *osm = (PC_GASM*)pc->data;
650   PetscErrorCode ierr;
651   PetscInt       i;
652 
653   PetscFunctionBegin;
654   ierr = PCReset_GASM(pc);CHKERRQ(ierr);
655   if (osm->ksp) {
656     for (i=0; i<osm->n; i++) {
657       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
658     }
659     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
660   }
661   ierr = PetscFree(osm->x);CHKERRQ(ierr);
662   ierr = PetscFree(osm->y);CHKERRQ(ierr);
663   ierr = PetscFree(pc->data);CHKERRQ(ierr);
664   PetscFunctionReturn(0);
665 }
666 
667 #undef __FUNCT__
668 #define __FUNCT__ "PCSetFromOptions_GASM"
669 static PetscErrorCode PCSetFromOptions_GASM(PC pc)
670 {
671   PC_GASM         *osm = (PC_GASM*)pc->data;
672   PetscErrorCode ierr;
673   PetscInt       blocks,ovl;
674   PetscBool      symset,flg;
675   PCGASMType      gasmtype;
676 
677   PetscFunctionBegin;
678   /* set the type to symmetric if matrix is symmetric */
679   if (!osm->type_set && pc->pmat) {
680     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
681     if (symset && flg) { osm->type = PC_GASM_BASIC; }
682   }
683   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
684     ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
685     osm->create_local = PETSC_TRUE;
686     ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,&flg);CHKERRQ(ierr);
687     if (!osm->create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains yet.");
688 
689     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr); }
690     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
691     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
692     flg  = PETSC_FALSE;
693     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
694     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
695   ierr = PetscOptionsTail();CHKERRQ(ierr);
696   PetscFunctionReturn(0);
697 }
698 
699 /*------------------------------------------------------------------------------------*/
700 
701 EXTERN_C_BEGIN
702 #undef __FUNCT__
703 #define __FUNCT__ "PCGASMSetSubdomains_GASM"
704 PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
705 {
706   PC_GASM         *osm = (PC_GASM*)pc->data;
707   PetscErrorCode ierr;
708   PetscInt       i;
709 
710   PetscFunctionBegin;
711   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
712   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
713 
714   if (!pc->setupcalled) {
715     osm->n       = n;
716     osm->ois     = 0;
717     osm->iis     = 0;
718     if (ois) {
719       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
720     }
721     if (iis) {
722       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
723     }
724     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
725     if (ois) {
726       ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
727       for (i=0; i<n; i++) { osm->ois[i] = ois[i]; }
728       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
729       osm->overlap = -1;
730       if (!iis) {
731         ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
732         for (i=0; i<n; i++) {
733           for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
734           osm->iis[i] = ois[i];
735         }
736       }
737     }
738     if (iis) {
739       ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
740       for (i=0; i<n; i++) { osm->iis[i] = iis[i]; }
741       if (!ois) {
742         ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
743         for (i=0; i<n; i++) {
744           for (i=0; i<n; i++) {
745             ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
746             osm->ois[i] = iis[i];
747           }
748         }
749         if (osm->overlap > 0) {
750           /* Extend the "overlapping" regions by a number of steps */
751           ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
752         }
753       }
754     }
755   }
756   PetscFunctionReturn(0);
757 }
758 EXTERN_C_END
759 
760 EXTERN_C_BEGIN
761 #undef __FUNCT__
762 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
763 PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
764 {
765   PC_GASM         *osm = (PC_GASM*)pc->data;
766   PetscErrorCode ierr;
767   PetscMPIInt    rank,size;
768   PetscInt       n;
769   PetscInt       Nmin, Nmax;
770 
771   PetscFunctionBegin;
772   if (!create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No suppor for autocreation of nonlocal subdomains.");
773   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
774   ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,((PetscObject)pc)->comm);CHKERRQ(ierr);
775   ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
776   if (Nmin != Nmax) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);
777 
778   osm->create_local = create_local;
779   /*
780      Split the subdomains equally among all processors
781   */
782   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
783   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
784   n = N/size + ((N % size) > rank);
785   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
786   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
787   if (!pc->setupcalled) {
788     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
789     osm->N            = N;
790     osm->n            = n;
791     osm->nmax         = N/size + ((N%size)?1:0);
792     osm->ois           = 0;
793     osm->iis     = 0;
794   }
795   PetscFunctionReturn(0);
796 }
797 EXTERN_C_END
798 
799 EXTERN_C_BEGIN
800 #undef __FUNCT__
801 #define __FUNCT__ "PCGASMSetOverlap_GASM"
802 PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
803 {
804   PC_GASM *osm = (PC_GASM*)pc->data;
805 
806   PetscFunctionBegin;
807   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
808   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
809   if (!pc->setupcalled) {
810     osm->overlap = ovl;
811   }
812   PetscFunctionReturn(0);
813 }
814 EXTERN_C_END
815 
816 EXTERN_C_BEGIN
817 #undef __FUNCT__
818 #define __FUNCT__ "PCGASMSetType_GASM"
819 PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
820 {
821   PC_GASM *osm = (PC_GASM*)pc->data;
822 
823   PetscFunctionBegin;
824   osm->type     = type;
825   osm->type_set = PETSC_TRUE;
826   PetscFunctionReturn(0);
827 }
828 EXTERN_C_END
829 
830 EXTERN_C_BEGIN
831 #undef __FUNCT__
832 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
833 PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
834 {
835   PC_GASM *osm = (PC_GASM*)pc->data;
836 
837   PetscFunctionBegin;
838   osm->sort_indices = doSort;
839   PetscFunctionReturn(0);
840 }
841 EXTERN_C_END
842 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP_GASM"
/*
   PCGASMGetSubKSP_GASM - Returns this process's subdomain solvers and,
   optionally, the local subdomain count and the global number of the first
   local subdomain.

   FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
        In particular, it would upset the global subdomain number calculation.
*/
PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
{
  PC_GASM         *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* osm->n stays at its creation-time default (PETSC_DECIDE, negative) until the subdomains are established. */
  if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n) {
    *n = osm->n;
  }
  if (first) {
    /* MPI_Scan() is collective: either every process must request 'first' or none may. */
    ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    *first -= osm->n;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true, though!  This flag is used only for PCView_GASM() */
    *ksp                   = osm->ksp;
    osm->same_subdomain_solvers = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}/* PCGASMGetSubKSP_GASM() */
EXTERN_C_END
874 
875 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains"
/*@C
    PCGASMSetSubdomains - Sets the subdomains for this processor
    for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
.   n   - the number of subdomains for this processor
.   iis - the index sets that define this processor's local inner subdomains
         (or PETSC_NULL for PETSc to determine subdomains)
-   ois - the index sets that define this processor's local outer subdomains
         (or PETSC_NULL to use the same as iis)

    Notes:
    The IS indices use the parallel, global numbering of the vector entries.
    Inner subdomains are those where the correction is applied.
    Outer subdomains are those where the residual necessary to obtain the
    corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
    Both inner and outer subdomains can extend over several processors.
    This processor's portion of a subdomain is known as a local subdomain.

    By default the GASM preconditioner uses 1 (local) subdomain per processor.
    Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
    all processors that PCGASM will create automatically, and to specify whether
    they should be local or not.


    Level: advanced

.keywords: PC, GASM, set, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCGASMSetSubdomains_GASM() when pc is of type PCGASM; a no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
922 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
    Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
    communicator. Either all or no processors in the PC communicator must call this routine with
    the same N.  The subdomains will be created automatically during PCSetUp().

    Collective on PC

    Input Parameters:
+   pc           - the preconditioner context
.   N            - the total number of subdomains cumulative across all processors
-   create_local - whether the subdomains to be created are to be local

    Options Database Key:
    To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
+    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
-    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.


    Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
    of subdomains per processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCGASMSetTotalSubdomains_GASM() when pc is of type PCGASM; a no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
965 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <overlap> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
    PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatches to PCGASMSetOverlap_GASM() when pc is of type PCGASM; a no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1016 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Notes:
    Unless set here or via the options database, the type defaults to PC_GASM_RESTRICT.

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatches to PCGASMSetType_GASM() when pc is of type PCGASM; a no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1055 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Notes:
    Subdomain indices are sorted by default.

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatches to PCGASMSetSortIndices_GASM() when pc is of type PCGASM; a no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1084 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSP contexts is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (rather than PetscTryMethod): it is an error to call this on a non-GASM PC. */
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1126 
1127 /* -------------------------------------------------------------------------------------*/
1128 /*MC
1129    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1130            its own KSP object.
1131 
1132    Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets total number of local subdomains (known as blocks) to be distributed among processors
1134 .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1135 .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1136 .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1137 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1138 
1139      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1140       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1141       -pc_gasm_type basic to use the standard GASM.
1142 
1143    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1144      than one processor. Defaults to one block per processor.
1145 
1146      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1147         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1148 
1149      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
1150          and set the options directly on the resulting KSP object (you can access its PC
1151          with KSPGetPC())
1152 
1153 
1154    Level: beginner
1155 
1156    Concepts: additive Schwarz method
1157 
1158     References:
1159     An additive variant of the Schwarz alternating method for the case of many subregions
1160     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1161 
1162     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1163     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1164 
1165 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1166            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1167            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1168 
1169 M*/
1170 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
/*
   PCCreate_GASM - Creates the PC_GASM context, installs the PCGASM function
   table, and registers the PCGASMXXX_C methods that the PCGASMXXX() interface
   routines dispatch to.
*/
PetscErrorCode  PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
  /* Defaults: subdomain counts decided at setup, overlap of 1, restricted-ASM variant. */
  osm->N                 = PETSC_DECIDE;
  osm->n                 = PETSC_DECIDE;
  osm->nmax              = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->gorestriction     = 0;
  osm->girestriction     = 0;
  osm->gx                = 0;
  osm->gy                = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->ois               = 0;
  osm->iis               = 0;
  osm->pmat              = 0;
  osm->type              = PC_GASM_RESTRICT;
  osm->same_subdomain_solvers = PETSC_TRUE;
  osm->sort_indices           = PETSC_TRUE;

  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_GASM;
  pc->ops->applytranspose    = PCApplyTranspose_GASM;
  pc->ops->setup             = PCSetUp_GASM;
  pc->ops->reset             = PCReset_GASM;
  pc->ops->destroy           = PCDestroy_GASM;
  pc->ops->setfromoptions    = PCSetFromOptions_GASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
  pc->ops->view              = PCView_GASM;
  pc->ops->applyrichardson   = 0;

  /* Register the type-specific implementations behind the generic PCGASMXXX() entry points. */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM",
                    PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
                    PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
                    PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
                    PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
                    PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
                    PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
1225 
1226 
1227 #undef __FUNCT__
1228 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1229 /*@C
1230    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
   Schwarz preconditioner for any problem based on its matrix.
1232 
1233    Collective
1234 
1235    Input Parameters:
1236 +  A       - The global matrix operator
1237 .  overlap - amount of overlap in outer subdomains
1238 -  n       - the number of local subdomains
1239 
1240    Output Parameters:
1241 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1242 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1243 
1244    Level: advanced
1245 
1246    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1247          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1248          nonzero overlap with PCGASMSetOverlap()
1249 
    In the Fortran version you must provide the arrays iis[] and ois[] already allocated of length n.
1251 
1252 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1253 
1254 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1255 @*/
1256 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS* iis[], IS* ois[])
1257 {
1258   MatPartitioning           mpart;
1259   const char                *prefix;
1260   PetscErrorCode            (*f)(Mat,MatReuse,Mat*);
1261   PetscMPIInt               size;
1262   PetscInt                  i,j,rstart,rend,bs;
1263   PetscBool                 isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1264   Mat                       Ad = PETSC_NULL, adj;
1265   IS                        ispart,isnumb,*is;
1266   PetscErrorCode            ierr;
1267 
1268   PetscFunctionBegin;
1269   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1270   PetscValidPointer(iis,4);
1271   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1272 
1273   /* Get prefix, row distribution, and block size */
1274   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1275   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1276   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1277   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1278 
1279   /* Get diagonal block from matrix if possible */
1280   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1281   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1282   if (f) {
1283     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1284   } else if (size == 1) {
1285     Ad = A;
1286   }
1287   if (Ad) {
1288     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1289     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1290   }
1291   if (Ad && n > 1) {
1292     PetscBool  match,done;
1293     /* Try to setup a good matrix partitioning if available */
1294     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1295     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1296     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1297     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1298     if (!match) {
1299       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1300     }
1301     if (!match) { /* assume a "good" partitioner is available */
1302       PetscInt na;
1303       const PetscInt *ia,*ja;
1304       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1305       if (done) {
1306         /* Build adjacency matrix by hand. Unfortunately a call to
1307            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1308            remove the block-aij structure and we cannot expect
1309            MatPartitioning to split vertices as we need */
1310         PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1311         const PetscInt *row;
1312         nnz = 0;
1313         for (i=0; i<na; i++) { /* count number of nonzeros */
1314           len = ia[i+1] - ia[i];
1315           row = ja + ia[i];
1316           for (j=0; j<len; j++) {
1317             if (row[j] == i) { /* don't count diagonal */
1318               len--; break;
1319             }
1320           }
1321           nnz += len;
1322         }
1323         ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1324         ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1325         nnz    = 0;
1326         iia[0] = 0;
1327         for (i=0; i<na; i++) { /* fill adjacency */
1328           cnt = 0;
1329           len = ia[i+1] - ia[i];
1330           row = ja + ia[i];
1331           for (j=0; j<len; j++) {
1332             if (row[j] != i) { /* if not diagonal */
1333               jja[nnz+cnt++] = row[j];
1334             }
1335           }
1336           nnz += cnt;
1337           iia[i+1] = nnz;
1338         }
1339         /* Partitioning of the adjacency matrix */
1340         ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1341         ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1342         ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1343         ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1344         ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1345         ierr = MatDestroy(&adj);CHKERRQ(ierr);
1346         foundpart = PETSC_TRUE;
1347       }
1348       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1349     }
1350     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1351   }
1352   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1353   if (!foundpart) {
1354 
1355     /* Partitioning by contiguous chunks of rows */
1356 
1357     PetscInt mbs   = (rend-rstart)/bs;
1358     PetscInt start = rstart;
1359     for (i=0; i<n; i++) {
1360       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1361       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1362       start += count;
1363     }
1364 
1365   } else {
1366 
1367     /* Partitioning by adjacency of diagonal block  */
1368 
1369     const PetscInt *numbering;
1370     PetscInt       *count,nidx,*indices,*newidx,start=0;
1371     /* Get node count in each partition */
1372     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1373     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1374     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1375       for (i=0; i<n; i++) count[i] *= bs;
1376     }
1377     /* Build indices from node numbering */
1378     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1379     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1380     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1381     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1382     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1383     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1384     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1385       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1386       for (i=0; i<nidx; i++)
1387         for (j=0; j<bs; j++)
1388           newidx[i*bs+j] = indices[i]*bs + j;
1389       ierr = PetscFree(indices);CHKERRQ(ierr);
1390       nidx   *= bs;
1391       indices = newidx;
1392     }
1393     /* Shift to get global indices */
1394     for (i=0; i<nidx; i++) indices[i] += rstart;
1395 
1396     /* Build the index sets for each block */
1397     for (i=0; i<n; i++) {
1398       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1399       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1400       start += count[i];
1401     }
1402 
1403     ierr = PetscFree(count);
1404     ierr = PetscFree(indices);
1405     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1406     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1407   }
1408   *iis = is;
1409   if (!ois) PetscFunctionReturn(0);
1410   /*
1411    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1412    has been requested, copy the inner subdomains over so they can be modified.
1413    */
1414   ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr);
1415   for (i=0; i<n; ++i) {
1416     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1417       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1418       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1419     } else {
1420       ierr = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1421       (*ois)[i] = (*iis)[i];
1422     }
1423   }
1424   if (overlap > 0) {
1425     /* Extend the "overlapping" regions by a number of steps */
1426     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1427   }
1428   PetscFunctionReturn(0);
1429 }
1430 
1431 #undef __FUNCT__
1432 #define __FUNCT__ "PCGASMDestroySubdomains"
1433 /*@C
1434    PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
1436    called after setting subdomains with PCGASMSetSubdomains().
1437 
1438    Collective
1439 
1440    Input Parameters:
1441 +  n   - the number of index sets
.  iis - the array of inner subdomains
1443 -  ois - the array of outer subdomains, can be PETSC_NULL
1444 
1445    Level: intermediate
1446 
1447    Notes: this is merely a convenience subroutine that walks each list,
1448    destroys each IS on the list, and then frees the list.
1449 
1450 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1451 
1452 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1453 @*/
1454 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1455 {
1456   PetscInt       i;
1457   PetscErrorCode ierr;
1458 
1459   PetscFunctionBegin;
1460   if (n <= 0) PetscFunctionReturn(0);
1461   if (iis) {
1462     PetscValidPointer(iis,2);
1463     for (i=0; i<n; i++) {
1464       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1465     }
1466     ierr = PetscFree(iis);CHKERRQ(ierr);
1467   }
1468   if (ois) {
1469     for (i=0; i<n; i++) {
1470       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1471     }
1472     ierr = PetscFree(ois);CHKERRQ(ierr);
1473   }
1474   PetscFunctionReturn(0);
1475 }
1476 
1477 
/*
  PCGASMLocalSubdomainBounds2D - Intersects the rectangular subdomain
  [xleft,xright) x [ylow,yhigh) of an M x N grid (row-major ordering of grid
  points) with this processor's ownership range [first,last) of grid points.
  Outputs (all pointer arguments): *ylow_loc/*yhigh_loc are the first row and
  one-past-the-last row of the intersection; *xleft_loc/*xright_loc are the
  column bounds on the first and last owned rows; *n is the number of owned
  subdomain points.  N is accepted for symmetry but unused.
  Wrapped in do { } while (0) so the macro expands to a single statement and
  remains safe inside unbraced if/else branches at the call site.
*/
#define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
do {                                                                                                    \
  PetscInt first_row = first/M, last_row = last/M+1;                                                    \
  /*                                                                                                    \
   Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
   of the bounding box of the intersection of the subdomain with the local ownership range (local       \
   subdomain).                                                                                          \
   Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
   of the intersection.                                                                                 \
  */                                                                                                    \
  /* ylow_loc is the grid row containing the first element of the local subdomain */                    \
  *ylow_loc = PetscMax(first_row,ylow);                                                                 \
  /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
  *xleft_loc = *ylow_loc==first_row?PetscMax(first%M,xleft):xleft;                                      \
  /* yhigh_loc is the grid row above the last local subdomain element */                                \
  *yhigh_loc = PetscMin(last_row,yhigh);                                                                \
  /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
  *xright_loc = *yhigh_loc==last_row?PetscMin(xright,last%M):xright;                                    \
  /* Now compute the size of the local subdomain n: full middle rows plus the    */                     \
  /* partial first and last rows (clamped to [0,width] to handle out-of-range    */                     \
  /* xleft_loc/xright_loc).                                                      */                     \
  *n = 0;                                                                                               \
  if (*ylow_loc < *yhigh_loc) {                                                                         \
    PetscInt width = xright-xleft;                                                                      \
    *n += width*(*yhigh_loc-*ylow_loc-1);                                                               \
    *n += PetscMin(PetscMax(*xright_loc-xleft,0),width);                                                \
    *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width);                                                \
  }                                                                                                     \
} while (0)
1505 
1506 
1507 
1508 #undef __FUNCT__
1509 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1510 /*@
1511    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1512    preconditioner for a two-dimensional problem on a regular grid.
1513 
1514    Collective
1515 
1516    Input Parameters:
1517 +  M, N               - the global number of grid points in the x and y directions
1518 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1519 .  dof                - degrees of freedom per node
1520 -  overlap            - overlap in mesh lines
1521 
1522    Output Parameters:
1523 +  Nsub - the number of local subdomains created
1524 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1525 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1526 
1527 
1528    Level: advanced
1529 
1530 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1531 
1532 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1533           PCGASMSetOverlap()
1534 @*/
PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
{
  PetscErrorCode ierr;
  PetscMPIInt    size, rank;
  PetscInt       i, j;
  PetscInt       maxheight, maxwidth;
  PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
  PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
  /* x[q][0..1], y[q][0..1] hold the horizontal/vertical limits of the current
     subdomain: q==0 is the outer (with overlap) version, q==1 the inner
     (no-overlap) version; n[q] is the local index count for each version. */
  PetscInt       x[2][2], y[2][2], n[2];
  PetscInt       first, last;
  PetscInt       nidx, *idx;
  PetscInt       ii,jj,s,q,d;
  PetscInt       k,kk;
  PetscMPIInt    color;
  MPI_Comm       comm, subcomm;
  /* is/is_local alias the output arrays; xis points at whichever of the two is
     being filled for the current q. */
  IS             **xis = 0, **is = ois, **is_local = iis;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* first/last: this rank's ownership range of matrix rows; must align with
     the dof blocking so grid points are not split across ranks. */
  ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
  if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
             "does not respect the number of degrees of freedom per grid point %D", first, last, dof);

  /* Determine the number of domains with nonzero intersections with the local ownership range. */
  s = 0;
  ystart = 0;
  for (j=0; j<Ndomains; ++j) {
    maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
    /* Vertical domain limits with an overlap. */
    ylow = PetscMax(ystart - overlap,0);
    yhigh = PetscMin(ystart + maxheight + overlap,N);
    xstart = 0;
    for (i=0; i<Mdomains; ++i) {
      maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
      if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
      /* Horizontal domain limits with an overlap. */
      xleft   = PetscMax(xstart - overlap,0);
      xright  = PetscMin(xstart + maxwidth + overlap,M);
      /*
         Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
      */
      PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
      if (nidx) {
        ++s;
      }
      xstart += maxwidth;
    }/* for (i = 0; i < Mdomains; ++i) */
    ystart += maxheight;
  }/* for (j = 0; j < Ndomains; ++j) */
  /* Now we can allocate the necessary number of ISs. */
  *nsub = s;
  /* NOTE(review): sizeof(IS*) below — an array of IS is being allocated, so
     sizeof(IS) would be the matching element type; harmless since both are
     pointer-sized, but worth confirming/cleaning up. */
  ierr = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr);
  ierr = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
  /* Second pass: recompute the same intersections and actually build the ISs. */
  s = 0;
  ystart = 0;
  for (j=0; j<Ndomains; ++j) {
    maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
    /* Vertical domain limits with an overlap. */
    y[0][0] = PetscMax(ystart - overlap,0);
    y[0][1] = PetscMin(ystart + maxheight + overlap,N);
    /* Vertical domain limits without an overlap. */
    y[1][0] = ystart;
    y[1][1] = PetscMin(ystart + maxheight,N);
    xstart = 0;
    for (i=0; i<Mdomains; ++i) {
      maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
      if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
      /* Horizontal domain limits with an overlap. */
      x[0][0]  = PetscMax(xstart - overlap,0);
      x[0][1]  = PetscMin(xstart + maxwidth + overlap,M);
      /* Horizontal domain limits without an overlap. */
      x[1][0] = xstart;
      x[1][1] = PetscMin(xstart+maxwidth,M);
      /*
         Determine whether this domain intersects this processor's ownership range of pc->pmat.
         Do this twice: first for the domains with overlaps, and once without.
         During the first pass create the subcommunicators, and use them on the second pass as well.
      */
      for (q = 0; q < 2; ++q) {
        /*
          domain limits, (xleft, xright) and (ylow, yheigh) are adjusted
          according to whether the domain with an overlap or without is considered.
        */
        xleft = x[q][0]; xright = x[q][1];
        ylow  = y[q][0]; yhigh  = y[q][1];
        PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
        nidx *= dof; /* scale grid-point count to matrix-row count */
        n[q] = nidx;
        /*
         Based on the counted number of indices in the local domain *with an overlap*,
         construct a subcommunicator of all the processors supporting this domain.
         Observe that a domain with an overlap might have nontrivial local support,
         while the domain without an overlap might not.  Hence, the decision to participate
         in the subcommunicator must be based on the domain with an overlap.
         */
        if (q == 0) {
          /* Ranks with no overlap-intersection opt out via MPI_UNDEFINED. */
          if (nidx) {
            color = 1;
          } else {
            color = MPI_UNDEFINED;
          }
          /* NOTE(review): subcomm created here is handed to ISCreateGeneral below;
             no explicit MPI_Comm_free in this routine — confirm the IS takes
             ownership of the reference. */
          ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
        }
        /*
         Proceed only if the number of local indices *with an overlap* is nonzero.
         */
        if (n[0]) {
          if (q == 0) {
            xis = is;
          }
          if (q == 1) {
            /*
             The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
             Moreover, if the overlap is zero, the two ISs are identical.
             */
            if (overlap == 0) {
              (*is_local)[s] = (*is)[s];
              ierr = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
              continue;
            } else {
              xis = is_local;
              subcomm = ((PetscObject)(*is)[s])->comm;
            }
          }/* if (q == 1) */
          idx = PETSC_NULL;
          ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
          if (nidx) {
            /* Enumerate the owned rows of this subdomain: first/last rows use the
               clipped column bounds, interior rows use the full [xleft,xright). */
            k    = 0;
            for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
              PetscInt x0 = (jj==ylow_loc)?xleft_loc:xleft;
              PetscInt x1 = (jj==yhigh_loc-1)?xright_loc:xright;
              kk = dof*(M*jj + x0);
              for (ii=x0; ii<x1; ++ii) {
                for (d = 0; d < dof; ++d) {
                  idx[k++] = kk++;
                }
              }
            }
          }
          /* PETSC_OWN_POINTER: the IS takes ownership of idx. */
          ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
        }/* if (n[0]) */
      }/* for (q = 0; q < 2; ++q) */
      if (n[0]) {
        ++s;
      }
      xstart += maxwidth;
    }/* for (i = 0; i < Mdomains; ++i) */
    ystart += maxheight;
  }/* for (j = 0; j < Ndomains; ++j) */
  PetscFunctionReturn(0);
}
1690 
1691 #undef __FUNCT__
1692 #define __FUNCT__ "PCGASMGetSubdomains"
1693 /*@C
1694     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1695     for the additive Schwarz preconditioner.
1696 
1697     Not Collective
1698 
1699     Input Parameter:
1700 .   pc - the preconditioner context
1701 
1702     Output Parameters:
1703 +   n   - the number of subdomains for this processor (default value = 1)
1704 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be PETSC_NULL)
1705 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be PETSC_NULL)
1706 
1707 
1708     Notes:
1709     The user is responsible for destroying the ISs and freeing the returned arrays.
1710     The IS numbering is in the parallel, global numbering of the vector.
1711 
1712     Level: advanced
1713 
1714 .keywords: PC, GASM, get, subdomains, additive Schwarz
1715 
1716 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1717           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1718 @*/
1719 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1720 {
1721   PC_GASM         *osm;
1722   PetscErrorCode ierr;
1723   PetscBool      match;
1724   PetscInt       i;
1725 
1726   PetscFunctionBegin;
1727   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1728   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1729   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1730   osm = (PC_GASM*)pc->data;
1731   if (n)  *n  = osm->n;
1732   if (iis) {
1733     ierr = PetscMalloc(osm->n*sizeof(IS), iis);CHKERRQ(ierr);
1734   }
1735   if (ois) {
1736     ierr = PetscMalloc(osm->n*sizeof(IS), ois);CHKERRQ(ierr);
1737   }
1738   if (iis || ois) {
1739     for (i = 0; i < osm->n; ++i) {
1740       if (iis) (*iis)[i] = osm->iis[i];
1741       if (ois) (*ois)[i] = osm->ois[i];
1742     }
1743   }
1744   PetscFunctionReturn(0);
1745 }
1746 
1747 #undef __FUNCT__
1748 #define __FUNCT__ "PCGASMGetSubmatrices"
1749 /*@C
1750     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1751     only) for the additive Schwarz preconditioner.
1752 
1753     Not Collective
1754 
1755     Input Parameter:
1756 .   pc - the preconditioner context
1757 
1758     Output Parameters:
1759 +   n   - the number of matrices for this processor (default value = 1)
1760 -   mat - the matrices
1761 
1762     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1763            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1764            subdomains were defined using PCGASMSetTotalSubdomains().
1765     Level: advanced
1766 
1767 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1768 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1770           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1771 @*/
1772 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1773 {
1774   PC_GASM         *osm;
1775   PetscErrorCode ierr;
1776   PetscBool      match;
1777 
1778   PetscFunctionBegin;
1779   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1780   PetscValidIntPointer(n,2);
1781   if (mat) PetscValidPointer(mat,3);
1782   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1783   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1784   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1785   osm = (PC_GASM*)pc->data;
1786   if (n)   *n   = osm->n;
1787   if (mat) *mat = osm->pmat;
1788   PetscFunctionReturn(0);
1789 }
1790