xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision b3e8af9f4df2caca5f1b00a1692f5f6a371a2808)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
5   In this version each processor may have any number of subdomains and a subdomain may live on multiple
6   processors.
7 
8        N    - total number of true subdomains on all processors
9        n    - actual number of subdomains on this processor
10        nmax - maximum number of subdomains per processor
11 */
12 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
13 
typedef struct {
  PetscInt   N,n,nmax;            /* N: total number of true subdomains on all processors; n: subdomains on this processor; nmax: max subdomains on any one processor */
  PetscInt   overlap;             /* overlap requested by user; -1 means the user supplied already-overlapping subdomains */
  KSP        *ksp;                /* linear solvers for each block (sequential, one per local subdomain) */
  Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  IS         gis, gis_local;      /* merged ISs (concatenation of is[]/is_local[] over local subdomains) */
  VecScatter grestriction;        /* merged restriction: global vector -> gx */
  VecScatter gprolongation;       /* merged prolongation: gy -> global vector */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each local subdomain (same as subdomain with the overlap removed); may be NULL */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType  type;               /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same (used only by PCView_GASM) */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_GASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_GASM"
34 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
35 {
36   PC_GASM         *osm = (PC_GASM*)pc->data;
37   PetscErrorCode ierr;
38   PetscMPIInt    rank;
39   PetscInt       i,bsz;
40   PetscBool      iascii,isstring;
41   PetscViewer    sviewer;
42 
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
47   if (iascii) {
48     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
49     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
50     if (osm->nmax > 0)     {ierr = PetscSNPrintf(blocks,sizeof blocks,"max number of  subdomain blocks = %D",osm->nmax);CHKERRQ(ierr);}
51     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n);CHKERRQ(ierr);
52     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
53     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: restriction/interpolation type - %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
54     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
55     if (osm->same_local_solves) {
56       if (osm->ksp) {
57         ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59         if (!rank) {
60           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
62           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63         }
64         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65       }
66     } else {
67       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
69       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
70       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
71       for (i=0; i<osm->nmax; i++) {
72         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
73         if (i < osm->n) {
74           ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
75           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
76           ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
77           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
78         }
79         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
80       }
81       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
82       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
83     }
84   } else if (isstring) {
85     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCGASMTypes[osm->type]);CHKERRQ(ierr);
86     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
87     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
88     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
89   } else {
90     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
91   }
92   PetscFunctionReturn(0);
93 }
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "PCGASMPrintSubdomains"
97 static PetscErrorCode PCGASMPrintSubdomains(PC pc)
98 {
99   PC_GASM         *osm  = (PC_GASM*)pc->data;
100   const char     *prefix;
101   char           fname[PETSC_MAX_PATH_LEN+1];
102   PetscViewer    viewer;
103   PetscInt       i,j,nidx;
104   const PetscInt *idx;
105   PetscErrorCode ierr;
106 
107   PetscFunctionBegin;
108   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
109   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
110   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
111   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
112   for (i=0;i<osm->n;++i) {
113     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
114     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
115     for (j=0; j<nidx; j++) {
116       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
117     }
118     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
119     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
120     if (osm->is_local) {
121       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
123       for (j=0; j<nidx; j++) {
124         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
125       }
126       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
127       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
128     }
129   }
130   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
131   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
/*
   PCSetUp_GASM - Sets up the GASM preconditioner: determines/creates the
   subdomain index sets, builds the merged work vectors gx/gy and the
   restriction/prolongation scatters, extracts the subdomain matrices, and
   creates one sequential KSP per local subdomain.
*/
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM         *osm  = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i;
  PetscMPIInt    rank, size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  PetscInt       dn;       /* Number of indices in a single subdomain assigned to this processor. */
  const PetscInt *didx;    /* Indices from a single subdomain assigned to this processor. */
  PetscInt       ddn;      /* Number of indices in all subdomains assigned to this processor. */
  PetscInt       *ddidx;   /* Indices of all subdomains assigned to this processor. */
  IS             gid;      /* Identity IS of the size of all subdomains assigned to this processor. */
  Vec            x,y;
  PetscScalar    *gxarray, *gyarray;
  PetscInt       gfirst, glast;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  if (!pc->setupcalled) {

    /* For known-symmetric problems default to the symmetric (BASIC) variant,
       unless the user explicitly set a type. */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_GASM_BASIC; }
    }

    if (osm->N == PETSC_DECIDE && osm->n < 1) {
      /* no subdomains given, use one per processor */
      osm->nmax = osm->n = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->N = size;
    } else if (osm->N == PETSC_DECIDE) {
      PetscInt inwork[2], outwork[2];
      /* determine global number of subdomains and the max number of local subdomains */
      inwork[0] = inwork[1] = osm->n;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->nmax = outwork[0];
      osm->N    = outwork[1];
    }
    if (!osm->is){ /* create the index sets */
      ierr = PCGASMCreateSubdomains(pc->pmat,osm->n,&osm->is);CHKERRQ(ierr);
    }
    if (!osm->is_local) {
      /*
         This indicates that osm->is should define a nonoverlapping decomposition
         (there is no way to really guarantee that if subdomains are set by the user through PCGASMSetLocalSubdomains,
          but the assumption is that either the user does the right thing, or subdomains in osm->is have been created
          via PCGASMCreateSubdomains, which guarantees a nonoverlapping decomposition).
         Therefore, osm->is will be used to define osm->is_local.
         If a nonzero overlap has been requested by the user, then osm->is will be expanded and will overlap,
         so osm->is_local should obtain a copy of osm->is while they are still (presumably) nonoverlapping.
         Otherwise (no overlap has been requested), osm->is_local are simply aliases for osm->is.
      */
      ierr = PetscMalloc(osm->n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    /* Beyond this point osm->is_local is not null. */
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->sort_indices) {
      /* NOTE(review): sorting is presumably required by the submatrix extraction
         below -- confirm against MatGetSubMatrices requirements. */
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
      }
    }
    /* Merge the ISs, create merged vectors and scatter contexts. */
    /* Restriction ISs. */
    /* First pass: total number of overlapping-subdomain indices on this process. */
    ddn = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->is[i],&dn);          CHKERRQ(ierr);
      ddn += dn;
    }
    ierr = PetscMalloc(ddn*sizeof(PetscInt), &ddidx); CHKERRQ(ierr);
    /* Second pass: concatenate the indices of all local subdomains into ddidx. */
    ddn = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->is[i],&dn); CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&didx); CHKERRQ(ierr);
      ierr = PetscMemcpy(ddidx+ddn, didx, sizeof(PetscInt)*dn); CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->is[i], &didx); CHKERRQ(ierr);
      ddn += dn;
    }
    /* osm->gis takes ownership of ddidx (PETSC_OWN_POINTER). */
    ierr = ISCreateGeneral(((PetscObject)(pc))->comm, ddn, ddidx, PETSC_OWN_POINTER, &osm->gis); CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(((PetscObject)pc)->comm, ddn, PETSC_DECIDE, &osm->gx); CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);                                     CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &gfirst, &glast);                     CHKERRQ(ierr);
    /* gid enumerates this process's slots of gx; used as the scatter target. */
    ierr = ISCreateStride(((PetscObject)pc)->comm,ddn,gfirst,1, &gid);              CHKERRQ(ierr);
    ierr = VecScatterCreate(x,osm->gis,osm->gx,gid, &(osm->grestriction));     CHKERRQ(ierr);
    ierr = ISDestroy(gid);                                                     CHKERRQ(ierr);
    /* Prolongation ISs */
    { PetscInt       dn_local;       /* Number of indices in the local part of a single domain assigned to this processor. */
      const PetscInt *didx_local;    /* Global indices from the local part of a single domain assigned to this processor. */
      PetscInt       ddn_local;      /* Number of indices in the local part of the disjoint union all domains assigned to this processor. */
      PetscInt       *ddidx_local;   /* Global indices of the local part of the disjoint union of all domains assigned to this processor. */
      /**/
      ISLocalToGlobalMapping ltog;          /* Map from global to local indices on the disjoint union of subdomains: "local" ind's run from 0 to ddn-1. */
      PetscInt              *ddidx_llocal;  /* Mapped local indices of the disjoint union of local parts of subdomains. */
      PetscInt               ddn_llocal;    /* Number of indices in ddidx_llocal; must equal ddn_local, or else gis_local is not a sub-IS of gis. */
      IS                     gis_llocal;    /* IS with ddidx_llocal indices. */
      PetscInt               j;
      /* Concatenate the (nonoverlapping) local parts of all subdomains, as above. */
      ddn_local = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
        ddn_local += dn_local;
      }
      ierr = PetscMalloc(ddn_local*sizeof(PetscInt), &ddidx_local); CHKERRQ(ierr);
      ddn_local = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&didx_local); CHKERRQ(ierr);
        ierr = PetscMemcpy(ddidx_local+ddn_local, didx_local, sizeof(PetscInt)*dn_local); CHKERRQ(ierr);
        ierr = ISRestoreIndices(osm->is_local[i], &didx_local); CHKERRQ(ierr);
        ddn_local += dn_local;
      }
      ierr = PetscMalloc(sizeof(PetscInt)*ddn_local, &ddidx_llocal); CHKERRQ(ierr);
      /* gis_local takes ownership of ddidx_local; the storage remains valid and
         is still read through the same pointer two calls below. */
      ierr = ISCreateGeneral(((PetscObject)pc)->comm, ddn_local, ddidx_local, PETSC_OWN_POINTER, &(osm->gis_local)); CHKERRQ(ierr);
      /* Localize the nonoverlapping indices with respect to the merged overlapping IS gis. */
      ierr = ISLocalToGlobalMappingCreateIS(osm->gis,&ltog);CHKERRQ(ierr);
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,ddn_local,ddidx_local,&ddn_llocal,ddidx_llocal);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
      /* Every nonoverlapping index must lie inside some overlapping subdomain. */
      if (ddn_llocal != ddn_local) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"gis_local contains %D indices outside of gis", ddn_llocal - ddn_local);
      /* Now convert these localized indices into the global indices into the merged output vector. */
      ierr = VecGetOwnershipRange(osm->gy, &gfirst, &glast); CHKERRQ(ierr);
      for(j=0; j < ddn_llocal; ++j) {
        ddidx_llocal[j] += gfirst;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,ddn_llocal,ddidx_llocal,PETSC_OWN_POINTER,&gis_llocal); CHKERRQ(ierr);
      ierr = VecScatterCreate(y,osm->gis_local,osm->gy,gis_llocal,&osm->gprolongation);              CHKERRQ(ierr);
      ierr = ISDestroy(gis_llocal);CHKERRQ(ierr);
    }
    /* Create the subdomain work vectors: osm->x[i]/osm->y[i] are sequential
       vectors aliasing contiguous slices of the merged vectors gx/gy. */
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &gfirst, &glast);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx, &gxarray);                                     CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy, &gyarray);                                     CHKERRQ(ierr);
    /* ddn accumulates the running offset; dn is assigned inside the body before
       the increment expression runs, so the first 'ddn += dn' is well-defined. */
    for (i=0, ddn=0; i<osm->n; ++i, ddn += dn) {
      ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gxarray+ddn,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gyarray+ddn,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx, &gxarray); CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy, &gyarray); CHKERRQ(ierr);
    /* The template vectors from MatGetVecs were only needed to build the scatters. */
    ierr = VecDestroy(x); CHKERRQ(ierr);
    ierr = VecDestroy(y); CHKERRQ(ierr);
    /* Create the local solvers */
    /* NOTE(review): sizeof(KSP *) is used where the array holds KSP handles;
       harmless only because KSP is itself a pointer type -- confirm. */
    ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  if(size > 1) {
    ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->is, osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  }
  else {
    ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->is, osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  }
  if (scall == MAT_INITIAL_MATRIX) {
    /* Newly created submatrices inherit the options prefix of the global matrix. */
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over submatrices putting them into local ksp
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
358 
359 #undef __FUNCT__
360 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
361 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
362 {
363   PC_GASM         *osm = (PC_GASM*)pc->data;
364   PetscErrorCode ierr;
365   PetscInt       i;
366 
367   PetscFunctionBegin;
368   for (i=0; i<osm->n; i++) {
369     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
370   }
371   PetscFunctionReturn(0);
372 }
373 
#undef __FUNCT__
#define __FUNCT__ "PCApply_GASM"
/*
   PCApply_GASM - Applies the GASM preconditioner: restrict x onto the merged
   subdomain vector gx, solve each subdomain system with its local KSP, then
   scatter-add the subdomain solutions from gy back into y.
*/
static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_GASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx); CHKERRQ(ierr);
  }
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* Gather the subdomain pieces of x into the merged work vector gx. */
  ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  ierr = VecZeroEntries(y);CHKERRQ(ierr); /* y accumulates via ADD_VALUES below */
  ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  /* do the local solves; osm->x[i]/osm->y[i] alias slices of gx/gy (see PCSetUp_GASM) */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  /* Scatter-add the subdomain solutions from gy back into the global result y. */
  ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
408 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_GASM"
/*
   PCApplyTranspose_GASM - Applies the transpose of the GASM preconditioner:
   same scatter structure as PCApply_GASM, but with transposed subdomain
   solves and with the RESTRICT/INTERPOLATE roles of the type flags swapped.
*/
static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_GASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_GASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* Gather the subdomain pieces of x into the merged work vector gx. */
  ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  ierr = VecZeroEntries(y);CHKERRQ(ierr); /* y accumulates via ADD_VALUES below */
  ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  /* do the local solves (transposed); osm->x[i]/osm->y[i] alias slices of gx/gy */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  /* Scatter-add the subdomain solutions from gy back into the global result y. */
  ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
446 
447 #undef __FUNCT__
448 #define __FUNCT__ "PCDestroy_GASM"
449 static PetscErrorCode PCDestroy_GASM(PC pc)
450 {
451   PC_GASM         *osm = (PC_GASM*)pc->data;
452   PetscErrorCode ierr;
453   PetscInt       i;
454 
455   PetscFunctionBegin;
456   if (osm->ksp) {
457     for (i=0; i<osm->n; i++) {
458       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
459     }
460     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
461   }
462   if (osm->pmat) {
463     if (osm->n > 0) {
464       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
465     }
466   }
467   for (i=0; i<osm->n; i++) {
468     ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
469     ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
470   }
471   ierr = PetscFree(osm->x);CHKERRQ(ierr);
472   ierr = PetscFree(osm->y);CHKERRQ(ierr);
473   ierr = VecDestroy(osm->gx); CHKERRQ(ierr);
474   ierr = VecDestroy(osm->gy); CHKERRQ(ierr);
475 
476   ierr = VecScatterDestroy(osm->grestriction); CHKERRQ(ierr);
477   ierr = VecScatterDestroy(osm->gprolongation); CHKERRQ(ierr);
478 
479   if (osm->is) {
480     ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
481   }
482   ierr = PetscFree(osm);CHKERRQ(ierr);
483   PetscFunctionReturn(0);
484 }
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "PCSetFromOptions_GASM"
488 static PetscErrorCode PCSetFromOptions_GASM(PC pc) {
489   PC_GASM         *osm = (PC_GASM*)pc->data;
490   PetscErrorCode ierr;
491   PetscInt       blocks,ovl;
492   PetscBool      symset,flg;
493   PCGASMType      gasmtype;
494 
495   PetscFunctionBegin;
496   /* set the type to symmetric if matrix is symmetric */
497   if (!osm->type_set && pc->pmat) {
498     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
499     if (symset && flg) { osm->type = PC_GASM_BASIC; }
500   }
501   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
502     ierr = PetscOptionsInt("-pc_gasm_blocks","Number of subdomains","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
503     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks);CHKERRQ(ierr); }
504     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of grid points overlap","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
505     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
506     flg  = PETSC_FALSE;
507     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
508     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
509   ierr = PetscOptionsTail();CHKERRQ(ierr);
510   PetscFunctionReturn(0);
511 }
512 
513 /*------------------------------------------------------------------------------------*/
514 
515 EXTERN_C_BEGIN
516 #undef __FUNCT__
517 #define __FUNCT__ "PCGASMSetLocalSubdomains_GASM"
518 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains_GASM(PC pc,PetscInt n,IS is[],IS is_local[])
519 {
520   PC_GASM         *osm = (PC_GASM*)pc->data;
521   PetscErrorCode ierr;
522   PetscInt       i;
523 
524   PetscFunctionBegin;
525   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
526   if (pc->setupcalled && (n != osm->n || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetLocalSubdomains() should be called before calling PCSetUp().");
527 
528   if (!pc->setupcalled) {
529     osm->n            = n;
530     osm->is           = 0;
531     osm->is_local     = 0;
532     if (is) {
533       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
534     }
535     if (is_local) {
536       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
537     }
538     if (osm->is) {
539       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
540     }
541     if (is) {
542       ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
543       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
544       /* Flag indicating that the user has set overlapping subdomains so PCGASM should not increase their size. */
545       osm->overlap = -1;
546     }
547     if (is_local) {
548       ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
549       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
550     }
551   }
552   PetscFunctionReturn(0);
553 }
554 EXTERN_C_END
555 
556 EXTERN_C_BEGIN
557 #undef __FUNCT__
558 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
559 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N) {
560   PC_GASM         *osm = (PC_GASM*)pc->data;
561   PetscErrorCode ierr;
562   PetscMPIInt    rank,size;
563   PetscInt       n;
564 
565   PetscFunctionBegin;
566   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
567 
568   /*
569      Split the subdomains equally among all processors
570   */
571   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
572   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
573   n = N/size + ((N % size) > rank);
574   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
575   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
576   if (!pc->setupcalled) {
577     if (osm->is) {
578       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
579     }
580     osm->N            = N;
581     osm->n            = n;
582     osm->is           = 0;
583     osm->is_local     = 0;
584   }
585   PetscFunctionReturn(0);
586 }/* PCGASMSetTotalSubdomains_GASM() */
587 EXTERN_C_END
588 
589 EXTERN_C_BEGIN
590 #undef __FUNCT__
591 #define __FUNCT__ "PCGASMSetOverlap_GASM"
592 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
593 {
594   PC_GASM *osm = (PC_GASM*)pc->data;
595 
596   PetscFunctionBegin;
597   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
598   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
599   if (!pc->setupcalled) {
600     osm->overlap = ovl;
601   }
602   PetscFunctionReturn(0);
603 }
604 EXTERN_C_END
605 
606 EXTERN_C_BEGIN
607 #undef __FUNCT__
608 #define __FUNCT__ "PCGASMSetType_GASM"
609 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType_GASM(PC pc,PCGASMType type)
610 {
611   PC_GASM *osm = (PC_GASM*)pc->data;
612 
613   PetscFunctionBegin;
614   osm->type     = type;
615   osm->type_set = PETSC_TRUE;
616   PetscFunctionReturn(0);
617 }
618 EXTERN_C_END
619 
620 EXTERN_C_BEGIN
621 #undef __FUNCT__
622 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
623 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
624 {
625   PC_GASM *osm = (PC_GASM*)pc->data;
626 
627   PetscFunctionBegin;
628   osm->sort_indices = doSort;
629   PetscFunctionReturn(0);
630 }
631 EXTERN_C_END
632 
633 EXTERN_C_BEGIN
634 #undef __FUNCT__
635 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
636 /*
637    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
638         In particular, it would upset the global subdomain number calculation.
639 */
640 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
641 {
642   PC_GASM         *osm = (PC_GASM*)pc->data;
643   PetscErrorCode ierr;
644 
645   PetscFunctionBegin;
646   if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
647 
648   if (n) {
649     *n = osm->n;
650   }
651   if (first) {
652     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
653     *first -= osm->n;
654   }
655   if (ksp) {
656     /* Assume that local solves are now different; not necessarily
657        true though!  This flag is used only for PCView_GASM() */
658     *ksp                   = osm->ksp;
659     osm->same_local_solves = PETSC_FALSE;
660   }
661   PetscFunctionReturn(0);
662 }/* PCGASMGetSubKSP_GASM() */
663 EXTERN_C_END
664 
665 
666 #undef __FUNCT__
667 #define __FUNCT__ "PCGASMSetLocalSubdomains"
668 /*@C
669     PCGASMSetLocalSubdomains - Sets the local subdomains (for this processor
670     only) for the additive Schwarz preconditioner.
671 
672     Collective on PC
673 
674     Input Parameters:
675 +   pc - the preconditioner context
676 .   n - the number of subdomains for this processor (default value = 1)
677 .   is - the index set that defines the subdomains for this processor
678          (or PETSC_NULL for PETSc to determine subdomains)
679 -   is_local - the index sets that define the local part of the subdomains for this processor
680          (or PETSC_NULL to use the default of 1 subdomain per process)
681 
682     Notes:
683     The IS numbering is in the parallel, global numbering of the vector.
684 
685     By default the GASM preconditioner uses 1 block per processor.
686 
687     Use PCGASMSetTotalSubdomains() to set the subdomains for all processors.
688 
689     Level: advanced
690 
691 .keywords: PC, GASM, set, local, subdomains, additive Schwarz
692 
693 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
694           PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
695 @*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to PCGASMSetLocalSubdomains_GASM() if composed on pc; a silent no-op for non-GASM PC types */
  ierr = PetscTryMethod(pc,"PCGASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
705 
706 #undef __FUNCT__
707 #define __FUNCT__ "PCGASMSetTotalSubdomains"
708 /*@C
709     PCGASMSetTotalSubdomains - Sets the subdomains for all processor for the
710     additive Schwarz preconditioner.  Either all or no processors in the
711     PC communicator must call this routine, with the same index sets.
712 
713     Collective on PC
714 
    Input Parameters:
+   pc - the preconditioner context
-   N - the total number of subdomains for all processors
722 
723     Options Database Key:
724     To set the total number of subdomain blocks rather than specify the
725     index sets, use the option
726 .    -pc_gasm_blocks <blks> - Sets total blocks
727 
728     Notes:
729     Currently you cannot use this to set the actual subdomains with the argument is.
730 
731     By default the GASM preconditioner uses 1 block per processor.
732 
733     These index sets cannot be destroyed until after completion of the
734     linear solves for which the GASM preconditioner is being used.
735 
736     Use PCGASMSetLocalSubdomains() to set local subdomains.
737 
738     Level: advanced
739 
740 .keywords: PC, GASM, set, total, global, subdomains, additive Schwarz
741 
742 .seealso: PCGASMSetLocalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
743           PCGASMCreateSubdomains2D()
744 @*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains(PC pc,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to PCGASMSetTotalSubdomains_GASM() if composed on pc; a silent no-op for non-GASM PC types */
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt),(pc,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
754 
755 #undef __FUNCT__
756 #define __FUNCT__ "PCGASMSetOverlap"
757 /*@
758     PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
759     additive Schwarz preconditioner.  Either all or no processors in the
760     PC communicator must call this routine.
761 
762     Logically Collective on PC
763 
764     Input Parameters:
765 +   pc  - the preconditioner context
766 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
767 
768     Options Database Key:
769 .   -pc_gasm_overlap <ovl> - Sets overlap
770 
771     Notes:
772     By default the GASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCGASMSetTotalSubdomains() and
774     PCGASMSetLocalSubdomains() (and the option -pc_gasm_blocks <blks>).
775 
776     The overlap defaults to 1, so if one desires that no additional
777     overlap be computed beyond what may have been set with a call to
778     PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(), then ovl
779     must be set to be 0.  In particular, if one does not explicitly set
780     the subdomains an application code, then all overlap would be computed
781     internally by PETSc, and using an overlap of 0 would result in an GASM
782     variant that is equivalent to the block Jacobi preconditioner.
783 
784     Note that one can define initial index sets with any overlap via
785     PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(); the routine
786     PCGASMSetOverlap() merely allows PETSc to extend that overlap further
787     if desired.
788 
789     Level: intermediate
790 
791 .keywords: PC, GASM, set, overlap
792 
793 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
794           PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
795 @*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* ovl must be the same on all ranks (checked in debug builds) */
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatch to PCGASMSetOverlap_GASM() if composed on pc; a silent no-op for non-GASM PC types */
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "PCGASMSetType"
809 /*@
810     PCGASMSetType - Sets the type of restriction and interpolation used
811     for local problems in the additive Schwarz method.
812 
813     Logically Collective on PC
814 
815     Input Parameters:
816 +   pc  - the preconditioner context
817 -   type - variant of GASM, one of
818 .vb
819       PC_GASM_BASIC       - full interpolation and restriction
820       PC_GASM_RESTRICT    - full restriction, local processor interpolation
821       PC_GASM_INTERPOLATE - full interpolation, local processor restriction
822       PC_GASM_NONE        - local processor restriction and interpolation
823 .ve
824 
825     Options Database Key:
826 .   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
827 
828     Level: intermediate
829 
830 .keywords: PC, GASM, set, type
831 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
833           PCGASMCreateSubdomains2D()
834 @*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* type must be the same on all ranks (checked in debug builds) */
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatch to PCGASMSetType_GASM() if composed on pc; a silent no-op for non-GASM PC types */
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
845 
846 #undef __FUNCT__
847 #define __FUNCT__ "PCGASMSetSortIndices"
848 /*@
849     PCGASMSetSortIndices - Determines whether subdomain indices are sorted.
850 
851     Logically Collective on PC
852 
853     Input Parameters:
854 +   pc  - the preconditioner context
855 -   doSort - sort the subdomain indices
856 
857     Level: intermediate
858 
859 .keywords: PC, GASM, set, type
860 
861 .seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
862           PCGASMCreateSubdomains2D()
863 @*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* doSort must be the same on all ranks (checked in debug builds) */
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatch to PCGASMSetSortIndices_GASM() if composed on pc; a silent no-op for non-GASM PC types */
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
874 
875 #undef __FUNCT__
876 #define __FUNCT__ "PCGASMGetSubKSP"
877 /*@C
878    PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
879    this processor.
880 
881    Collective on PC iff first_local is requested
882 
883    Input Parameter:
884 .  pc - the preconditioner context
885 
886    Output Parameters:
887 +  n_local - the number of blocks on this processor or PETSC_NULL
888 .  first_local - the global number of the first block on this processor or PETSC_NULL,
889                  all processors must request or all must pass PETSC_NULL
890 -  ksp - the array of KSP contexts
891 
892    Note:
893    After PCGASMGetSubKSP() the array of KSPes is not to be freed
894 
895    Currently for some matrix implementations only 1 block per processor
896    is supported.
897 
898    You must call KSPSetUp() before calling PCGASMGetSubKSP().
899 
900    Level: advanced
901 
902 .keywords: PC, GASM, additive Schwarz, get, sub, KSP, context
903 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMSetOverlap(),
905           PCGASMCreateSubdomains2D(),
906 @*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() (unlike PetscTryMethod() used by the setters) raises an
     error if pc is not of a type that composed "PCGASMGetSubKSP_C" */
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
916 
917 /* -------------------------------------------------------------------------------------*/
918 /*MC
919    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
920            its own KSP object.
921 
922    Options Database Keys:
+  -pc_gasm_truelocal - Activates PCGASMSetUseTrueLocal()
924 .  -pc_gasm_blocks <blks> - Sets total blocks
925 .  -pc_gasm_overlap <ovl> - Sets overlap
926 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
927 
928      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
929       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
930       -pc_gasm_type basic to use the standard GASM.
931 
932    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
933      than one processor. Defaults to one block per processor.
934 
935      To set options on the solvers for each block append -sub_ to all the KSP, and PC
936         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
937 
938      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
939          and set the options directly on the resulting KSP object (you can access its PC
940          with KSPGetPC())
941 
942 
943    Level: beginner
944 
945    Concepts: additive Schwarz method
946 
947     References:
948     An additive variant of the Schwarz alternating method for the case of many subregions
949     M Dryja, OB Widlund - Courant Institute, New York University Technical report
950 
951     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
952     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
953 
954 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
955            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetLocalSubdomains(),
956            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
957 
958 M*/
959 
960 EXTERN_C_BEGIN
961 #undef __FUNCT__
962 #define __FUNCT__ "PCCreate_GASM"
963 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_GASM(PC pc)
964 {
965   PetscErrorCode ierr;
966   PC_GASM         *osm;
967 
968   PetscFunctionBegin;
969   ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
970   osm->N                 = PETSC_DECIDE;
971   osm->n                 = 0;
972   osm->nmax              = 0;
973   osm->overlap           = 1;
974   osm->ksp               = 0;
975   osm->grestriction      = 0;
976   osm->gprolongation     = 0;
977   osm->gx                = 0;
978   osm->gy                = 0;
979   osm->x                 = 0;
980   osm->y                 = 0;
981   osm->is                = 0;
982   osm->is_local          = 0;
983   osm->pmat              = 0;
984   osm->type              = PC_GASM_RESTRICT;
985   osm->same_local_solves = PETSC_TRUE;
986   osm->sort_indices      = PETSC_TRUE;
987 
988   pc->data                   = (void*)osm;
989   pc->ops->apply             = PCApply_GASM;
990   pc->ops->applytranspose    = PCApplyTranspose_GASM;
991   pc->ops->setup             = PCSetUp_GASM;
992   pc->ops->destroy           = PCDestroy_GASM;
993   pc->ops->setfromoptions    = PCSetFromOptions_GASM;
994   pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
995   pc->ops->view              = PCView_GASM;
996   pc->ops->applyrichardson   = 0;
997 
998   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetLocalSubdomains_C","PCGASMSetLocalSubdomains_GASM",
999                     PCGASMSetLocalSubdomains_GASM);CHKERRQ(ierr);
1000   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
1001                     PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
1002   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
1003                     PCGASMSetOverlap_GASM);CHKERRQ(ierr);
1004   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
1005                     PCGASMSetType_GASM);CHKERRQ(ierr);
1006   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
1007                     PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
1008   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
1009                     PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
1010   PetscFunctionReturn(0);
1011 }
1012 EXTERN_C_END
1013 
1014 
1015 #undef __FUNCT__
1016 #define __FUNCT__ "PCGASMCreateSubdomains"
1017 /*@C
1018    PCGASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1020 
1021    Collective
1022 
1023    Input Parameters:
1024 +  A - The global matrix operator
1025 -  n - the number of local blocks
1026 
1027    Output Parameters:
1028 .  outis - the array of index sets defining the subdomains
1029 
1030    Level: advanced
1031 
1032    Note: this generates nonoverlapping subdomains; the PCGASM will generate the overlap
1033     from these if you use PCGASMSetLocalSubdomains()
1034 
1035     In the Fortran version you must provide the array outis[] already allocated of length n.
1036 
1037 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1038 
1039 .seealso: PCGASMSetLocalSubdomains(), PCGASMDestroySubdomains()
1040 @*/
1041 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1042 {
1043   MatPartitioning           mpart;
1044   const char                *prefix;
1045   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1046   PetscMPIInt               size;
1047   PetscInt                  i,j,rstart,rend,bs;
1048   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1049   Mat                       Ad = PETSC_NULL, adj;
1050   IS                        ispart,isnumb,*is;
1051   PetscErrorCode            ierr;
1052 
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1055   PetscValidPointer(outis,3);
1056   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1057 
1058   /* Get prefix, row distribution, and block size */
1059   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1060   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1061   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1062   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1063 
1064   /* Get diagonal block from matrix if possible */
1065   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1066   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1067   if (f) {
1068     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1069   } else if (size == 1) {
1070     iscopy = PETSC_FALSE; Ad = A;
1071   } else {
1072     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1073   }
1074   if (Ad) {
1075     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1076     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1077   }
1078   if (Ad && n > 1) {
1079     PetscBool  match,done;
1080     /* Try to setup a good matrix partitioning if available */
1081     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1082     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1083     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1084     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1085     if (!match) {
1086       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1087     }
1088     if (!match) { /* assume a "good" partitioner is available */
1089       PetscInt na,*ia,*ja;
1090       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1091       if (done) {
1092 	/* Build adjacency matrix by hand. Unfortunately a call to
1093 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1094 	   remove the block-aij structure and we cannot expect
1095 	   MatPartitioning to split vertices as we need */
1096 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1097 	nnz = 0;
1098 	for (i=0; i<na; i++) { /* count number of nonzeros */
1099 	  len = ia[i+1] - ia[i];
1100 	  row = ja + ia[i];
1101 	  for (j=0; j<len; j++) {
1102 	    if (row[j] == i) { /* don't count diagonal */
1103 	      len--; break;
1104 	    }
1105 	  }
1106 	  nnz += len;
1107 	}
1108 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1109 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1110 	nnz    = 0;
1111 	iia[0] = 0;
1112 	for (i=0; i<na; i++) { /* fill adjacency */
1113 	  cnt = 0;
1114 	  len = ia[i+1] - ia[i];
1115 	  row = ja + ia[i];
1116 	  for (j=0; j<len; j++) {
1117 	    if (row[j] != i) { /* if not diagonal */
1118 	      jja[nnz+cnt++] = row[j];
1119 	    }
1120 	  }
1121 	  nnz += cnt;
1122 	  iia[i+1] = nnz;
1123 	}
1124 	/* Partitioning of the adjacency matrix */
1125 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1126 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1127 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1128 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1129 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1130 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1131 	foundpart = PETSC_TRUE;
1132       }
1133       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1134     }
1135     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1136   }
1137   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1138 
1139   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1140   *outis = is;
1141 
1142   if (!foundpart) {
1143 
1144     /* Partitioning by contiguous chunks of rows */
1145 
1146     PetscInt mbs   = (rend-rstart)/bs;
1147     PetscInt start = rstart;
1148     for (i=0; i<n; i++) {
1149       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1150       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1151       start += count;
1152     }
1153 
1154   } else {
1155 
1156     /* Partitioning by adjacency of diagonal block  */
1157 
1158     const PetscInt *numbering;
1159     PetscInt       *count,nidx,*indices,*newidx,start=0;
1160     /* Get node count in each partition */
1161     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1162     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1163     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1164       for (i=0; i<n; i++) count[i] *= bs;
1165     }
1166     /* Build indices from node numbering */
1167     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1168     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1169     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1170     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1171     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1172     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1173     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1174       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1175       for (i=0; i<nidx; i++)
1176 	for (j=0; j<bs; j++)
1177 	  newidx[i*bs+j] = indices[i]*bs + j;
1178       ierr = PetscFree(indices);CHKERRQ(ierr);
1179       nidx   *= bs;
1180       indices = newidx;
1181     }
1182     /* Shift to get global indices */
1183     for (i=0; i<nidx; i++) indices[i] += rstart;
1184 
1185     /* Build the index sets for each block */
1186     for (i=0; i<n; i++) {
1187       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1188       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1189       start += count[i];
1190     }
1191 
1192     ierr = PetscFree(count);
1193     ierr = PetscFree(indices);
1194     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1195     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1196 
1197   }
1198 
1199   PetscFunctionReturn(0);
1200 }
1201 
1202 #undef __FUNCT__
1203 #define __FUNCT__ "PCGASMDestroySubdomains"
1204 /*@C
1205    PCGASMDestroySubdomains - Destroys the index sets created with
1206    PCGASMCreateSubdomains(). Should be called after setting subdomains
1207    with PCGASMSetLocalSubdomains().
1208 
1209    Collective
1210 
1211    Input Parameters:
1212 +  n - the number of index sets
1213 .  is - the array of index sets
1214 -  is_local - the array of local index sets, can be PETSC_NULL
1215 
1216    Level: advanced
1217 
1218 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1219 
1220 .seealso: PCGASMCreateSubdomains(), PCGASMSetLocalSubdomains()
1221 @*/
1222 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1223 {
1224   PetscInt       i;
1225   PetscErrorCode ierr;
1226   PetscFunctionBegin;
1227   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1228   PetscValidPointer(is,2);
1229   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1230   ierr = PetscFree(is);CHKERRQ(ierr);
1231   if (is_local) {
1232     PetscValidPointer(is_local,3);
1233     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1234     ierr = PetscFree(is_local);CHKERRQ(ierr);
1235   }
1236   PetscFunctionReturn(0);
1237 }
1238 
1239 #undef __FUNCT__
1240 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1241 /*@
1242    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1243    preconditioner for a two-dimensional problem on a regular grid.
1244 
1245    Not Collective
1246 
1247    Input Parameters:
1248 +  m, n - the number of mesh points in the x and y directions
1249 .  M, N - the number of subdomains in the x and y directions
1250 .  dof - degrees of freedom per node
1251 -  overlap - overlap in mesh lines
1252 
1253    Output Parameters:
1254 +  Nsub - the number of subdomains created
1255 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1256 -  is_local - array of index sets defining non-overlapping subdomains
1257 
1258    Note:
   Presently PCGASMCreateSubdomains2D() is valid only for sequential
1260    preconditioners.  More general related routines are
1261    PCGASMSetTotalSubdomains() and PCGASMSetLocalSubdomains().
1262 
1263    Level: advanced
1264 
1265 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1266 
1267 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
1268           PCGASMSetOverlap()
1269 @*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* Only one degree of freedom per mesh point is supported */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub = N*M;  /* one subdomain per cell of the M x N subdomain grid */
  ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
  ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;  /* running index of the current subdomain */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* Extend the y-range by 'overlap' lines, clipped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* Extend the x-range by 'overlap' lines, clipped to the mesh boundary */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);  /* mesh points in the overlapped rectangle */
      ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      /* Collect the natural-ordering indices of the overlapped rectangle, row by row */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* No overlap: local subdomain equals the overlapping one; share the IS via an extra reference */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* Reuse idx (loc <= nidx here) to collect the non-overlapped rectangle's indices */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr = PetscFree(idx);CHKERRQ(ierr);  /* safe: PETSC_COPY_VALUES above copied the indices */
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* Ensure each overlapping index set is sorted */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1325 
1326 #undef __FUNCT__
1327 #define __FUNCT__ "PCGASMGetLocalSubdomains"
1328 /*@C
1329     PCGASMGetLocalSubdomains - Gets the local subdomains (for this processor
1330     only) for the additive Schwarz preconditioner.
1331 
1332     Not Collective
1333 
1334     Input Parameter:
1335 .   pc - the preconditioner context
1336 
1337     Output Parameters:
1338 +   n - the number of subdomains for this processor (default value = 1)
1339 .   is - the index sets that define the subdomains for this processor
1340 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1341 
1342 
1343     Notes:
1344     The IS numbering is in the parallel, global numbering of the vector.
1345 
1346     Level: advanced
1347 
1348 .keywords: PC, GASM, set, local, subdomains, additive Schwarz
1349 
1350 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1351           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubmatrices()
1352 @*/
1353 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1354 {
1355   PC_GASM         *osm;
1356   PetscErrorCode ierr;
1357   PetscBool      match;
1358 
1359   PetscFunctionBegin;
1360   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1361   PetscValidIntPointer(n,2);
1362   if (is) PetscValidPointer(is,3);
1363   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1364   if (!match) {
1365     if (n)  *n  = 0;
1366     if (is) *is = PETSC_NULL;
1367   } else {
1368     osm = (PC_GASM*)pc->data;
1369     if (n)  *n  = osm->n;
1370     if (is) *is = osm->is;
1371     if (is_local) *is_local = osm->is_local;
1372   }
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 #undef __FUNCT__
1377 #define __FUNCT__ "PCGASMGetLocalSubmatrices"
1378 /*@C
1379     PCGASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1380     only) for the additive Schwarz preconditioner.
1381 
1382     Not Collective
1383 
1384     Input Parameter:
1385 .   pc - the preconditioner context
1386 
1387     Output Parameters:
1388 +   n - the number of matrices for this processor (default value = 1)
1389 -   mat - the matrices
1390 
1391 
1392     Level: advanced
1393 
1394 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1395 
1396 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1397           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubdomains()
1398 @*/
1399 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1400 {
1401   PC_GASM         *osm;
1402   PetscErrorCode ierr;
1403   PetscBool      match;
1404 
1405   PetscFunctionBegin;
1406   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1407   PetscValidIntPointer(n,2);
1408   if (mat) PetscValidPointer(mat,3);
1409   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1410   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1411   if (!match) {
1412     if (n)   *n   = 0;
1413     if (mat) *mat = PETSC_NULL;
1414   } else {
1415     osm = (PC_GASM*)pc->data;
1416     if (n)   *n   = osm->n;
1417     if (mat) *mat = osm->pmat;
1418   }
1419   PetscFunctionReturn(0);
1420 }
1421