xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision efe48dd8cf8b935accbbb9f4bcb20bc83865fa4d)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an "generalized" additive Schwarz preconditioner for any Mat implementation.
5   In this version each processor may have any number of subdomains and a subdomain may live on multiple
6   processors.
7 
8        N    - total number of true subdomains on all processors
9        n    - actual number of subdomains on this processor
10        nmax - maximum number of subdomains per processor
11 */
12 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
13 
typedef struct {
  PetscInt   N,n,nmax;            /* N: total number of true subdomains across all processes;
                                     n: number of subdomains owned by this process;
                                     nmax: maximum number of subdomains on any one process */
  PetscInt   overlap;             /* overlap requested by user; -1 means the user supplied already-overlapping subdomains */
  KSP        *ksp;                /* linear solvers for each block (sequential, one per local subdomain) */
  Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  IS         gis, gis_local;      /* merged ISs (disjoint union of the per-subdomain ISs below) */
  VecScatter grestriction;        /* merged restriction: global vector -> gx */
  VecScatter gprolongation;       /* merged prolongation: gy -> global vector */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each local subdomain (same as subdomain with the overlap removed); may be NULL */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType  type;               /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_GASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_GASM"
34 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
35 {
36   PC_GASM         *osm = (PC_GASM*)pc->data;
37   PetscErrorCode ierr;
38   PetscMPIInt    rank;
39   PetscInt       i,bsz;
40   PetscBool      iascii,isstring;
41   PetscViewer    sviewer;
42 
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
47   if (iascii) {
48     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
49     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
50     if (osm->nmax > 0)     {ierr = PetscSNPrintf(blocks,sizeof blocks,"max number of  subdomain blocks = %D",osm->nmax);CHKERRQ(ierr);}
51     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n);CHKERRQ(ierr);
52     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
53     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: restriction/interpolation type - %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
54     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
55     if (osm->same_local_solves) {
56       if (osm->ksp) {
57         ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59         if (!rank) {
60           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
62           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63         }
64         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65       }
66     } else {
67       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
69       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
70       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
71       for (i=0; i<osm->nmax; i++) {
72         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
73         if (i < osm->n) {
74           ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
75           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
76           ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
77           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
78         }
79         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
80       }
81       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
82       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
83     }
84   } else if (isstring) {
85     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCGASMTypes[osm->type]);CHKERRQ(ierr);
86     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
87     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
88     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
89   } else {
90     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
91   }
92   PetscFunctionReturn(0);
93 }
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "PCGASMPrintSubdomains"
97 static PetscErrorCode PCGASMPrintSubdomains(PC pc)
98 {
99   PC_GASM         *osm  = (PC_GASM*)pc->data;
100   const char     *prefix;
101   char           fname[PETSC_MAX_PATH_LEN+1];
102   PetscViewer    viewer;
103   PetscInt       i,j,nidx;
104   const PetscInt *idx;
105   PetscErrorCode ierr;
106 
107   PetscFunctionBegin;
108   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
109   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
110   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
111   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
112   for (i=0;i<osm->n;++i) {
113     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
114     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
115     for (j=0; j<nidx; j++) {
116       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
117     }
118     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
119     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
120     if (osm->is_local) {
121       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
123       for (j=0; j<nidx; j++) {
124         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
125       }
126       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
127       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
128     }
129   }
130   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
131   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 #undef __FUNCT__
136 #define __FUNCT__ "PCSetUp_GASM"
137 static PetscErrorCode PCSetUp_GASM(PC pc)
138 {
139   PC_GASM         *osm  = (PC_GASM*)pc->data;
140   PetscErrorCode ierr;
141   PetscBool      symset,flg;
142   PetscInt       i;
143   PetscMPIInt    rank, size;
144   MatReuse       scall = MAT_REUSE_MATRIX;
145   KSP            ksp;
146   PC             subpc;
147   const char     *prefix,*pprefix;
148   PetscInt       dn;       /* Number of indices in a single subdomain assigned to this processor. */
149   const PetscInt *didx;    /* Indices from a single subdomain assigned to this processor. */
150   PetscInt       ddn;      /* Number of indices in all subdomains assigned to this processor. */
151   PetscInt       *ddidx;   /* Indices of all subdomains assigned to this processor. */
152   IS             gid;      /* Identity IS of the size of all subdomains assigned to this processor. */
153   Vec            x,y;
154   PetscScalar    *gxarray, *gyarray;
155   PetscInt       gfirst, glast;
156 
157   PetscFunctionBegin;
158   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
159   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
160   if (!pc->setupcalled) {
161 
162     if (!osm->type_set) {
163       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
164       if (symset && flg) { osm->type = PC_GASM_BASIC; }
165     }
166 
167     if (osm->N == PETSC_DECIDE && osm->n < 1) {
168       /* no subdomains given, use one per processor */
169       osm->nmax = osm->n = 1;
170       ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
171       osm->N = size;
172     } else if (osm->N == PETSC_DECIDE) {
173       PetscInt inwork[2], outwork[2];
174       /* determine global number of subdomains and the max number of local subdomains */
175       inwork[0] = inwork[1] = osm->n;
176       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
177       osm->nmax = outwork[0];
178       osm->N    = outwork[1];
179     }
180     if (!osm->is){ /* create the index sets */
181       ierr = PCGASMCreateSubdomains(pc->pmat,osm->n,&osm->is);CHKERRQ(ierr);
182     }
183     if (!osm->is_local) {
184       /*
185 	 This indicates that osm->is should define a nonoverlapping decomposition
186 	 (there is no way to really guarantee that if subdomains are set by the user through PCGASMSetLocalSubdomains,
187 	  but the assumption is that either the user does the right thing, or subdomains in ossm->is have been created
188 	  via PCGASMCreateSubdomains, which guarantees a nonoverlapping decomposition).
189 	 Therefore, osm->is will be used to define osm->is_local.
190 	 If a nonzero overlap has been requested by the user, then osm->is will be expanded and will overlap,
191 	 so osm->is_local should obtain a copy of osm->is while they are still (presumably) nonoverlapping.
192 	 Otherwise (no overlap has been requested), osm->is_local are simply aliases for osm->is.
193       */
194       ierr = PetscMalloc(osm->n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
195       for (i=0; i<osm->n; i++) {
196         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
197           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
198           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
199         } else {
200           ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
201           osm->is_local[i] = osm->is[i];
202         }
203       }
204     }
205     /* Beyond this point osm->is_local is not null. */
206     if (osm->overlap > 0) {
207       /* Extend the "overlapping" regions by a number of steps */
208       ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->is,osm->overlap);CHKERRQ(ierr);
209     }
210     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
211     flg  = PETSC_FALSE;
212     ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
213     if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }
214 
215     if (osm->sort_indices) {
216       for (i=0; i<osm->n; i++) {
217         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
218 	ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
219       }
220     }
221     /* Merge the ISs, create merged vectors and scatter contexts. */
222     /* Restriction ISs. */
223     ddn = 0;
224     for (i=0; i<osm->n; i++) {
225       ierr = ISGetLocalSize(osm->is[i],&dn);          CHKERRQ(ierr);
226       ddn += dn;
227     }
228     ierr = PetscMalloc(ddn*sizeof(PetscInt), &ddidx); CHKERRQ(ierr);
229     ddn = 0;
230     for (i=0; i<osm->n; i++) {
231       ierr = ISGetLocalSize(osm->is[i],&dn); CHKERRQ(ierr);
232       ierr = ISGetIndices(osm->is[i],&didx); CHKERRQ(ierr);
233       ierr = PetscMemcpy(ddidx+ddn, didx, sizeof(PetscInt)*dn); CHKERRQ(ierr);
234       ierr = ISRestoreIndices(osm->is[i], &didx); CHKERRQ(ierr);
235       ddn += dn;
236     }
237     ierr = ISCreateGeneral(((PetscObject)(pc))->comm, ddn, ddidx, PETSC_OWN_POINTER, &osm->gis); CHKERRQ(ierr);
238     ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
239     ierr = VecCreateMPI(((PetscObject)pc)->comm, ddn, PETSC_DECIDE, &osm->gx); CHKERRQ(ierr);
240     ierr = VecDuplicate(osm->gx,&osm->gy);                                     CHKERRQ(ierr);
241     ierr = VecGetOwnershipRange(osm->gx, &gfirst, &glast);                     CHKERRQ(ierr);
242     ierr = ISCreateStride(((PetscObject)pc)->comm,ddn,gfirst,1, &gid);              CHKERRQ(ierr);
243     ierr = VecScatterCreate(x,osm->gis,osm->gx,gid, &(osm->grestriction));     CHKERRQ(ierr);
244     ierr = ISDestroy(gid);                                                     CHKERRQ(ierr);
245     /* Prolongation ISs */
246     { PetscInt       dn_local;       /* Number of indices in the local part of single domain assigned to this processor. */
247       const PetscInt *didx_local;    /* Global indices from the local part of a single domain assigned to this processor. */
248       PetscInt       ddn_local;      /* Number of indices in the local part of the disjoint union all domains assigned to this processor. */
249       PetscInt       *ddidx_local;   /* Global indices of the local part of the disjoint union of all domains assigned to this processor. */
250       /**/
251       ISLocalToGlobalMapping ltog;          /* Map from global to local indices on the disjoint union of subdomains: "local" ind's run from 0 to ddn-1. */
252       PetscInt              *ddidx_llocal;  /* Mapped local indices of the disjoint union of local parts of subdomains. */
253       PetscInt               ddn_llocal;    /* Number of indices in ddidx_llocal; must equal ddn_local, or else gis_local is not a sub-IS of gis. */
254       IS                     gis_llocal;    /* IS with ddidx_llocal indices. */
255       PetscInt               j;
256       ddn_local = 0;
257       for (i=0; i<osm->n; i++) {
258 	ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
259 	ddn_local += dn_local;
260       }
261       ierr = PetscMalloc(ddn_local*sizeof(PetscInt), &ddidx_local); CHKERRQ(ierr);
262       ddn_local = 0;
263       for (i=0; i<osm->n; i++) {
264 	ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
265 	ierr = ISGetIndices(osm->is_local[i],&didx_local); CHKERRQ(ierr);
266 	ierr = PetscMemcpy(ddidx_local+ddn_local, didx_local, sizeof(PetscInt)*dn_local); CHKERRQ(ierr);
267 	ierr = ISRestoreIndices(osm->is_local[i], &didx_local); CHKERRQ(ierr);
268 	ddn_local += dn_local;
269       }
270       ierr = PetscMalloc(sizeof(PetscInt)*ddn_local, &ddidx_llocal); CHKERRQ(ierr);
271       ierr = ISCreateGeneral(((PetscObject)pc)->comm, ddn_local, ddidx_local, PETSC_OWN_POINTER, &(osm->gis_local)); CHKERRQ(ierr);
272       ierr = ISLocalToGlobalMappingCreateIS(osm->gis,&ltog);CHKERRQ(ierr);
273       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,dn_local,ddidx_local,&ddn_llocal,ddidx_llocal);CHKERRQ(ierr);
274       ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
275       if (ddn_llocal != ddn_local) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"gis_local contains %D indices outside of gis", ddn_llocal - ddn_local);
276       /* Now convert these localized indices into the global indices into the merged output vector. */
277       ierr = VecGetOwnershipRange(osm->gy, &gfirst, &glast); CHKERRQ(ierr);
278       for(j=0; j < ddn_llocal; ++j) {
279 	ddidx_llocal[j] += gfirst;
280       }
281       ierr = ISCreateGeneral(PETSC_COMM_SELF,ddn_llocal,ddidx_llocal,PETSC_OWN_POINTER,&gis_llocal); CHKERRQ(ierr);
282       ierr = VecScatterCreate(y,osm->gis_local,osm->gy,gis_llocal,&osm->gprolongation);              CHKERRQ(ierr);
283       ierr = ISDestroy(gis_llocal);CHKERRQ(ierr);
284     }
285     /* Create the subdomain work vectors. */
286     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
287     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
288     ierr = VecGetOwnershipRange(osm->gx, &gfirst, &glast);CHKERRQ(ierr);
289     ierr = VecGetArray(osm->gx, &gxarray);                                     CHKERRQ(ierr);
290     ierr = VecGetArray(osm->gy, &gyarray);                                     CHKERRQ(ierr);
291     for (i=0, ddn=0; i<osm->n; ++i, ddn += dn) {
292       ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
293       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gxarray+ddn,&osm->x[i]);CHKERRQ(ierr);
294       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gyarray+ddn,&osm->y[i]);CHKERRQ(ierr);
295     }
296     ierr = VecRestoreArray(osm->gx, &gxarray); CHKERRQ(ierr);
297     ierr = VecRestoreArray(osm->gy, &gyarray); CHKERRQ(ierr);
298     ierr = VecDestroy(x); CHKERRQ(ierr);
299     ierr = VecDestroy(y); CHKERRQ(ierr);
300     /* Create the local solvers */
301     ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
302     for (i=0; i<osm->n; i++) { /* KSPs are local */
303       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
304       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
305       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
306       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
307       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
308       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
309       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
310       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
311       osm->ksp[i] = ksp;
312     }
313     scall = MAT_INITIAL_MATRIX;
314 
315   } else {
316     /*
317        Destroy the blocks from the previous iteration
318     */
319     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
320       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
321       scall = MAT_INITIAL_MATRIX;
322     }
323   }
324 
325   /*
326      Extract out the submatrices.
327   */
328   ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->is, osm->is,scall,&osm->pmat);CHKERRQ(ierr);
329   if (scall == MAT_INITIAL_MATRIX) {
330     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
331     for (i=0; i<osm->n; i++) {
332       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
333       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
334     }
335   }
336 
337   /* Return control to the user so that the submatrices can be modified (e.g., to apply
338      different boundary conditions for the submatrices than for the global problem) */
339   ierr = PCModifySubMatrices(pc,osm->n,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
340 
341   /*
342      Loop over submatrices putting them into local ksp
343   */
344   for (i=0; i<osm->n; i++) {
345     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
346     if (!pc->setupcalled) {
347       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
348     }
349   }
350 
351   PetscFunctionReturn(0);
352 }
353 
354 #undef __FUNCT__
355 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
356 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
357 {
358   PC_GASM         *osm = (PC_GASM*)pc->data;
359   PetscErrorCode ierr;
360   PetscInt       i;
361 
362   PetscFunctionBegin;
363   for (i=0; i<osm->n; i++) {
364     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
365   }
366   PetscFunctionReturn(0);
367 }
368 
369 #undef __FUNCT__
370 #define __FUNCT__ "PCApply_GASM"
371 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
372 {
373   PC_GASM         *osm = (PC_GASM*)pc->data;
374   PetscErrorCode ierr;
375   PetscInt       i;
376   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
377 
378   PetscFunctionBegin;
379   /*
380      Support for limiting the restriction or interpolation to only local
381      subdomain values (leaving the other values 0).
382   */
383   if (!(osm->type & PC_GASM_RESTRICT)) {
384     forward = SCATTER_FORWARD_LOCAL;
385     /* have to zero the work RHS since scatter may leave some slots empty */
386     ierr = VecZeroEntries(osm->gx); CHKERRQ(ierr);
387   }
388   if (!(osm->type & PC_GASM_INTERPOLATE)) {
389     reverse = SCATTER_REVERSE_LOCAL;
390   }
391 
392   ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
393   ierr = VecZeroEntries(y);CHKERRQ(ierr);
394   ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
395   /* do the local solves */
396   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
397     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
398   }
399   ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
400   ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
401   PetscFunctionReturn(0);
402 }
403 
404 #undef __FUNCT__
405 #define __FUNCT__ "PCApplyTranspose_GASM"
406 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
407 {
408   PC_GASM         *osm = (PC_GASM*)pc->data;
409   PetscErrorCode ierr;
410   PetscInt       i;
411   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
412 
413   PetscFunctionBegin;
414   /*
415      Support for limiting the restriction or interpolation to only local
416      subdomain values (leaving the other values 0).
417 
418      Note: these are reversed from the PCApply_GASM() because we are applying the
419      transpose of the three terms
420   */
421   if (!(osm->type & PC_GASM_INTERPOLATE)) {
422     forward = SCATTER_FORWARD_LOCAL;
423     /* have to zero the work RHS since scatter may leave some slots empty */
424     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
425   }
426   if (!(osm->type & PC_GASM_RESTRICT)) {
427     reverse = SCATTER_REVERSE_LOCAL;
428   }
429 
430   ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
431   ierr = VecZeroEntries(y);CHKERRQ(ierr);
432   ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
433   /* do the local solves */
434   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
435     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
436   }
437   ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
438   ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
439   PetscFunctionReturn(0);
440 }
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "PCDestroy_GASM"
444 static PetscErrorCode PCDestroy_GASM(PC pc)
445 {
446   PC_GASM         *osm = (PC_GASM*)pc->data;
447   PetscErrorCode ierr;
448   PetscInt       i;
449 
450   PetscFunctionBegin;
451   if (osm->ksp) {
452     for (i=0; i<osm->n; i++) {
453       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
454     }
455     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
456   }
457   if (osm->pmat) {
458     if (osm->n > 0) {
459       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
460     }
461   }
462   for (i=0; i<osm->n; i++) {
463     ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
464     ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
465   }
466   ierr = PetscFree(osm->x);CHKERRQ(ierr);
467   ierr = PetscFree(osm->y);CHKERRQ(ierr);
468   ierr = VecDestroy(osm->gx); CHKERRQ(ierr);
469   ierr = VecDestroy(osm->gy); CHKERRQ(ierr);
470 
471   ierr = VecScatterDestroy(osm->grestriction); CHKERRQ(ierr);
472   ierr = VecScatterDestroy(osm->gprolongation); CHKERRQ(ierr);
473 
474   if (osm->is) {
475     ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
476   }
477   ierr = PetscFree(osm);CHKERRQ(ierr);
478   PetscFunctionReturn(0);
479 }
480 
481 #undef __FUNCT__
482 #define __FUNCT__ "PCSetFromOptions_GASM"
483 static PetscErrorCode PCSetFromOptions_GASM(PC pc) {
484   PC_GASM         *osm = (PC_GASM*)pc->data;
485   PetscErrorCode ierr;
486   PetscInt       blocks,ovl;
487   PetscBool      symset,flg;
488   PCGASMType      gasmtype;
489 
490   PetscFunctionBegin;
491   /* set the type to symmetric if matrix is symmetric */
492   if (!osm->type_set && pc->pmat) {
493     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
494     if (symset && flg) { osm->type = PC_GASM_BASIC; }
495   }
496   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
497     ierr = PetscOptionsInt("-pc_gasm_blocks","Number of subdomains","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
498     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks);CHKERRQ(ierr); }
499     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of grid points overlap","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
500     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
501     flg  = PETSC_FALSE;
502     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
503     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
504   ierr = PetscOptionsTail();CHKERRQ(ierr);
505   PetscFunctionReturn(0);
506 }
507 
508 /*------------------------------------------------------------------------------------*/
509 
510 EXTERN_C_BEGIN
511 #undef __FUNCT__
512 #define __FUNCT__ "PCGASMSetLocalSubdomains_GASM"
513 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains_GASM(PC pc,PetscInt n,IS is[],IS is_local[])
514 {
515   PC_GASM         *osm = (PC_GASM*)pc->data;
516   PetscErrorCode ierr;
517   PetscInt       i;
518 
519   PetscFunctionBegin;
520   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
521   if (pc->setupcalled && (n != osm->n || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetLocalSubdomains() should be called before calling PCSetUp().");
522 
523   if (!pc->setupcalled) {
524     osm->n            = n;
525     osm->is           = 0;
526     osm->is_local     = 0;
527     if (is) {
528       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
529     }
530     if (is_local) {
531       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
532     }
533     if (osm->is) {
534       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
535     }
536     if (is) {
537       ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
538       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
539       /* Flag indicating that the user has set overlapping subdomains so PCGASM should not increase their size. */
540       osm->overlap = -1;
541     }
542     if (is_local) {
543       ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
544       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
545     }
546   }
547   PetscFunctionReturn(0);
548 }
549 EXTERN_C_END
550 
551 EXTERN_C_BEGIN
552 #undef __FUNCT__
553 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
554 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N) {
555   PC_GASM         *osm = (PC_GASM*)pc->data;
556   PetscErrorCode ierr;
557   PetscMPIInt    rank,size;
558   PetscInt       n;
559 
560   PetscFunctionBegin;
561   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
562 
563   /*
564      Split the subdomains equally among all processors
565   */
566   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
567   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
568   n = N/size + ((N % size) > rank);
569   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
570   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
571   if (!pc->setupcalled) {
572     if (osm->is) {
573       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
574     }
575     osm->N            = N;
576     osm->n            = n;
577     osm->is           = 0;
578     osm->is_local     = 0;
579   }
580   PetscFunctionReturn(0);
581 }/* PCGASMSetTotalSubdomains_GASM() */
582 EXTERN_C_END
583 
584 EXTERN_C_BEGIN
585 #undef __FUNCT__
586 #define __FUNCT__ "PCGASMSetOverlap_GASM"
587 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
588 {
589   PC_GASM *osm = (PC_GASM*)pc->data;
590 
591   PetscFunctionBegin;
592   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
593   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
594   if (!pc->setupcalled) {
595     osm->overlap = ovl;
596   }
597   PetscFunctionReturn(0);
598 }
599 EXTERN_C_END
600 
601 EXTERN_C_BEGIN
602 #undef __FUNCT__
603 #define __FUNCT__ "PCGASMSetType_GASM"
604 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType_GASM(PC pc,PCGASMType type)
605 {
606   PC_GASM *osm = (PC_GASM*)pc->data;
607 
608   PetscFunctionBegin;
609   osm->type     = type;
610   osm->type_set = PETSC_TRUE;
611   PetscFunctionReturn(0);
612 }
613 EXTERN_C_END
614 
615 EXTERN_C_BEGIN
616 #undef __FUNCT__
617 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
618 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
619 {
620   PC_GASM *osm = (PC_GASM*)pc->data;
621 
622   PetscFunctionBegin;
623   osm->sort_indices = doSort;
624   PetscFunctionReturn(0);
625 }
626 EXTERN_C_END
627 
628 EXTERN_C_BEGIN
629 #undef __FUNCT__
630 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
631 /*
632    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
633         In particular, it would upset the global subdomain number calculation.
634 */
635 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
636 {
637   PC_GASM         *osm = (PC_GASM*)pc->data;
638   PetscErrorCode ierr;
639 
640   PetscFunctionBegin;
641   if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
642 
643   if (n) {
644     *n = osm->n;
645   }
646   if (first) {
647     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
648     *first -= osm->n;
649   }
650   if (ksp) {
651     /* Assume that local solves are now different; not necessarily
652        true though!  This flag is used only for PCView_GASM() */
653     *ksp                   = osm->ksp;
654     osm->same_local_solves = PETSC_FALSE;
655   }
656   PetscFunctionReturn(0);
657 }/* PCGASMGetSubKSP_GASM() */
658 EXTERN_C_END
659 
660 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetLocalSubdomains"
/*@C
    PCGASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the overlapping subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local (nonoverlapping) part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the GASM preconditioner uses 1 block per processor.

    Use PCGASMSetTotalSubdomains() to set the subdomains for all processors.

    This routine must be called before PCSetUp() is invoked on pc.

    Level: advanced

.keywords: PC, GASM, set, local, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCGASMSetLocalSubdomains_GASM() when pc is of GASM type; a no-op for other PC types ("Try"). */
  ierr = PetscTryMethod(pc,"PCGASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
700 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use over
    all processors for the additive Schwarz preconditioner.  Either all or no
    processors in the PC communicator must call this routine, with the same value.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
-   N - the total number of subdomains for all processors

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_gasm_blocks <blks> - Sets total blocks

    Notes:
    By default the GASM preconditioner uses 1 block per processor.

    Use PCGASMSetLocalSubdomains() to set the subdomains (index sets) explicitly
    for this processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains(PC pc,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt),(pc,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
749 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <ovl> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCGASMSetTotalSubdomains() and
    PCGASMSetLocalSubdomains() (and the option -pc_gasm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(); the routine
    PCGASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
801 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
840 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - PETSC_TRUE to sort the subdomain indices, PETSC_FALSE to leave
             them in the order the user provided (default is PETSC_TRUE)

    Level: intermediate

.keywords: PC, GASM, set, subdomain indices, sort

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
869 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSP contexts is not to be freed;
   it is owned by the preconditioner.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
911 
912 /* -------------------------------------------------------------------------------------*/
913 /*MC
914    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
915            its own KSP object.
916 
917    Options Database Keys:
+  -pc_gasm_truelocal - Activates PCGASMSetUseTrueLocal()
919 .  -pc_gasm_blocks <blks> - Sets total blocks
920 .  -pc_gasm_overlap <ovl> - Sets overlap
921 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
922 
923      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
924       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
925       -pc_gasm_type basic to use the standard GASM.
926 
927    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
928      than one processor. Defaults to one block per processor.
929 
930      To set options on the solvers for each block append -sub_ to all the KSP, and PC
931         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
932 
933      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
934          and set the options directly on the resulting KSP object (you can access its PC
935          with KSPGetPC())
936 
937 
938    Level: beginner
939 
940    Concepts: additive Schwarz method
941 
942     References:
943     An additive variant of the Schwarz alternating method for the case of many subregions
944     M Dryja, OB Widlund - Courant Institute, New York University Technical report
945 
946     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
947     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
948 
949 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
950            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetLocalSubdomains(),
951            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
952 
953 M*/
954 
955 EXTERN_C_BEGIN
956 #undef __FUNCT__
957 #define __FUNCT__ "PCCreate_GASM"
958 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_GASM(PC pc)
959 {
960   PetscErrorCode ierr;
961   PC_GASM         *osm;
962 
963   PetscFunctionBegin;
964   ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
965   osm->N                 = PETSC_DECIDE;
966   osm->n                 = 0;
967   osm->nmax              = 0;
968   osm->overlap           = 1;
969   osm->ksp               = 0;
970   osm->grestriction      = 0;
971   osm->gprolongation     = 0;
972   osm->gx                = 0;
973   osm->gy                = 0;
974   osm->x                 = 0;
975   osm->y                 = 0;
976   osm->is                = 0;
977   osm->is_local          = 0;
978   osm->pmat              = 0;
979   osm->type              = PC_GASM_RESTRICT;
980   osm->same_local_solves = PETSC_TRUE;
981   osm->sort_indices      = PETSC_TRUE;
982 
983   pc->data                   = (void*)osm;
984   pc->ops->apply             = PCApply_GASM;
985   pc->ops->applytranspose    = PCApplyTranspose_GASM;
986   pc->ops->setup             = PCSetUp_GASM;
987   pc->ops->destroy           = PCDestroy_GASM;
988   pc->ops->setfromoptions    = PCSetFromOptions_GASM;
989   pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
990   pc->ops->view              = PCView_GASM;
991   pc->ops->applyrichardson   = 0;
992 
993   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetLocalSubdomains_C","PCGASMSetLocalSubdomains_GASM",
994                     PCGASMSetLocalSubdomains_GASM);CHKERRQ(ierr);
995   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
996                     PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
997   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
998                     PCGASMSetOverlap_GASM);CHKERRQ(ierr);
999   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
1000                     PCGASMSetType_GASM);CHKERRQ(ierr);
1001   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
1002                     PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
1003   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
1004                     PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
1005   PetscFunctionReturn(0);
1006 }
1007 EXTERN_C_END
1008 
1009 
1010 #undef __FUNCT__
1011 #define __FUNCT__ "PCGASMCreateSubdomains"
1012 /*@C
1013    PCGASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1014    preconditioner for a any problem on a general grid.
1015 
1016    Collective
1017 
1018    Input Parameters:
1019 +  A - The global matrix operator
1020 -  n - the number of local blocks
1021 
1022    Output Parameters:
1023 .  outis - the array of index sets defining the subdomains
1024 
1025    Level: advanced
1026 
1027    Note: this generates nonoverlapping subdomains; the PCGASM will generate the overlap
1028     from these if you use PCGASMSetLocalSubdomains()
1029 
1030     In the Fortran version you must provide the array outis[] already allocated of length n.
1031 
1032 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1033 
1034 .seealso: PCGASMSetLocalSubdomains(), PCGASMDestroySubdomains()
1035 @*/
1036 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1037 {
1038   MatPartitioning           mpart;
1039   const char                *prefix;
1040   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1041   PetscMPIInt               size;
1042   PetscInt                  i,j,rstart,rend,bs;
1043   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1044   Mat                       Ad = PETSC_NULL, adj;
1045   IS                        ispart,isnumb,*is;
1046   PetscErrorCode            ierr;
1047 
1048   PetscFunctionBegin;
1049   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1050   PetscValidPointer(outis,3);
1051   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1052 
1053   /* Get prefix, row distribution, and block size */
1054   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1055   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1056   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1057   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1058 
1059   /* Get diagonal block from matrix if possible */
1060   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1061   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1062   if (f) {
1063     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1064   } else if (size == 1) {
1065     iscopy = PETSC_FALSE; Ad = A;
1066   } else {
1067     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1068   }
1069   if (Ad) {
1070     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1071     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1072   }
1073   if (Ad && n > 1) {
1074     PetscBool  match,done;
1075     /* Try to setup a good matrix partitioning if available */
1076     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1077     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1078     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1079     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1080     if (!match) {
1081       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1082     }
1083     if (!match) { /* assume a "good" partitioner is available */
1084       PetscInt na,*ia,*ja;
1085       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1086       if (done) {
1087 	/* Build adjacency matrix by hand. Unfortunately a call to
1088 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1089 	   remove the block-aij structure and we cannot expect
1090 	   MatPartitioning to split vertices as we need */
1091 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1092 	nnz = 0;
1093 	for (i=0; i<na; i++) { /* count number of nonzeros */
1094 	  len = ia[i+1] - ia[i];
1095 	  row = ja + ia[i];
1096 	  for (j=0; j<len; j++) {
1097 	    if (row[j] == i) { /* don't count diagonal */
1098 	      len--; break;
1099 	    }
1100 	  }
1101 	  nnz += len;
1102 	}
1103 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1104 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1105 	nnz    = 0;
1106 	iia[0] = 0;
1107 	for (i=0; i<na; i++) { /* fill adjacency */
1108 	  cnt = 0;
1109 	  len = ia[i+1] - ia[i];
1110 	  row = ja + ia[i];
1111 	  for (j=0; j<len; j++) {
1112 	    if (row[j] != i) { /* if not diagonal */
1113 	      jja[nnz+cnt++] = row[j];
1114 	    }
1115 	  }
1116 	  nnz += cnt;
1117 	  iia[i+1] = nnz;
1118 	}
1119 	/* Partitioning of the adjacency matrix */
1120 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1121 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1122 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1123 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1124 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1125 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1126 	foundpart = PETSC_TRUE;
1127       }
1128       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1129     }
1130     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1131   }
1132   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1133 
1134   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1135   *outis = is;
1136 
1137   if (!foundpart) {
1138 
1139     /* Partitioning by contiguous chunks of rows */
1140 
1141     PetscInt mbs   = (rend-rstart)/bs;
1142     PetscInt start = rstart;
1143     for (i=0; i<n; i++) {
1144       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1145       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1146       start += count;
1147     }
1148 
1149   } else {
1150 
1151     /* Partitioning by adjacency of diagonal block  */
1152 
1153     const PetscInt *numbering;
1154     PetscInt       *count,nidx,*indices,*newidx,start=0;
1155     /* Get node count in each partition */
1156     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1157     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1158     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1159       for (i=0; i<n; i++) count[i] *= bs;
1160     }
1161     /* Build indices from node numbering */
1162     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1163     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1164     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1165     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1166     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1167     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1168     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1169       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1170       for (i=0; i<nidx; i++)
1171 	for (j=0; j<bs; j++)
1172 	  newidx[i*bs+j] = indices[i]*bs + j;
1173       ierr = PetscFree(indices);CHKERRQ(ierr);
1174       nidx   *= bs;
1175       indices = newidx;
1176     }
1177     /* Shift to get global indices */
1178     for (i=0; i<nidx; i++) indices[i] += rstart;
1179 
1180     /* Build the index sets for each block */
1181     for (i=0; i<n; i++) {
1182       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1183       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1184       start += count[i];
1185     }
1186 
1187     ierr = PetscFree(count);
1188     ierr = PetscFree(indices);
1189     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1190     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1191 
1192   }
1193 
1194   PetscFunctionReturn(0);
1195 }
1196 
1197 #undef __FUNCT__
1198 #define __FUNCT__ "PCGASMDestroySubdomains"
1199 /*@C
1200    PCGASMDestroySubdomains - Destroys the index sets created with
1201    PCGASMCreateSubdomains(). Should be called after setting subdomains
1202    with PCGASMSetLocalSubdomains().
1203 
1204    Collective
1205 
1206    Input Parameters:
1207 +  n - the number of index sets
1208 .  is - the array of index sets
1209 -  is_local - the array of local index sets, can be PETSC_NULL
1210 
1211    Level: advanced
1212 
1213 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1214 
1215 .seealso: PCGASMCreateSubdomains(), PCGASMSetLocalSubdomains()
1216 @*/
1217 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1218 {
1219   PetscInt       i;
1220   PetscErrorCode ierr;
1221   PetscFunctionBegin;
1222   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1223   PetscValidPointer(is,2);
1224   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1225   ierr = PetscFree(is);CHKERRQ(ierr);
1226   if (is_local) {
1227     PetscValidPointer(is_local,3);
1228     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1229     ierr = PetscFree(is_local);CHKERRQ(ierr);
1230   }
1231   PetscFunctionReturn(0);
1232 }
1233 
1234 #undef __FUNCT__
1235 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1236 /*@
1237    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1238    preconditioner for a two-dimensional problem on a regular grid.
1239 
1240    Not Collective
1241 
1242    Input Parameters:
1243 +  m, n - the number of mesh points in the x and y directions
1244 .  M, N - the number of subdomains in the x and y directions
1245 .  dof - degrees of freedom per node
1246 -  overlap - overlap in mesh lines
1247 
1248    Output Parameters:
1249 +  Nsub - the number of subdomains created
1250 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1251 -  is_local - array of index sets defining non-overlapping subdomains
1252 
1253    Note:
1254    Presently PCAMSCreateSubdomains2d() is valid only for sequential
1255    preconditioners.  More general related routines are
1256    PCGASMSetTotalSubdomains() and PCGASMSetLocalSubdomains().
1257 
1258    Level: advanced
1259 
1260 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1261 
1262 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
1263           PCGASMSetOverlap()
1264 @*/
1265 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1266 {
1267   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1268   PetscErrorCode ierr;
1269   PetscInt       nidx,*idx,loc,ii,jj,count;
1270 
1271   PetscFunctionBegin;
1272   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1273 
1274   *Nsub = N*M;
1275   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1276   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1277   ystart = 0;
1278   loc_outer = 0;
1279   for (i=0; i<N; i++) {
1280     height = n/N + ((n % N) > i); /* height of subdomain */
1281     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1282     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1283     yright = ystart + height + overlap; if (yright > n) yright = n;
1284     xstart = 0;
1285     for (j=0; j<M; j++) {
1286       width = m/M + ((m % M) > j); /* width of subdomain */
1287       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1288       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1289       xright = xstart + width + overlap; if (xright > m) xright = m;
1290       nidx   = (xright - xleft)*(yright - yleft);
1291       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1292       loc    = 0;
1293       for (ii=yleft; ii<yright; ii++) {
1294         count = m*ii + xleft;
1295         for (jj=xleft; jj<xright; jj++) {
1296           idx[loc++] = count++;
1297         }
1298       }
1299       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1300       if (overlap == 0) {
1301         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1302         (*is_local)[loc_outer] = (*is)[loc_outer];
1303       } else {
1304         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1305           for (jj=xstart; jj<xstart+width; jj++) {
1306             idx[loc++] = m*ii + jj;
1307           }
1308         }
1309         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1310       }
1311       ierr = PetscFree(idx);CHKERRQ(ierr);
1312       xstart += width;
1313       loc_outer++;
1314     }
1315     ystart += height;
1316   }
1317   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1318   PetscFunctionReturn(0);
1319 }
1320 
1321 #undef __FUNCT__
1322 #define __FUNCT__ "PCGASMGetLocalSubdomains"
1323 /*@C
1324     PCGASMGetLocalSubdomains - Gets the local subdomains (for this processor
1325     only) for the additive Schwarz preconditioner.
1326 
1327     Not Collective
1328 
1329     Input Parameter:
1330 .   pc - the preconditioner context
1331 
1332     Output Parameters:
1333 +   n - the number of subdomains for this processor (default value = 1)
1334 .   is - the index sets that define the subdomains for this processor
1335 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1336 
1337 
1338     Notes:
1339     The IS numbering is in the parallel, global numbering of the vector.
1340 
1341     Level: advanced
1342 
1343 .keywords: PC, GASM, set, local, subdomains, additive Schwarz
1344 
1345 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1346           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubmatrices()
1347 @*/
1348 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1349 {
1350   PC_GASM         *osm;
1351   PetscErrorCode ierr;
1352   PetscBool      match;
1353 
1354   PetscFunctionBegin;
1355   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1356   PetscValidIntPointer(n,2);
1357   if (is) PetscValidPointer(is,3);
1358   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1359   if (!match) {
1360     if (n)  *n  = 0;
1361     if (is) *is = PETSC_NULL;
1362   } else {
1363     osm = (PC_GASM*)pc->data;
1364     if (n)  *n  = osm->n;
1365     if (is) *is = osm->is;
1366     if (is_local) *is_local = osm->is_local;
1367   }
1368   PetscFunctionReturn(0);
1369 }
1370 
1371 #undef __FUNCT__
1372 #define __FUNCT__ "PCGASMGetLocalSubmatrices"
1373 /*@C
1374     PCGASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1375     only) for the additive Schwarz preconditioner.
1376 
1377     Not Collective
1378 
1379     Input Parameter:
1380 .   pc - the preconditioner context
1381 
1382     Output Parameters:
1383 +   n - the number of matrices for this processor (default value = 1)
1384 -   mat - the matrices
1385 
1386 
1387     Level: advanced
1388 
1389 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1390 
1391 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1392           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubdomains()
1393 @*/
1394 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1395 {
1396   PC_GASM         *osm;
1397   PetscErrorCode ierr;
1398   PetscBool      match;
1399 
1400   PetscFunctionBegin;
1401   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1402   PetscValidIntPointer(n,2);
1403   if (mat) PetscValidPointer(mat,3);
1404   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1405   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1406   if (!match) {
1407     if (n)   *n   = 0;
1408     if (mat) *mat = PETSC_NULL;
1409   } else {
1410     osm = (PC_GASM*)pc->data;
1411     if (n)   *n   = osm->n;
1412     if (mat) *mat = osm->pmat;
1413   }
1414   PetscFunctionReturn(0);
1415 }
1416