xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision c47c1910cab33e6e696fc461c45d0761dd027705)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an "generalized" additive Schwarz preconditioner for any Mat implementation.
5   Note that each processor may have any number of subdomains and a subdomain may live on multiple
6   processors.
7 
8        N    - total number of true subdomains on all processors
9        n    - actual number of subdomains on this processor
10        nmax - maximum number of subdomains per processor
11 */
12 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
13 
typedef struct {
  PetscInt   N,n,nmax;            /* N: total subdomains across all processes; n: subdomains on this process; nmax: max subdomains on any one process (see file header) */
  PetscInt   overlap;             /* overlap requested by user; -1 flags that the user supplied already-overlapping subdomains, so PCSetUp must not extend them */
  KSP        *ksp;                /* linear solvers for each block (length n, created in PCSetUp_GASM) */
  Vec        gx,gy;               /* Merged work vectors: local part is the disjoint union of all local subdomain pieces */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  IS         gis, gis_local;      /* merged ISs (concatenation of is[] and is_local[] respectively) */
  VecScatter grestriction;        /* merged restriction: global vector -> gx */
  VecScatter gprolongation;       /* merged prolongation: gy -> global vector */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each local subdomain (same as subdomain with the overlap removed); may be NULL */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType  type;               /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same (cleared when PCGASMGetSubKSP hands out the solvers) */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_GASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_GASM"
34 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
35 {
36   PC_GASM         *osm = (PC_GASM*)pc->data;
37   PetscErrorCode ierr;
38   PetscMPIInt    rank;
39   PetscInt       i,bsz;
40   PetscBool      iascii,isstring;
41   PetscViewer    sviewer;
42 
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
47   if (iascii) {
48     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
49     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
50     if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof blocks,"max number of  subdomain blocks = %D",osm->nmax);CHKERRQ(ierr);}
51     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n);CHKERRQ(ierr);
52     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
53     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: restriction/interpolation type - %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
54     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
55     if (osm->same_local_solves) {
56       if (osm->ksp) {
57         ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59         if (!rank) {
60           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
62           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63         }
64         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65       }
66     } else {
67       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
69       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
70       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
71       for (i=0; i<osm->nmax; i++) {
72         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
73         if (i < osm->n) {
74           ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
75           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
76           ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
77           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
78         }
79         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
80       }
81       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
82       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
83     }
84   } else if (isstring) {
85     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCGASMTypes[osm->type]);CHKERRQ(ierr);
86     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
87     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
88     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
89   } else {
90     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
91   }
92   PetscFunctionReturn(0);
93 }
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "PCGASMPrintSubdomains"
97 static PetscErrorCode PCGASMPrintSubdomains(PC pc)
98 {
99   PC_GASM         *osm  = (PC_GASM*)pc->data;
100   const char     *prefix;
101   char           fname[PETSC_MAX_PATH_LEN+1];
102   PetscViewer    viewer;
103   PetscInt       i,j,nidx;
104   const PetscInt *idx;
105   PetscErrorCode ierr;
106 
107   PetscFunctionBegin;
108   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
109   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
110   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
111   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
112   for (i=0;i<osm->n;++i) {
113     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
114     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
115     for (j=0; j<nidx; j++) {
116       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
117     }
118     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
119     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
120     if (osm->is_local) {
121       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
123       for (j=0; j<nidx; j++) {
124         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
125       }
126       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
127       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
128     }
129   }
130   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
131   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 #undef __FUNCT__
136 #define __FUNCT__ "PCSetUp_GASM"
137 static PetscErrorCode PCSetUp_GASM(PC pc)
138 {
139   PC_GASM         *osm  = (PC_GASM*)pc->data;
140   PetscErrorCode ierr;
141   PetscBool      symset,flg;
142   PetscInt       i,firstRow,lastRow;
143   PetscMPIInt    size;
144   MatReuse       scall = MAT_REUSE_MATRIX;
145   KSP            ksp;
146   PC             subpc;
147   const char     *prefix,*pprefix;
148   PetscInt       dn;       /* Number of indices in a single subdomain assigned to this processor. */
149   const PetscInt *didx;    /* Indices from a single subdomain assigned to this processor. */
150   PetscInt       ddn;      /* Number of indices in all subdomains assigned to this processor. */
151   PetscInt       *ddidx;   /* Indices of all subdomains assigned to this processor. */
152   IS             gid;      /* Identity IS of the size of all subdomains assigned to this processor. */
153   Vec            x,y;
154   PetscScalar    *gxarray, *gyarray;
155   IS             *is;
156 
157   PetscFunctionBegin;
158   if (!pc->setupcalled) {
159 
160     if (!osm->type_set) {
161       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
162       if (symset && flg) { osm->type = PC_GASM_BASIC; }
163     }
164 
165     if (osm->n == PETSC_DECIDE) {
166       /* no subdomains given, use one per processor */
167       osm->n = 1;
168       ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
169       osm->N = size;
170     } else if (osm->N == PETSC_DECIDE) {
171       PetscInt inwork[2], outwork[2];
172       /* determine global number of subdomains and the max number of local subdomains */
173       inwork[0] = inwork[1] = osm->n;
174       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
175       osm->nmax = outwork[0];
176       osm->N    = outwork[1];
177     }
178     if (!osm->is){ /* create the index sets */
179       ierr = PCGASMCreateSubdomains(pc->pmat,osm->n,&osm->is);CHKERRQ(ierr);
180     }
181     if (osm->n > 1 && !osm->is_local) {
182       ierr = PetscMalloc(osm->n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
183       for (i=0; i<osm->n; i++) {
184         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
185           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
186           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
187         } else {
188           ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
189           osm->is_local[i] = osm->is[i];
190         }
191       }
192     }
193     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
194     flg  = PETSC_FALSE;
195     ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
196     if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }
197 
198     if (osm->overlap > 0) {
199       /* Extend the "overlapping" regions by a number of steps */
200       ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->is,osm->overlap);CHKERRQ(ierr);
201     }
202     if (osm->sort_indices) {
203       for (i=0; i<osm->n; i++) {
204         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
205         if (osm->is_local) {
206           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
207         }
208       }
209     }
210     /* Merge the ISs, create merged vectors and scatter contexts. */
211     /* Domain IS. */
212     ddn = 0;
213     for (i=0; i<osm->n; i++) {
214       ierr = ISGetLocalSize(osm->is[i],&dn);          CHKERRQ(ierr);
215       ddn += dn;
216     }
217     ierr = PetscMalloc(ddn*sizeof(PetscInt), &ddidx); CHKERRQ(ierr);
218     ddn = 0;
219     for (i=0; i<osm->n; i++) {
220       ierr = ISGetLocalSize(osm->is[i],&dn); CHKERRQ(ierr);
221       ierr = ISGetIndices(osm->is[i],&didx); CHKERRQ(ierr);
222       ierr = PetscMemcpy(ddidx+ddn, didx, sizeof(PetscInt)*dn); CHKERRQ(ierr);
223       ierr = ISRestoreIndices(osm->is[i], &didx); CHKERRQ(ierr);
224     }
225     ierr = ISCreateGeneral(((PetscObject)(pc))->comm, ddn, ddidx, PETSC_OWN_POINTER, &osm->gis); CHKERRQ(ierr);
226     ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
227     ierr = VecCreateMPI(((PetscObject)pc)->comm, ddn, PETSC_DECIDE, &osm->gx); CHKERRQ(ierr);
228     ierr = VecDuplicate(osm->gx,&osm->gy);                                     CHKERRQ(ierr);
229     ierr = ISCreateStride(((PetscObject)pc)->comm,ddn,0,1, &gid);              CHKERRQ(ierr);
230     ierr = VecScatterCreate(x,osm->gis,osm->gx,gid, &(osm->grestriction));   CHKERRQ(ierr);
231     ierr = ISDestroy(gid);                                                     CHKERRQ(ierr);
232     /* Local domain IS */
233     if(osm->is_local) { /* All ranks either have this or not, so collective calls within this branch are okay. */
234       PetscInt       dn_local;       /* Number of indices in the local part of single domain assigned to this processor. */
235       const PetscInt *didx_local;    /* Global indices from the local part of a single domain assigned to this processor. */
236       PetscInt       ddn_local;      /* Number of indices in the local part of the disjoint union all domains assigned to this processor. */
237       PetscInt       *ddidx_local;  /* Global indices of the local part of the disjoint union of all domains assigned to this processor. */
238       /**/
239       ISLocalToGlobalMapping ltog;          /* Mapping from global to local indices on the disjoint union of subdomains: local run from 0 to ddn-1. */
240       PetscInt              *ddidx_llocal;  /* Local (within disjoint union of subdomains) indices of the disjoint union of local parts of subdomains. */
241       PetscInt               ddn_llocal;    /* Number of indices in ddidx_llocal; must equal ddn_local, or else gis_local is not a sub-IS of gis. */
242       IS                     gis_llocal;    /* IS with ddidx_llocal indices. */
243       PetscInt               j;
244       ddn_local = 0;
245       for (i=0; i<osm->n; i++) {
246 	ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
247 	ddn_local += dn_local;
248       }
249       ierr = PetscMalloc(ddn_local*sizeof(PetscInt), &ddidx_local); CHKERRQ(ierr);
250       ddn_local = 0;
251       for (i=0; i<osm->n; i++) {
252 	ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
253 	ierr = ISGetIndices(osm->is_local[i],&didx_local); CHKERRQ(ierr);
254 	ierr = PetscMemcpy(ddidx_local+ddn_local, didx_local, sizeof(PetscInt)*dn_local); CHKERRQ(ierr);
255 	ierr = ISRestoreIndices(osm->is_local[i], &didx_local); CHKERRQ(ierr);
256 	ddn_local += dn_local;
257       }
258       ierr = VecGetOwnershipRange(y, &firstRow, &lastRow); CHKERRQ(ierr);
259       if(ddn_local != lastRow-firstRow) {
260 	SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Specified local domains of total size %D do not cover the local vector range [%D,%D)",
261 		 ddn_local, firstRow, lastRow);
262       }
263       ierr = PetscMalloc(sizeof(PetscInt)*ddn_local, &ddidx_llocal); CHKERRQ(ierr);
264       ierr = ISCreateGeneral(((PetscObject)pc)->comm, ddn_local, ddidx_local, PETSC_OWN_POINTER, &(osm->gis_local)); CHKERRQ(ierr);
265       ierr = ISLocalToGlobalMappingCreateIS(osm->gis,&ltog);CHKERRQ(ierr);
266       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,dn_local,ddidx_local,&ddn_llocal,ddidx_llocal);CHKERRQ(ierr);
267       ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
268       if (ddn_llocal != ddn_local) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"gis_local contains %D nonlocal indices", ddn_llocal - ddn_local);
269       /* Now convert these localized indices into the global indices into the merged output vector. */
270       ierr = VecGetOwnershipRange(osm->gy, &firstRow, &lastRow); CHKERRQ(ierr);
271       for(j=0; j < ddn_llocal; ++j) {
272 	ddidx_llocal[j] += firstRow;
273       }
274       ierr = ISCreateGeneral(PETSC_COMM_SELF,ddn_llocal,ddidx_llocal,PETSC_OWN_POINTER,&gis_llocal); CHKERRQ(ierr);
275       ierr = VecScatterCreate(y,osm->gis_local,osm->gy,gis_llocal,&osm->gprolongation);              CHKERRQ(ierr);
276       ierr = ISDestroy(gis_llocal);CHKERRQ(ierr);
277 
278     }
279     /* Create the subdomain work vectors. */
280     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
281     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
282     ierr = VecGetOwnershipRange(osm->gx, &firstRow, &lastRow);CHKERRQ(ierr);
283     ierr = VecGetArray(osm->gx, &gxarray);                                     CHKERRQ(ierr);
284     ierr = VecGetArray(osm->gy, &gyarray);                                     CHKERRQ(ierr);
285     for (i=0, ddn=0; i<osm->n; ++i, ddn += dn) {
286       ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
287       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gxarray+ddn,&osm->x[i]);CHKERRQ(ierr);
288       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gyarray+ddn,&osm->y[i]);CHKERRQ(ierr);
289     }
290     ierr = VecRestoreArray(osm->gx, &gxarray); CHKERRQ(ierr);
291     ierr = VecRestoreArray(osm->gy, &gyarray); CHKERRQ(ierr);
292     ierr = VecDestroy(x); CHKERRQ(ierr);
293     ierr = VecDestroy(y); CHKERRQ(ierr);
294     /* Create the local solvers */
295     ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
296     for (i=0; i<osm->n; i++) {
297       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
298       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
299       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
300       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
301       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
302       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
303       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
304       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
305       osm->ksp[i] = ksp;
306     }
307     scall = MAT_INITIAL_MATRIX;
308 
309   } else {
310     /*
311        Destroy the blocks from the previous iteration
312     */
313     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
314       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
315       scall = MAT_INITIAL_MATRIX;
316     }
317   }
318 
319   /*
320      Extract out the submatrices.
321      Here we need to use the same number of submatrices per processor across the whole comm,
322      since we can't very well merge the submatrix extraction.
323   */
324   ierr = PetscMalloc(sizeof(IS)*osm->nmax, &is); CHKERRQ(ierr);
325   for(i=0; i<osm->n; ++i) {
326     is[i] = osm->is[i];
327   }
328   for (i=osm->n; i<osm->nmax; i++) {
329     ierr = ISCreateStride(((PetscObject)pc)->comm,0,0,1,&is[i]);CHKERRQ(ierr);
330   }
331 
332   if (scall == MAT_INITIAL_MATRIX) {
333     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
334     for (i=0; i<osm->n; i++) {
335       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
336       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
337     }
338   }
339 
340   /* Return control to the user so that the submatrices can be modified (e.g., to apply
341      different boundary conditions for the submatrices than for the global problem) */
342   ierr = PCModifySubMatrices(pc,osm->nmax,is,is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
343   for (i=osm->n; i<osm->nmax; i++) {
344     ierr = ISDestroy(is[i]);CHKERRQ(ierr);
345   }
346   ierr = PetscFree(is); CHKERRQ(ierr);
347 
348   /*
349      Loop over submatrices putting them into local ksp
350   */
351   for (i=0; i<osm->n; i++) {
352     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
353     if (!pc->setupcalled) {
354       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
355     }
356   }
357 
358   PetscFunctionReturn(0);
359 }
360 
361 #undef __FUNCT__
362 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
363 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
364 {
365   PC_GASM         *osm = (PC_GASM*)pc->data;
366   PetscErrorCode ierr;
367   PetscInt       i;
368 
369   PetscFunctionBegin;
370   for (i=0; i<osm->n; i++) {
371     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
372   }
373   PetscFunctionReturn(0);
374 }
375 
376 #undef __FUNCT__
377 #define __FUNCT__ "PCApply_GASM"
378 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
379 {
380   PC_GASM         *osm = (PC_GASM*)pc->data;
381   PetscErrorCode ierr;
382   PetscInt       i;
383   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
384 
385   PetscFunctionBegin;
386   /*
387      Support for limiting the restriction or interpolation to only local
388      subdomain values (leaving the other values 0).
389   */
390   if (!(osm->type & PC_GASM_RESTRICT)) {
391     forward = SCATTER_FORWARD_LOCAL;
392     /* have to zero the work RHS since scatter may leave some slots empty */
393     ierr = VecZeroEntries(osm->gx); CHKERRQ(ierr);
394   }
395   if (!(osm->type & PC_GASM_INTERPOLATE)) {
396     reverse = SCATTER_REVERSE_LOCAL;
397   }
398 
399   ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
400   ierr = VecZeroEntries(y);CHKERRQ(ierr);
401   ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
402   /* do the local solves */
403   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
404     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
405   }
406   ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
407   ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
408   PetscFunctionReturn(0);
409 }
410 
411 #undef __FUNCT__
412 #define __FUNCT__ "PCApplyTranspose_GASM"
413 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
414 {
415   PC_GASM         *osm = (PC_GASM*)pc->data;
416   PetscErrorCode ierr;
417   PetscInt       i;
418   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
419 
420   PetscFunctionBegin;
421   /*
422      Support for limiting the restriction or interpolation to only local
423      subdomain values (leaving the other values 0).
424 
425      Note: these are reversed from the PCApply_GASM() because we are applying the
426      transpose of the three terms
427   */
428   if (!(osm->type & PC_GASM_INTERPOLATE)) {
429     forward = SCATTER_FORWARD_LOCAL;
430     /* have to zero the work RHS since scatter may leave some slots empty */
431     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
432   }
433   if (!(osm->type & PC_GASM_RESTRICT)) {
434     reverse = SCATTER_REVERSE_LOCAL;
435   }
436 
437   ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
438   ierr = VecZeroEntries(y);CHKERRQ(ierr);
439   ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
440   /* do the local solves */
441   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
442     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
443   }
444   ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
445   ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
446   PetscFunctionReturn(0);
447 }
448 
449 #undef __FUNCT__
450 #define __FUNCT__ "PCDestroy_GASM"
451 static PetscErrorCode PCDestroy_GASM(PC pc)
452 {
453   PC_GASM         *osm = (PC_GASM*)pc->data;
454   PetscErrorCode ierr;
455   PetscInt       i;
456 
457   PetscFunctionBegin;
458   if (osm->ksp) {
459     for (i=0; i<osm->n; i++) {
460       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
461     }
462     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
463   }
464   if (osm->pmat) {
465     if (osm->n > 0) {
466       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
467     }
468   }
469   for (i=0; i<osm->n; i++) {
470     ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
471     ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
472   }
473   ierr = PetscFree(osm->x);CHKERRQ(ierr);
474   ierr = PetscFree(osm->y);CHKERRQ(ierr);
475   ierr = VecDestroy(osm->gx); CHKERRQ(ierr);
476   ierr = VecDestroy(osm->gy); CHKERRQ(ierr);
477 
478   ierr = VecScatterDestroy(osm->grestriction); CHKERRQ(ierr);
479   ierr = VecScatterDestroy(osm->gprolongation); CHKERRQ(ierr);
480 
481   if (osm->is) {
482     ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
483   }
484   ierr = PetscFree(osm);CHKERRQ(ierr);
485   PetscFunctionReturn(0);
486 }
487 
488 #undef __FUNCT__
489 #define __FUNCT__ "PCSetFromOptions_GASM"
490 static PetscErrorCode PCSetFromOptions_GASM(PC pc) {
491   PC_GASM         *osm = (PC_GASM*)pc->data;
492   PetscErrorCode ierr;
493   PetscInt       blocks,ovl;
494   PetscBool      symset,flg;
495   PCGASMType      gasmtype;
496 
497   PetscFunctionBegin;
498   /* set the type to symmetric if matrix is symmetric */
499   if (!osm->type_set && pc->pmat) {
500     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
501     if (symset && flg) { osm->type = PC_GASM_BASIC; }
502   }
503   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
504     ierr = PetscOptionsInt("-pc_gasm_blocks","Number of subdomains","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
505     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks);CHKERRQ(ierr); }
506     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of grid points overlap","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
507     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
508     flg  = PETSC_FALSE;
509     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
510     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
511   ierr = PetscOptionsTail();CHKERRQ(ierr);
512   PetscFunctionReturn(0);
513 }
514 
515 /*------------------------------------------------------------------------------------*/
516 
517 EXTERN_C_BEGIN
518 #undef __FUNCT__
519 #define __FUNCT__ "PCGASMSetLocalSubdomains_GASM"
520 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains_GASM(PC pc,PetscInt n,IS is[],IS is_local[])
521 {
522   PC_GASM         *osm = (PC_GASM*)pc->data;
523   PetscErrorCode ierr;
524   PetscInt       i;
525 
526   PetscFunctionBegin;
527   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
528   if (pc->setupcalled && (n != osm->n || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetLocalSubdomains() should be called before calling PCSetUp().");
529 
530   if (!pc->setupcalled) {
531     if (is) {
532       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
533     }
534     if (is_local) {
535       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
536     }
537     if (osm->is) {
538       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
539     }
540     osm->n            = n;
541     osm->is           = 0;
542     osm->is_local     = 0;
543     if (is) {
544       ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
545       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
546       /* Flag indicating that the user has set overlapping subdomains so PCGASM should not increase their size. */
547       osm->overlap = -1;
548     }
549     if (is_local) {
550       ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
551       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
552     }
553   }
554   PetscFunctionReturn(0);
555 }
556 EXTERN_C_END
557 
558 EXTERN_C_BEGIN
559 #undef __FUNCT__
560 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
561 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N) {
562   PC_GASM         *osm = (PC_GASM*)pc->data;
563   PetscErrorCode ierr;
564   PetscMPIInt    rank,size;
565   PetscInt       n;
566 
567   PetscFunctionBegin;
568   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
569 
570   /*
571      Split the subdomains equally among all processors
572   */
573   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
574   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
575   n = N/size + ((N % size) > rank);
576   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
577   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
578   if (!pc->setupcalled) {
579     if (osm->is) {
580       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
581     }
582     osm->n            = n;
583     osm->is           = 0;
584     osm->is_local     = 0;
585   }
586   PetscFunctionReturn(0);
587 }/* PCGASMSetTotalSubdomains_GASM() */
588 EXTERN_C_END
589 
590 EXTERN_C_BEGIN
591 #undef __FUNCT__
592 #define __FUNCT__ "PCGASMSetOverlap_GASM"
593 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
594 {
595   PC_GASM *osm = (PC_GASM*)pc->data;
596 
597   PetscFunctionBegin;
598   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
599   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
600   if (!pc->setupcalled) {
601     osm->overlap = ovl;
602   }
603   PetscFunctionReturn(0);
604 }
605 EXTERN_C_END
606 
607 EXTERN_C_BEGIN
608 #undef __FUNCT__
609 #define __FUNCT__ "PCGASMSetType_GASM"
610 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType_GASM(PC pc,PCGASMType type)
611 {
612   PC_GASM *osm = (PC_GASM*)pc->data;
613 
614   PetscFunctionBegin;
615   osm->type     = type;
616   osm->type_set = PETSC_TRUE;
617   PetscFunctionReturn(0);
618 }
619 EXTERN_C_END
620 
621 EXTERN_C_BEGIN
622 #undef __FUNCT__
623 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
624 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
625 {
626   PC_GASM *osm = (PC_GASM*)pc->data;
627 
628   PetscFunctionBegin;
629   osm->sort_indices = doSort;
630   PetscFunctionReturn(0);
631 }
632 EXTERN_C_END
633 
634 EXTERN_C_BEGIN
635 #undef __FUNCT__
636 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
637 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
638 {
639   PC_GASM         *osm = (PC_GASM*)pc->data;
640   PetscErrorCode ierr;
641 
642   PetscFunctionBegin;
643   if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
644 
645   if (n) {
646     *n = osm->n;
647   }
648   if (first) {
649     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
650     *first -= osm->n;
651   }
652   if (ksp) {
653     /* Assume that local solves are now different; not necessarily
654        true though!  This flag is used only for PCView_GASM() */
655     *ksp                   = osm->ksp;
656     osm->same_local_solves = PETSC_FALSE;
657   }
658   PetscFunctionReturn(0);
659 }/* PCGASMGetSubKSP_GASM() */
660 EXTERN_C_END
661 
662 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetLocalSubdomains"
/*@C
    PCGASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the generalized additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains); the PC takes
         its own references, so the caller may destroy its copies
-   is_local - the index sets that define the local (nonoverlapping) part of
         the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the GASM preconditioner uses 1 block per processor.

    Must be called before PCSetUp(); calling it afterwards with different
    arguments is an error.

    Use PCGASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, GASM, set, local, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCGASMSetLocalSubdomains_GASM() when the PC is of type GASM */
  ierr = PetscTryMethod(pc,"PCGASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
702 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains, across all
    processors, for the additive Schwarz preconditioner.  Either all or no
    processors in the PC communicator must call this routine, with the same value.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
-   N  - the total number of subdomains for all processors

    Options Database Key:
    To set the total number of subdomain blocks, use the option
.    -pc_gasm_blocks <blks> - Sets total blocks

    Notes:
    This routine only sets the number of subdomains; the subdomains themselves
    are determined internally by PETSc.  To supply the actual index sets, use
    PCGASMSetLocalSubdomains() instead.

    By default the GASM preconditioner uses 1 block per processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains(PC pc,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCGASMSetTotalSubdomains_GASM() when pc is of type GASM; silently a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt),(pc,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
751 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <ovl> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCGASMSetTotalSubdomains() and
    PCGASMSetLocalSubdomains() (and the option -pc_gasm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(); the routine
    PCGASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatches to PCGASMSetOverlap_GASM() when pc is of type GASM; silently a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
803 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatches to PCGASMSetType_GASM() when pc is of type GASM; silently a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
842 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, GASM, set, subdomains, sort, indices

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatches to PCGASMSetSortIndices_GASM() when pc is of type GASM; silently a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
871 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSPes is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D(),
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc is not of type GASM */
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
913 
914 /* -------------------------------------------------------------------------------------*/
915 /*MC
916    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
917            its own KSP object.
918 
919    Options Database Keys:
+  -pc_gasm_truelocal - Activates PCGASMSetUseTrueLocal()
921 .  -pc_gasm_blocks <blks> - Sets total blocks
922 .  -pc_gasm_overlap <ovl> - Sets overlap
923 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
924 
925      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
926       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
927       -pc_gasm_type basic to use the standard GASM.
928 
929    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
930      than one processor. Defaults to one block per processor.
931 
932      To set options on the solvers for each block append -sub_ to all the KSP, and PC
933         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
934 
935      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
936          and set the options directly on the resulting KSP object (you can access its PC
937          with KSPGetPC())
938 
939 
940    Level: beginner
941 
942    Concepts: additive Schwarz method
943 
944     References:
945     An additive variant of the Schwarz alternating method for the case of many subregions
946     M Dryja, OB Widlund - Courant Institute, New York University Technical report
947 
948     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
949     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
950 
951 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
952            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetLocalSubdomains(),
953            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
954 
955 M*/
956 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
/*
   PCCreate_GASM - Creates and initializes the PC_GASM context and installs
   the GASM method table on the given PC.  Registered as the constructor for
   the PCGASM type; all per-instance state is lazily built in PCSetUp_GASM().
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
  /* Defaults: subdomain count decided at setup, overlap of 1, restricted ASM variant */
  osm->N                 = PETSC_DECIDE;
  osm->n                 = 0;
  osm->nmax              = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->grestriction      = 0;
  osm->gprolongation     = 0;
  osm->gx                = 0;
  osm->gy                = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->pmat              = 0;
  osm->type              = PC_GASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_GASM;
  pc->ops->applytranspose    = PCApplyTranspose_GASM;
  pc->ops->setup             = PCSetUp_GASM;
  pc->ops->destroy           = PCDestroy_GASM;
  pc->ops->setfromoptions    = PCSetFromOptions_GASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
  pc->ops->view              = PCView_GASM;
  pc->ops->applyrichardson   = 0;

  /* Compose the type-specific implementations so that the generic
     PCGASMSetXXX()/PCGASMGetSubKSP() wrappers can dispatch to them */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetLocalSubdomains_C","PCGASMSetLocalSubdomains_GASM",
                    PCGASMSetLocalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
                    PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
                    PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
                    PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
                    PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
                    PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
1010 
1011 
1012 #undef __FUNCT__
1013 #define __FUNCT__ "PCGASMCreateSubdomains"
1014 /*@C
1015    PCGASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1016    preconditioner for a any problem on a general grid.
1017 
1018    Collective
1019 
1020    Input Parameters:
1021 +  A - The global matrix operator
1022 -  n - the number of local blocks
1023 
1024    Output Parameters:
1025 .  outis - the array of index sets defining the subdomains
1026 
1027    Level: advanced
1028 
1029    Note: this generates nonoverlapping subdomains; the PCGASM will generate the overlap
1030     from these if you use PCGASMSetLocalSubdomains()
1031 
1032     In the Fortran version you must provide the array outis[] already allocated of length n.
1033 
1034 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1035 
1036 .seealso: PCGASMSetLocalSubdomains(), PCGASMDestroySubdomains()
1037 @*/
1038 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1039 {
1040   MatPartitioning           mpart;
1041   const char                *prefix;
1042   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1043   PetscMPIInt               size;
1044   PetscInt                  i,j,rstart,rend,bs;
1045   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1046   Mat                       Ad = PETSC_NULL, adj;
1047   IS                        ispart,isnumb,*is;
1048   PetscErrorCode            ierr;
1049 
1050   PetscFunctionBegin;
1051   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1052   PetscValidPointer(outis,3);
1053   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1054 
1055   /* Get prefix, row distribution, and block size */
1056   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1057   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1058   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1059   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1060 
1061   /* Get diagonal block from matrix if possible */
1062   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1063   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1064   if (f) {
1065     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1066   } else if (size == 1) {
1067     iscopy = PETSC_FALSE; Ad = A;
1068   } else {
1069     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1070   }
1071   if (Ad) {
1072     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1073     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1074   }
1075   if (Ad && n > 1) {
1076     PetscBool  match,done;
1077     /* Try to setup a good matrix partitioning if available */
1078     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1079     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1080     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1081     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1082     if (!match) {
1083       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1084     }
1085     if (!match) { /* assume a "good" partitioner is available */
1086       PetscInt na,*ia,*ja;
1087       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1088       if (done) {
1089 	/* Build adjacency matrix by hand. Unfortunately a call to
1090 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1091 	   remove the block-aij structure and we cannot expect
1092 	   MatPartitioning to split vertices as we need */
1093 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1094 	nnz = 0;
1095 	for (i=0; i<na; i++) { /* count number of nonzeros */
1096 	  len = ia[i+1] - ia[i];
1097 	  row = ja + ia[i];
1098 	  for (j=0; j<len; j++) {
1099 	    if (row[j] == i) { /* don't count diagonal */
1100 	      len--; break;
1101 	    }
1102 	  }
1103 	  nnz += len;
1104 	}
1105 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1106 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1107 	nnz    = 0;
1108 	iia[0] = 0;
1109 	for (i=0; i<na; i++) { /* fill adjacency */
1110 	  cnt = 0;
1111 	  len = ia[i+1] - ia[i];
1112 	  row = ja + ia[i];
1113 	  for (j=0; j<len; j++) {
1114 	    if (row[j] != i) { /* if not diagonal */
1115 	      jja[nnz+cnt++] = row[j];
1116 	    }
1117 	  }
1118 	  nnz += cnt;
1119 	  iia[i+1] = nnz;
1120 	}
1121 	/* Partitioning of the adjacency matrix */
1122 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1123 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1124 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1125 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1126 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1127 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1128 	foundpart = PETSC_TRUE;
1129       }
1130       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1131     }
1132     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1133   }
1134   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1135 
1136   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1137   *outis = is;
1138 
1139   if (!foundpart) {
1140 
1141     /* Partitioning by contiguous chunks of rows */
1142 
1143     PetscInt mbs   = (rend-rstart)/bs;
1144     PetscInt start = rstart;
1145     for (i=0; i<n; i++) {
1146       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1147       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1148       start += count;
1149     }
1150 
1151   } else {
1152 
1153     /* Partitioning by adjacency of diagonal block  */
1154 
1155     const PetscInt *numbering;
1156     PetscInt       *count,nidx,*indices,*newidx,start=0;
1157     /* Get node count in each partition */
1158     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1159     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1160     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1161       for (i=0; i<n; i++) count[i] *= bs;
1162     }
1163     /* Build indices from node numbering */
1164     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1165     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1166     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1167     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1168     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1169     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1170     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1171       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1172       for (i=0; i<nidx; i++)
1173 	for (j=0; j<bs; j++)
1174 	  newidx[i*bs+j] = indices[i]*bs + j;
1175       ierr = PetscFree(indices);CHKERRQ(ierr);
1176       nidx   *= bs;
1177       indices = newidx;
1178     }
1179     /* Shift to get global indices */
1180     for (i=0; i<nidx; i++) indices[i] += rstart;
1181 
1182     /* Build the index sets for each block */
1183     for (i=0; i<n; i++) {
1184       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1185       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1186       start += count[i];
1187     }
1188 
1189     ierr = PetscFree(count);
1190     ierr = PetscFree(indices);
1191     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1192     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1193 
1194   }
1195 
1196   PetscFunctionReturn(0);
1197 }
1198 
#undef __FUNCT__
#define __FUNCT__ "PCGASMDestroySubdomains"
/*@C
   PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateSubdomains(). Should be called after setting subdomains
   with PCGASMSetLocalSubdomains().

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be PETSC_NULL

   Level: advanced

.keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCGASMCreateSubdomains(), PCGASMSetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt       i;
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
  PetscValidPointer(is,2);
  /* Destroy each IS, then the array itself (both allocated by PCGASMCreateSubdomains()) */
  for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
  ierr = PetscFree(is);CHKERRQ(ierr);
  if (is_local) {
    PetscValidPointer(is_local,3);
    for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1235 
1236 #undef __FUNCT__
1237 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1238 /*@
1239    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1240    preconditioner for a two-dimensional problem on a regular grid.
1241 
1242    Not Collective
1243 
1244    Input Parameters:
1245 +  m, n - the number of mesh points in the x and y directions
1246 .  M, N - the number of subdomains in the x and y directions
1247 .  dof - degrees of freedom per node
1248 -  overlap - overlap in mesh lines
1249 
1250    Output Parameters:
1251 +  Nsub - the number of subdomains created
1252 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1253 -  is_local - array of index sets defining non-overlapping subdomains
1254 
1255    Note:
1256    Presently PCAMSCreateSubdomains2d() is valid only for sequential
1257    preconditioners.  More general related routines are
1258    PCGASMSetTotalSubdomains() and PCGASMSetLocalSubdomains().
1259 
1260    Level: advanced
1261 
1262 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1263 
1264 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
1265           PCGASMSetOverlap()
1266 @*/
1267 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1268 {
1269   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1270   PetscErrorCode ierr;
1271   PetscInt       nidx,*idx,loc,ii,jj,count;
1272 
1273   PetscFunctionBegin;
1274   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1275 
1276   *Nsub = N*M;
1277   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1278   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1279   ystart = 0;
1280   loc_outer = 0;
1281   for (i=0; i<N; i++) {
1282     height = n/N + ((n % N) > i); /* height of subdomain */
1283     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1284     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1285     yright = ystart + height + overlap; if (yright > n) yright = n;
1286     xstart = 0;
1287     for (j=0; j<M; j++) {
1288       width = m/M + ((m % M) > j); /* width of subdomain */
1289       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1290       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1291       xright = xstart + width + overlap; if (xright > m) xright = m;
1292       nidx   = (xright - xleft)*(yright - yleft);
1293       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1294       loc    = 0;
1295       for (ii=yleft; ii<yright; ii++) {
1296         count = m*ii + xleft;
1297         for (jj=xleft; jj<xright; jj++) {
1298           idx[loc++] = count++;
1299         }
1300       }
1301       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1302       if (overlap == 0) {
1303         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1304         (*is_local)[loc_outer] = (*is)[loc_outer];
1305       } else {
1306         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1307           for (jj=xstart; jj<xstart+width; jj++) {
1308             idx[loc++] = m*ii + jj;
1309           }
1310         }
1311         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1312       }
1313       ierr = PetscFree(idx);CHKERRQ(ierr);
1314       xstart += width;
1315       loc_outer++;
1316     }
1317     ystart += height;
1318   }
1319   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 #undef __FUNCT__
1324 #define __FUNCT__ "PCGASMGetLocalSubdomains"
1325 /*@C
1326     PCGASMGetLocalSubdomains - Gets the local subdomains (for this processor
1327     only) for the additive Schwarz preconditioner.
1328 
1329     Not Collective
1330 
1331     Input Parameter:
1332 .   pc - the preconditioner context
1333 
1334     Output Parameters:
1335 +   n - the number of subdomains for this processor (default value = 1)
1336 .   is - the index sets that define the subdomains for this processor
1337 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1338 
1339 
1340     Notes:
1341     The IS numbering is in the parallel, global numbering of the vector.
1342 
1343     Level: advanced
1344 
1345 .keywords: PC, GASM, set, local, subdomains, additive Schwarz
1346 
1347 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1348           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubmatrices()
1349 @*/
1350 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1351 {
1352   PC_GASM         *osm;
1353   PetscErrorCode ierr;
1354   PetscBool      match;
1355 
1356   PetscFunctionBegin;
1357   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1358   PetscValidIntPointer(n,2);
1359   if (is) PetscValidPointer(is,3);
1360   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1361   if (!match) {
1362     if (n)  *n  = 0;
1363     if (is) *is = PETSC_NULL;
1364   } else {
1365     osm = (PC_GASM*)pc->data;
1366     if (n)  *n  = osm->n;
1367     if (is) *is = osm->is;
1368     if (is_local) *is_local = osm->is_local;
1369   }
1370   PetscFunctionReturn(0);
1371 }
1372 
1373 #undef __FUNCT__
1374 #define __FUNCT__ "PCGASMGetLocalSubmatrices"
1375 /*@C
1376     PCGASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1377     only) for the additive Schwarz preconditioner.
1378 
1379     Not Collective
1380 
1381     Input Parameter:
1382 .   pc - the preconditioner context
1383 
1384     Output Parameters:
1385 +   n - the number of matrices for this processor (default value = 1)
1386 -   mat - the matrices
1387 
1388 
1389     Level: advanced
1390 
1391 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1392 
1393 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1394           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubdomains()
1395 @*/
1396 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1397 {
1398   PC_GASM         *osm;
1399   PetscErrorCode ierr;
1400   PetscBool      match;
1401 
1402   PetscFunctionBegin;
1403   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1404   PetscValidIntPointer(n,2);
1405   if (mat) PetscValidPointer(mat,3);
1406   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1407   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1408   if (!match) {
1409     if (n)   *n   = 0;
1410     if (mat) *mat = PETSC_NULL;
1411   } else {
1412     osm = (PC_GASM*)pc->data;
1413     if (n)   *n   = osm->n;
1414     if (mat) *mat = osm->pmat;
1415   }
1416   PetscFunctionReturn(0);
1417 }
1418