xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision d9822059a04b3bd78629edfbf3ebfea74b8f6e57)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <private/pcimpl.h>     /*I "petscpc.h" I*/
14 
/*
   Private context for the additive Schwarz preconditioner.
   See the file-header comment for the relationship between n, n_local and
   n_local_true (n_local is the max over all ranks of n_local_true, so the
   scatter arrays below can be sized uniformly across processes).
*/
typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user; -1 means user supplied the (already overlapping) subdomains */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;
31 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints the state of the ASM preconditioner to a viewer.

   Supports ASCII viewers (full report, optionally including each block's
   KSP/PC) and string viewers (one-line summary); any other viewer type
   raises PETSC_ERR_SUP.  Collective: every rank must call, even though only
   rank 0 prints in the same_local_solves case.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;


  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* defaults cover the states where overlap/blocks have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof blocks,"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* all blocks use the same solver configuration: view only rank 0's first block */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        /* Get/RestoreSingleton are collective; only rank 0 actually views */
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* solvers may differ: every rank reports each of its true blocks in turn */
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      /* loop to n_local (the global max) so the collective Get/RestoreSingleton
         calls stay matched across ranks with different n_local_true */
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "PCASMPrintSubdomains"
97 static PetscErrorCode PCASMPrintSubdomains(PC pc)
98 {
99   PC_ASM         *osm  = (PC_ASM*)pc->data;
100   const char     *prefix;
101   char           fname[PETSC_MAX_PATH_LEN+1];
102   PetscViewer    viewer;
103   PetscInt       i,j,nidx;
104   const PetscInt *idx;
105   PetscErrorCode ierr;
106 
107   PetscFunctionBegin;
108   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
109   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
110   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
111   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
112   for (i=0;i<osm->n_local_true;i++) {
113     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
114     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
115     for (j=0; j<nidx; j++) {
116       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
117     }
118     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
119     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
120     if (osm->is_local) {
121       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
123       for (j=0; j<nidx; j++) {
124         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
125       }
126       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
127       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
128     }
129   }
130   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
131   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds all data needed to apply the preconditioner:
   subdomain index sets, work vectors, the restriction/localization/
   prolongation scatters, the extracted submatrices, and one sequential
   KSP per true local block.  On repeated setups only the submatrices
   (and, for a changed nonzero pattern, their storage) are refreshed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* a known-symmetric matrix defaults to the symmetric (basic) ASM variant
       unless the user chose a type explicitly */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains */
      /* PetscMaxSum_Op computes outwork[0] = max, outwork[1] = sum of inwork over ranks */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is){ /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* derive the non-overlapping sets from the current (pre-overlap) ones */
      ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* no overlap: share the index set, bumping its reference count */
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts */
    /* all arrays are sized n_local (the max across ranks) so that the
       collective scatter creation loops below stay matched across ranks */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    /* m_local is assigned in every iteration (both branches below) before the
       "firstRow += m_local" increment clause runs, so it is never read uninitialized */
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        /* map is_local's global indices into positions within the overlapping
           subdomain, so y (overlapping solution) can be restricted to y_local */
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
        /* every local index must appear in the overlapping set */
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        /* isll takes ownership of idx (PETSC_OWN_POINTER) */
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isl);CHKERRQ(ierr);
      } else {
        /* no distinct local set: alias y_local/prolongation to y/restriction */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* the accumulated local sizes must tile this rank's ownership range exactly */
    if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
    /* pad with empty blocks up to n_local so collective scatter creation matches */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(vec);CHKERRQ(ierr);

    /* Create the local solvers */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      /* subsolvers pick up options under the "<prefix>sub_" namespace */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
326 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
327 {
328   PC_ASM         *osm = (PC_ASM*)pc->data;
329   PetscErrorCode ierr;
330   PetscInt       i;
331 
332   PetscFunctionBegin;
333   for (i=0; i<osm->n_local_true; i++) {
334     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
335   }
336   PetscFunctionReturn(0);
337 }
338 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the preconditioner: y = sum_i R_i^T A_i^{-1} R_i x.

   The scatters are pipelined: all restriction scatters are started before
   any is completed, and each prolongation scatter is started as soon as its
   block solve finishes, overlapping communication with the local solves.
   Loops over n_local (not n_local_true) keep the collective scatters matched
   across ranks with different block counts.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions before waiting on any of them */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions, so it must start at zero */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: keep only the non-overlapping part of the solution */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
388 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the preconditioner,
   using KSPSolveTranspose() for the block solves.  Structure mirrors
   PCApply_ASM(), with the RESTRICT/INTERPOLATE roles swapped (see note
   below).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions before waiting on any of them */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions, so it must start at zero */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: keep only the non-overlapping part of the solution */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
441 
#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
/*
   PCReset_ASM - Releases the data built by PCSetUp_ASM() (submatrices,
   scatters, work vectors, index sets) but keeps the KSP solvers themselves
   (they are only KSPReset()); the KSPs are freed by PCDestroy_ASM().
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    /* the scatter/vector arrays were all allocated together in PCSetUp_ASM(),
       so osm->restriction being set implies the others are too */
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  if (osm->is) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    osm->is       = 0;
    osm->is_local = 0;
  }
  PetscFunctionReturn(0);
}
484 
485 #undef __FUNCT__
486 #define __FUNCT__ "PCDestroy_ASM"
487 static PetscErrorCode PCDestroy_ASM(PC pc)
488 {
489   PC_ASM         *osm = (PC_ASM*)pc->data;
490   PetscErrorCode ierr;
491   PetscInt       i;
492 
493   PetscFunctionBegin;
494   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
495   if (osm->ksp) {
496     for (i=0; i<osm->n_local_true; i++) {
497       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
498     }
499     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
500   }
501   ierr = PetscFree(pc->data);CHKERRQ(ierr);
502   PetscFunctionReturn(0);
503 }
504 
505 #undef __FUNCT__
506 #define __FUNCT__ "PCSetFromOptions_ASM"
507 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
508 {
509   PC_ASM         *osm = (PC_ASM*)pc->data;
510   PetscErrorCode ierr;
511   PetscInt       blocks,ovl;
512   PetscBool      symset,flg;
513   PCASMType      asmtype;
514 
515   PetscFunctionBegin;
516   /* set the type to symmetric if matrix is symmetric */
517   if (!osm->type_set && pc->pmat) {
518     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
519     if (symset && flg) { osm->type = PC_ASM_BASIC; }
520   }
521   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
522     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
523     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
524     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
525     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
526     flg  = PETSC_FALSE;
527     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
528     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
529   ierr = PetscOptionsTail();CHKERRQ(ierr);
530   PetscFunctionReturn(0);
531 }
532 
533 /*------------------------------------------------------------------------------------*/
534 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Implementation behind PCASMSetLocalSubdomains();
   records this rank's subdomain index sets, taking a reference on each.
   Must be called before PCSetUp(); after setup only a no-op repeat of the
   same configuration is allowed.
*/
PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the incoming sets BEFORE destroying the old ones, so the call
       is safe even when the caller passes the sets already stored in osm */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is[i] = is[i]; }
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
    }
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
575 
576 EXTERN_C_BEGIN
577 #undef __FUNCT__
578 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
579 PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
580 {
581   PC_ASM         *osm = (PC_ASM*)pc->data;
582   PetscErrorCode ierr;
583   PetscMPIInt    rank,size;
584   PetscInt       n;
585 
586   PetscFunctionBegin;
587   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
588   if (is || is_local) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
589 
590   /*
591      Split the subdomains equally among all processors
592   */
593   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
594   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
595   n = N/size + ((N % size) > rank);
596   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
597   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
598   if (!pc->setupcalled) {
599     if (osm->is) {
600       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
601     }
602     osm->n_local_true = n;
603     osm->is           = 0;
604     osm->is_local     = 0;
605   }
606   PetscFunctionReturn(0);
607 }
608 EXTERN_C_END
609 
610 EXTERN_C_BEGIN
611 #undef __FUNCT__
612 #define __FUNCT__ "PCASMSetOverlap_ASM"
613 PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
614 {
615   PC_ASM *osm = (PC_ASM*)pc->data;
616 
617   PetscFunctionBegin;
618   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
619   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
620   if (!pc->setupcalled) {
621     osm->overlap = ovl;
622   }
623   PetscFunctionReturn(0);
624 }
625 EXTERN_C_END
626 
627 EXTERN_C_BEGIN
628 #undef __FUNCT__
629 #define __FUNCT__ "PCASMSetType_ASM"
630 PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
631 {
632   PC_ASM *osm = (PC_ASM*)pc->data;
633 
634   PetscFunctionBegin;
635   osm->type     = type;
636   osm->type_set = PETSC_TRUE;
637   PetscFunctionReturn(0);
638 }
639 EXTERN_C_END
640 
641 EXTERN_C_BEGIN
642 #undef __FUNCT__
643 #define __FUNCT__ "PCASMSetSortIndices_ASM"
644 PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
645 {
646   PC_ASM *osm = (PC_ASM*)pc->data;
647 
648   PetscFunctionBegin;
649   osm->sort_indices = doSort;
650   PetscFunctionReturn(0);
651 }
652 EXTERN_C_END
653 
654 EXTERN_C_BEGIN
655 #undef __FUNCT__
656 #define __FUNCT__ "PCASMGetSubKSP_ASM"
657 PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
658 {
659   PC_ASM         *osm = (PC_ASM*)pc->data;
660   PetscErrorCode ierr;
661 
662   PetscFunctionBegin;
663   if (osm->n_local_true < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
664 
665   if (n_local) {
666     *n_local = osm->n_local_true;
667   }
668   if (first_local) {
669     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
670     *first_local -= osm->n_local_true;
671   }
672   if (ksp) {
673     /* Assume that local solves are now different; not necessarily
674        true though!  This flag is used only for PCView_ASM() */
675     *ksp                   = osm->ksp;
676     osm->same_local_solves = PETSC_FALSE;
677   }
678   PetscFunctionReturn(0);
679 }
680 EXTERN_C_END
681 
682 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMSetLocalSubdomains_ASM() when pc is of type ASM; silently a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
721 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMSetTotalSubdomains_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
772 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatches to PCASMSetOverlap_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
824 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatches to PCASMSetType_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
863 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Notes:
    Sorting is enabled by default (see PCCreate_ASM()).

    Level: intermediate

.keywords: PC, ASM, set, sort, indices

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatches to PCASMSetSortIndices_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
892 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed by the caller

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() (not PetscTryMethod()): calling this on a non-ASM PC is an error */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
934 
935 /* -------------------------------------------------------------------------------------*/
936 /*MC
937    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
938            its own KSP object.
939 
940    Options Database Keys:
941 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
942 .  -pc_asm_blocks <blks> - Sets total blocks
943 .  -pc_asm_overlap <ovl> - Sets overlap
944 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
945 
946      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
947       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
948       -pc_asm_type basic to use the standard ASM.
949 
950    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
951      than one processor. Defaults to one block per processor.
952 
     To set options on the solvers for each block prepend -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
955 
956      To set the options on the solvers separate for each block call PCASMGetSubKSP()
957          and set the options directly on the resulting KSP object (you can access its PC
958          with KSPGetPC())
959 
960 
961    Level: beginner
962 
963    Concepts: additive Schwarz method
964 
965     References:
966     An additive variant of the Schwarz alternating method for the case of many subregions
967     M Dryja, OB Widlund - Courant Institute, New York University Technical report
968 
969     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
970     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
971 
972 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
973            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
974            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
975 
976 M*/
977 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
/*
   PCCreate_ASM - Creates the PC_ASM context, fills in the default values for
   all of its fields, installs the PCASM function-table entries, and composes
   the PCASM-specific setter/getter methods so the PCASMSetXXX()/PCASMGetXXX()
   wrappers can dispatch to them.
*/
PetscErrorCode  PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;    /* total subdomain count determined later */
  osm->n_local           = 0;
  osm->n_local_true      = 0;
  osm->overlap           = 1;               /* default overlap of 1 */
  osm->ksp               = 0;               /* work objects are created during setup */
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT; /* restricted ASM is the default variant */
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->reset             = PCReset_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  /* register the type-specific implementations behind the generic PCASMXXX() interfaces */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
                    PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
1033 
1034 
1035 #undef __FUNCT__
1036 #define __FUNCT__ "PCASMCreateSubdomains"
1037 /*@C
1038    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1039    preconditioner for a any problem on a general grid.
1040 
1041    Collective
1042 
1043    Input Parameters:
1044 +  A - The global matrix operator
1045 -  n - the number of local blocks
1046 
1047    Output Parameters:
1048 .  outis - the array of index sets defining the subdomains
1049 
1050    Level: advanced
1051 
1052    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1053     from these if you use PCASMSetLocalSubdomains()
1054 
1055     In the Fortran version you must provide the array outis[] already allocated of length n.
1056 
1057 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1058 
1059 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1060 @*/
1061 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1062 {
1063   MatPartitioning           mpart;
1064   const char                *prefix;
1065   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1066   PetscMPIInt               size;
1067   PetscInt                  i,j,rstart,rend,bs;
1068   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1069   Mat                       Ad = PETSC_NULL, adj;
1070   IS                        ispart,isnumb,*is;
1071   PetscErrorCode            ierr;
1072 
1073   PetscFunctionBegin;
1074   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1075   PetscValidPointer(outis,3);
1076   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1077 
1078   /* Get prefix, row distribution, and block size */
1079   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1080   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1081   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1082   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1083 
1084   /* Get diagonal block from matrix if possible */
1085   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1086   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1087   if (f) {
1088     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1089   } else if (size == 1) {
1090     iscopy = PETSC_FALSE; Ad = A;
1091   } else {
1092     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1093   }
1094   if (Ad) {
1095     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1096     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1097   }
1098   if (Ad && n > 1) {
1099     PetscBool  match,done;
1100     /* Try to setup a good matrix partitioning if available */
1101     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1102     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1103     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1104     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1105     if (!match) {
1106       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1107     }
1108     if (!match) { /* assume a "good" partitioner is available */
1109       PetscInt na,*ia,*ja;
1110       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1111       if (done) {
1112 	/* Build adjacency matrix by hand. Unfortunately a call to
1113 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1114 	   remove the block-aij structure and we cannot expect
1115 	   MatPartitioning to split vertices as we need */
1116 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1117 	nnz = 0;
1118 	for (i=0; i<na; i++) { /* count number of nonzeros */
1119 	  len = ia[i+1] - ia[i];
1120 	  row = ja + ia[i];
1121 	  for (j=0; j<len; j++) {
1122 	    if (row[j] == i) { /* don't count diagonal */
1123 	      len--; break;
1124 	    }
1125 	  }
1126 	  nnz += len;
1127 	}
1128 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1129 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1130 	nnz    = 0;
1131 	iia[0] = 0;
1132 	for (i=0; i<na; i++) { /* fill adjacency */
1133 	  cnt = 0;
1134 	  len = ia[i+1] - ia[i];
1135 	  row = ja + ia[i];
1136 	  for (j=0; j<len; j++) {
1137 	    if (row[j] != i) { /* if not diagonal */
1138 	      jja[nnz+cnt++] = row[j];
1139 	    }
1140 	  }
1141 	  nnz += cnt;
1142 	  iia[i+1] = nnz;
1143 	}
1144 	/* Partitioning of the adjacency matrix */
1145 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1146 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1147 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1148 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1149 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1150 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1151 	foundpart = PETSC_TRUE;
1152       }
1153       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1154     }
1155     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1156   }
1157   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1158 
1159   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1160   *outis = is;
1161 
1162   if (!foundpart) {
1163 
1164     /* Partitioning by contiguous chunks of rows */
1165 
1166     PetscInt mbs   = (rend-rstart)/bs;
1167     PetscInt start = rstart;
1168     for (i=0; i<n; i++) {
1169       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1170       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1171       start += count;
1172     }
1173 
1174   } else {
1175 
1176     /* Partitioning by adjacency of diagonal block  */
1177 
1178     const PetscInt *numbering;
1179     PetscInt       *count,nidx,*indices,*newidx,start=0;
1180     /* Get node count in each partition */
1181     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1182     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1183     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1184       for (i=0; i<n; i++) count[i] *= bs;
1185     }
1186     /* Build indices from node numbering */
1187     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1188     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1189     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1190     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1191     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1192     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1193     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1194       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1195       for (i=0; i<nidx; i++)
1196 	for (j=0; j<bs; j++)
1197 	  newidx[i*bs+j] = indices[i]*bs + j;
1198       ierr = PetscFree(indices);CHKERRQ(ierr);
1199       nidx   *= bs;
1200       indices = newidx;
1201     }
1202     /* Shift to get global indices */
1203     for (i=0; i<nidx; i++) indices[i] += rstart;
1204 
1205     /* Build the index sets for each block */
1206     for (i=0; i<n; i++) {
1207       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1208       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1209       start += count[i];
1210     }
1211 
1212     ierr = PetscFree(count);
1213     ierr = PetscFree(indices);
1214     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1215     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1216 
1217   }
1218 
1219   PetscFunctionReturn(0);
1220 }
1221 
1222 #undef __FUNCT__
1223 #define __FUNCT__ "PCASMDestroySubdomains"
1224 /*@C
1225    PCASMDestroySubdomains - Destroys the index sets created with
1226    PCASMCreateSubdomains(). Should be called after setting subdomains
1227    with PCASMSetLocalSubdomains().
1228 
1229    Collective
1230 
1231    Input Parameters:
1232 +  n - the number of index sets
1233 .  is - the array of index sets
1234 -  is_local - the array of local index sets, can be PETSC_NULL
1235 
1236    Level: advanced
1237 
1238 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1239 
1240 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1241 @*/
1242 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1243 {
1244   PetscInt       i;
1245   PetscErrorCode ierr;
1246   PetscFunctionBegin;
1247   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1248   PetscValidPointer(is,2);
1249   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1250   ierr = PetscFree(is);CHKERRQ(ierr);
1251   if (is_local) {
1252     PetscValidPointer(is_local,3);
1253     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1254     ierr = PetscFree(is_local);CHKERRQ(ierr);
1255   }
1256   PetscFunctionReturn(0);
1257 }
1258 
1259 #undef __FUNCT__
1260 #define __FUNCT__ "PCASMCreateSubdomains2D"
1261 /*@
1262    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1263    preconditioner for a two-dimensional problem on a regular grid.
1264 
1265    Not Collective
1266 
1267    Input Parameters:
1268 +  m, n - the number of mesh points in the x and y directions
1269 .  M, N - the number of subdomains in the x and y directions
1270 .  dof - degrees of freedom per node
1271 -  overlap - overlap in mesh lines
1272 
1273    Output Parameters:
1274 +  Nsub - the number of subdomains created
1275 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1276 -  is_local - array of index sets defining non-overlapping subdomains
1277 
1278    Note:
1279    Presently PCAMSCreateSubdomains2d() is valid only for sequential
1280    preconditioners.  More general related routines are
1281    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1282 
1283    Level: advanced
1284 
1285 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1286 
1287 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1288           PCASMSetOverlap()
1289 @*/
1290 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1291 {
1292   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1293   PetscErrorCode ierr;
1294   PetscInt       nidx,*idx,loc,ii,jj,count;
1295 
1296   PetscFunctionBegin;
1297   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1298 
1299   *Nsub = N*M;
1300   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1301   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1302   ystart = 0;
1303   loc_outer = 0;
1304   for (i=0; i<N; i++) {
1305     height = n/N + ((n % N) > i); /* height of subdomain */
1306     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1307     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1308     yright = ystart + height + overlap; if (yright > n) yright = n;
1309     xstart = 0;
1310     for (j=0; j<M; j++) {
1311       width = m/M + ((m % M) > j); /* width of subdomain */
1312       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1313       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1314       xright = xstart + width + overlap; if (xright > m) xright = m;
1315       nidx   = (xright - xleft)*(yright - yleft);
1316       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1317       loc    = 0;
1318       for (ii=yleft; ii<yright; ii++) {
1319         count = m*ii + xleft;
1320         for (jj=xleft; jj<xright; jj++) {
1321           idx[loc++] = count++;
1322         }
1323       }
1324       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1325       if (overlap == 0) {
1326         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1327         (*is_local)[loc_outer] = (*is)[loc_outer];
1328       } else {
1329         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1330           for (jj=xstart; jj<xstart+width; jj++) {
1331             idx[loc++] = m*ii + jj;
1332           }
1333         }
1334         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1335       }
1336       ierr = PetscFree(idx);CHKERRQ(ierr);
1337       xstart += width;
1338       loc_outer++;
1339     }
1340     ystart += height;
1341   }
1342   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 #undef __FUNCT__
1347 #define __FUNCT__ "PCASMGetLocalSubdomains"
1348 /*@C
1349     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1350     only) for the additive Schwarz preconditioner.
1351 
1352     Not Collective
1353 
1354     Input Parameter:
1355 .   pc - the preconditioner context
1356 
1357     Output Parameters:
1358 +   n - the number of subdomains for this processor (default value = 1)
1359 .   is - the index sets that define the subdomains for this processor
1360 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1361 
1362 
1363     Notes:
1364     The IS numbering is in the parallel, global numbering of the vector.
1365 
1366     Level: advanced
1367 
1368 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1369 
1370 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1371           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1372 @*/
1373 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1374 {
1375   PC_ASM         *osm;
1376   PetscErrorCode ierr;
1377   PetscBool      match;
1378 
1379   PetscFunctionBegin;
1380   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1381   PetscValidIntPointer(n,2);
1382   if (is) PetscValidPointer(is,3);
1383   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1384   if (!match) {
1385     if (n)  *n  = 0;
1386     if (is) *is = PETSC_NULL;
1387   } else {
1388     osm = (PC_ASM*)pc->data;
1389     if (n)  *n  = osm->n_local_true;
1390     if (is) *is = osm->is;
1391     if (is_local) *is_local = osm->is_local;
1392   }
1393   PetscFunctionReturn(0);
1394 }
1395 
1396 #undef __FUNCT__
1397 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1398 /*@C
1399     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1400     only) for the additive Schwarz preconditioner.
1401 
1402     Not Collective
1403 
1404     Input Parameter:
1405 .   pc - the preconditioner context
1406 
1407     Output Parameters:
1408 +   n - the number of matrices for this processor (default value = 1)
1409 -   mat - the matrices
1410 
1411 
1412     Level: advanced
1413 
1414 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1415 
1416 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1417           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1418 @*/
1419 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1420 {
1421   PC_ASM         *osm;
1422   PetscErrorCode ierr;
1423   PetscBool      match;
1424 
1425   PetscFunctionBegin;
1426   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1427   PetscValidIntPointer(n,2);
1428   if (mat) PetscValidPointer(mat,3);
1429   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1430   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1431   if (!match) {
1432     if (n)   *n   = 0;
1433     if (mat) *mat = PETSC_NULL;
1434   } else {
1435     osm = (PC_ASM*)pc->data;
1436     if (n)   *n   = osm->n_local_true;
1437     if (mat) *mat = osm->pmat;
1438   }
1439   PetscFunctionReturn(0);
1440 }
1441