xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 6849ba73f22fecb8f92ef896a42e4e8bd4cd6965)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
13 
typedef struct {
  int        n,n_local,n_local_true; /* total subdomains on all procs; max per proc; actual count on this proc (see file header) */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup */
  int        overlap;             /* overlap requested by user */
  KSP       *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* per-subdomain work vectors: RHS (x) and solution (y) */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCView_ASM"
33 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
34 {
35   PC_ASM      *jac = (PC_ASM*)pc->data;
36   PetscErrorCode ierr;
37   int         rank,i;
38   const char  *cstring = 0;
39   PetscTruth  iascii,isstring;
40   PetscViewer sviewer;
41 
42 
43   PetscFunctionBegin;
44   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
46   if (iascii) {
47     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %d, amount of overlap = %d\n",jac->n,jac->overlap);CHKERRQ(ierr);
48     if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
49     else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
50     else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
51     else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
52     else                                      cstring = "Unknown ASM type";
53     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
54     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
55     if (jac->same_local_solves) {
56       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
57       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
58       if (!rank && jac->ksp) {
59         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
60         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
61         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
62       }
63       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
64     } else {
65       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
66       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d\n",rank,jac->n_local);CHKERRQ(ierr);
67       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
68       for (i=0; i<jac->n_local; i++) {
69         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr);
70         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
71         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
72         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
73         if (i != jac->n_local-1) {
74           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
75         }
76       }
77       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
78       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
79     }
80   } else if (isstring) {
81     ierr = PetscViewerStringSPrintf(viewer," blks=%d, overlap=%d, type=%d",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
82     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
83       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
84     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
85   } else {
86     SETERRQ1(1,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
87   }
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "PCSetUp_ASM"
93 static PetscErrorCode PCSetUp_ASM(PC pc)
94 {
95   PC_ASM   *osm  = (PC_ASM*)pc->data;
96   PetscErrorCode ierr;
97   int      i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
98   int      start,start_val,end_val,size,sz,bs;
99   MatReuse scall = MAT_REUSE_MATRIX;
100   IS       isl;
101   KSP      ksp;
102   PC       subpc;
103   char     *prefix,*pprefix;
104   Vec      vec;
105 
106   PetscFunctionBegin;
107   ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
108   if (!pc->setupcalled) {
109     if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
110       /* no subdomains given, use one per processor */
111       osm->n_local_true = osm->n_local = 1;
112       ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
113       osm->n = size;
114     } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
115       int inwork[2],outwork[2];
116       inwork[0] = inwork[1] = osm->n_local_true;
117       ierr = MPI_Allreduce(inwork,outwork,1,MPI_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
118       osm->n_local = outwork[0];
119       osm->n       = outwork[1];
120     }
121     n_local      = osm->n_local;
122     n_local_true = osm->n_local_true;
123     if (!osm->is){ /* build the index sets */
124       ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
125       ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
126       ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
127       sz    = end_val - start_val;
128       start = start_val;
129       if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
130         SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
131       }
132       for (i=0; i<n_local_true; i++){
133         size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
134         ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
135         start      += size;
136         osm->is[i] =  isl;
137       }
138       osm->is_flg = PETSC_TRUE;
139     }
140 
141     ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
142     ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
143     ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
144     osm->y = osm->x + n_local;
145 
146     /*  Extend the "overlapping" regions by a number of steps  */
147     ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
148     for (i=0; i<n_local_true; i++) {
149       ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
150     }
151 
152     /* create the local work vectors and scatter contexts */
153     for (i=0; i<n_local_true; i++) {
154       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
155       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
156       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
157       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
158       ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
159       ierr = ISDestroy(isl);CHKERRQ(ierr);
160     }
161     for (i=n_local_true; i<n_local; i++) {
162       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
163       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
164       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
165       ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
166       ierr = ISDestroy(isl);CHKERRQ(ierr);
167     }
168 
169    /*
170        Create the local solvers.
171     */
172     for (i=0; i<n_local_true; i++) {
173       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
174       PetscLogObjectParent(pc,ksp);
175       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
176       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
177       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
178       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
179       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
180       osm->ksp[i] = ksp;
181     }
182     scall = MAT_INITIAL_MATRIX;
183   } else {
184     /*
185        Destroy the blocks from the previous iteration
186     */
187     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
188       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
189       scall = MAT_INITIAL_MATRIX;
190     }
191   }
192 
193   /* extract out the submatrices */
194   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
195 
196   /* Return control to the user so that the submatrices can be modified (e.g., to apply
197      different boundary conditions for the submatrices than for the global problem) */
198   ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
199 
200   /* loop over subdomains putting them into local ksp */
201   ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
202   for (i=0; i<n_local_true; i++) {
203     ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
204     PetscLogObjectParent(pc,osm->pmat[i]);
205     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
206     ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
207   }
208   ierr = VecDestroy(vec);CHKERRQ(ierr);
209   PetscFunctionReturn(0);
210 }
211 
212 #undef __FUNCT__
213 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
214 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
215 {
216   PC_ASM *osm = (PC_ASM*)pc->data;
217   PetscErrorCode ierr;
218   int    i;
219 
220   PetscFunctionBegin;
221   for (i=0; i<osm->n_local_true; i++) {
222     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
223   }
224   /*
225      If inplace flag is set, then destroy the matrix after the setup
226      on blocks is done.
227   */
228   if (osm->inplace && osm->n_local_true > 0) {
229     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
230   }
231   PetscFunctionReturn(0);
232 }
233 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restrict the
   global vector x onto each (overlapping) subdomain, solve each subdomain
   problem with its KSP, then add the subdomain solutions back into y.

   All forward scatters are begun before any local solve so communication
   can overlap computation.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* begin all restrictions (global -> subdomain) before any solve */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates the ADD_VALUES contributions below, so zero it first */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves
     (padding subdomains exist so every process joins every scatter) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
280 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner: same scatter/solve/scatter pipeline as PCApply_ASM()
   but using KSPSolveTranspose() and with the restrict/interpolate
   limiting swapped (see note below).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* begin all restrictions (global -> subdomain) before any solve */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates the ADD_VALUES contributions below, so zero it first */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
330 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Frees everything the ASM setup created: scatters, work
   vectors, submatrices, block KSP solvers, the index sets (only when this
   PC built them itself), and finally the PC_ASM context.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  int    i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  /* with the in-place option the submatrices were already destroyed in
     PCSetUpOnBlocks_ASM() */
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* is_flg is set only when PCSetUp_ASM() created the index sets;
     user-supplied index sets are the caller's to destroy */
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  /* osm->y aliases the tail of the osm->x allocation, so only x is freed */
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "PCSetFromOptions_ASM"
366 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
367 {
368   PC_ASM     *osm = (PC_ASM*)pc->data;
369   PetscErrorCode ierr;
370   int        blocks,ovl,indx;
371   PetscTruth flg,set,sym;
372   const char *type[] = {"none","restrict","interpolate","basic"};
373 
374   PetscFunctionBegin;
375   /* set the type to symmetric if matrix is symmetric */
376   if (pc->pmat && !osm->type_set) {
377     ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
378     if (set && sym) {
379       osm->type = PC_ASM_BASIC;
380     }
381   }
382   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
383     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
384     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
385     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
386     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
387     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
388     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
389     ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
390     if (flg) {
391       ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
392     }
393   ierr = PetscOptionsTail();CHKERRQ(ierr);
394   PetscFunctionReturn(0);
395 }
396 
397 /*------------------------------------------------------------------------------------*/
398 
399 EXTERN_C_BEGIN
400 #undef __FUNCT__
401 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
402 PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,int n,IS is[])
403 {
404   PC_ASM *osm = (PC_ASM*)pc->data;
405 
406   PetscFunctionBegin;
407   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
408 
409   if (pc->setupcalled && n != osm->n_local_true) {
410     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
411   }
412   if (!pc->setupcalled){
413     osm->n_local_true = n;
414     osm->is           = is;
415   }
416   PetscFunctionReturn(0);
417 }
418 EXTERN_C_END
419 
420 EXTERN_C_BEGIN
421 #undef __FUNCT__
422 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
423 PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,int N,IS *is)
424 {
425   PC_ASM *osm = (PC_ASM*)pc->data;
426   PetscErrorCode ierr;
427   int    rank,size,n;
428 
429   PetscFunctionBegin;
430 
431   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
432 they cannot be set globally yet.");
433 
434   /*
435      Split the subdomains equally amoung all processors
436   */
437   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
438   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
439   n = N/size + ((N % size) > rank);
440   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
441   if (!pc->setupcalled){
442     osm->n_local_true = n;
443     osm->is           = 0;
444   }
445   PetscFunctionReturn(0);
446 }
447 EXTERN_C_END
448 
449 EXTERN_C_BEGIN
450 #undef __FUNCT__
451 #define __FUNCT__ "PCASMSetOverlap_ASM"
452 PetscErrorCode PCASMSetOverlap_ASM(PC pc,int ovl)
453 {
454   PC_ASM *osm;
455 
456   PetscFunctionBegin;
457   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
458 
459   osm               = (PC_ASM*)pc->data;
460   osm->overlap      = ovl;
461   PetscFunctionReturn(0);
462 }
463 EXTERN_C_END
464 
465 EXTERN_C_BEGIN
466 #undef __FUNCT__
467 #define __FUNCT__ "PCASMSetType_ASM"
468 PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
469 {
470   PC_ASM *osm;
471 
472   PetscFunctionBegin;
473   osm        = (PC_ASM*)pc->data;
474   osm->type     = type;
475   osm->type_set = PETSC_TRUE;
476   PetscFunctionReturn(0);
477 }
478 EXTERN_C_END
479 
480 EXTERN_C_BEGIN
481 #undef __FUNCT__
482 #define __FUNCT__ "PCASMGetSubKSP_ASM"
483 PetscErrorCode PCASMGetSubKSP_ASM(PC pc,int *n_local,int *first_local,KSP **ksp)
484 {
485   PC_ASM   *jac = (PC_ASM*)pc->data;
486   PetscErrorCode ierr;
487 
488   PetscFunctionBegin;
489   if (jac->n_local_true < 0) {
490     SETERRQ(1,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
491   }
492 
493   if (n_local)     *n_local     = jac->n_local_true;
494   if (first_local) {
495     ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
496     *first_local -= jac->n_local_true;
497   }
498   *ksp                         = jac->ksp;
499   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
500                                       not necessarily true though!  This flag is
501                                       used only for PCView_ASM() */
502   PetscFunctionReturn(0);
503 }
504 EXTERN_C_END
505 
506 EXTERN_C_BEGIN
507 #undef __FUNCT__
508 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
509 PetscErrorCode PCASMSetUseInPlace_ASM(PC pc)
510 {
511   PC_ASM *dir;
512 
513   PetscFunctionBegin;
514   dir          = (PC_ASM*)pc->data;
515   dir->inplace = PETSC_TRUE;
516   PetscFunctionReturn(0);
517 }
518 EXTERN_C_END
519 
520 /*----------------------------------------------------------------------------*/
521 #undef __FUNCT__
522 #define __FUNCT__ "PCASMSetUseInPlace"
523 /*@
524    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
525 
526    Collective on PC
527 
528    Input Parameters:
529 .  pc - the preconditioner context
530 
531    Options Database Key:
532 .  -pc_asm_in_place - Activates in-place factorization
533 
534    Note:
535    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
536    when the original matrix is not required during the Solve process.
537    This destroys the matrix, early thus, saving on memory usage.
538 
539    Level: intermediate
540 
541 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
542 
543 .seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace ()
544 @*/
545 PetscErrorCode PCASMSetUseInPlace(PC pc)
546 {
547   PetscErrorCode ierr,(*f)(PC);
548 
549   PetscFunctionBegin;
550   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
551   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
552   if (f) {
553     ierr = (*f)(pc);CHKERRQ(ierr);
554   }
555   PetscFunctionReturn(0);
556 }
557 /*----------------------------------------------------------------------------*/
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "PCASMSetLocalSubdomains"
561 /*@C
562     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
563     only) for the additive Schwarz preconditioner.
564 
565     Collective on PC
566 
567     Input Parameters:
568 +   pc - the preconditioner context
569 .   n - the number of subdomains for this processor (default value = 1)
570 -   is - the index sets that define the subdomains for this processor
571          (or PETSC_NULL for PETSc to determine subdomains)
572 
573     Notes:
574     The IS numbering is in the parallel, global numbering of the vector.
575 
576     By default the ASM preconditioner uses 1 block per processor.
577 
578     These index sets cannot be destroyed until after completion of the
579     linear solves for which the ASM preconditioner is being used.
580 
581     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
582 
583     Level: advanced
584 
585 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
586 
587 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
588           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
589 @*/
590 PetscErrorCode PCASMSetLocalSubdomains(PC pc,int n,IS is[])
591 {
592   PetscErrorCode ierr,(*f)(PC,int,IS[]);
593 
594   PetscFunctionBegin;
595   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
596   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
597   if (f) {
598     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
599   }
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "PCASMSetTotalSubdomains"
605 /*@C
606     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
607     additive Schwarz preconditioner.  Either all or no processors in the
608     PC communicator must call this routine, with the same index sets.
609 
610     Collective on PC
611 
612     Input Parameters:
613 +   pc - the preconditioner context
614 .   n - the number of subdomains for all processors
615 -   is - the index sets that define the subdomains for all processor
616          (or PETSC_NULL for PETSc to determine subdomains)
617 
618     Options Database Key:
619     To set the total number of subdomain blocks rather than specify the
620     index sets, use the option
621 .    -pc_asm_blocks <blks> - Sets total blocks
622 
623     Notes:
624     Currently you cannot use this to set the actual subdomains with the argument is.
625 
626     By default the ASM preconditioner uses 1 block per processor.
627 
628     These index sets cannot be destroyed until after completion of the
629     linear solves for which the ASM preconditioner is being used.
630 
631     Use PCASMSetLocalSubdomains() to set local subdomains.
632 
633     Level: advanced
634 
635 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
636 
637 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
638           PCASMCreateSubdomains2D()
639 @*/
640 PetscErrorCode PCASMSetTotalSubdomains(PC pc,int N,IS *is)
641 {
642   PetscErrorCode ierr,(*f)(PC,int,IS *);
643 
644   PetscFunctionBegin;
645   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
646   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
647   if (f) {
648     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
649   }
650   PetscFunctionReturn(0);
651 }
652 
653 #undef __FUNCT__
654 #define __FUNCT__ "PCASMSetOverlap"
655 /*@
656     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
657     additive Schwarz preconditioner.  Either all or no processors in the
658     PC communicator must call this routine.
659 
660     Collective on PC
661 
662     Input Parameters:
663 +   pc  - the preconditioner context
664 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
665 
666     Options Database Key:
667 .   -pc_asm_overlap <ovl> - Sets overlap
668 
669     Notes:
670     By default the ASM preconditioner uses 1 block per processor.  To use
671     multiple blocks per perocessor, see PCASMSetTotalSubdomains() and
672     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
673 
674     The overlap defaults to 1, so if one desires that no additional
675     overlap be computed beyond what may have been set with a call to
676     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
677     must be set to be 0.  In particular, if one does not explicitly set
678     the subdomains an application code, then all overlap would be computed
679     internally by PETSc, and using an overlap of 0 would result in an ASM
680     variant that is equivalent to the block Jacobi preconditioner.
681 
682     Note that one can define initial index sets with any overlap via
683     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
684     PCASMSetOverlap() merely allows PETSc to extend that overlap further
685     if desired.
686 
687     Level: intermediate
688 
689 .keywords: PC, ASM, set, overlap
690 
691 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
692           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
693 @*/
694 PetscErrorCode PCASMSetOverlap(PC pc,int ovl)
695 {
696   PetscErrorCode ierr,(*f)(PC,int);
697 
698   PetscFunctionBegin;
699   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
700   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
701   if (f) {
702     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
703   }
704   PetscFunctionReturn(0);
705 }
706 
707 #undef __FUNCT__
708 #define __FUNCT__ "PCASMSetType"
709 /*@
710     PCASMSetType - Sets the type of restriction and interpolation used
711     for local problems in the additive Schwarz method.
712 
713     Collective on PC
714 
715     Input Parameters:
716 +   pc  - the preconditioner context
717 -   type - variant of ASM, one of
718 .vb
719       PC_ASM_BASIC       - full interpolation and restriction
720       PC_ASM_RESTRICT    - full restriction, local processor interpolation
721       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
722       PC_ASM_NONE        - local processor restriction and interpolation
723 .ve
724 
725     Options Database Key:
726 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
727 
728     Level: intermediate
729 
730 .keywords: PC, ASM, set, type
731 
732 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
733           PCASMCreateSubdomains2D()
734 @*/
735 PetscErrorCode PCASMSetType(PC pc,PCASMType type)
736 {
737   PetscErrorCode ierr,(*f)(PC,PCASMType);
738 
739   PetscFunctionBegin;
740   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
741   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
742   if (f) {
743     ierr = (*f)(pc,type);CHKERRQ(ierr);
744   }
745   PetscFunctionReturn(0);
746 }
747 
748 #undef __FUNCT__
749 #define __FUNCT__ "PCASMGetSubKSP"
750 /*@C
751    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
752    this processor.
753 
754    Collective on PC iff first_local is requested
755 
756    Input Parameter:
757 .  pc - the preconditioner context
758 
759    Output Parameters:
760 +  n_local - the number of blocks on this processor or PETSC_NULL
761 .  first_local - the global number of the first block on this processor or PETSC_NULL,
762                  all processors must request or all must pass PETSC_NULL
763 -  ksp - the array of KSP contexts
764 
765    Note:
766    After PCASMGetSubKSP() the array of KSPes is not to be freed
767 
768    Currently for some matrix implementations only 1 block per processor
769    is supported.
770 
771    You must call KSPSetUp() before calling PCASMGetSubKSP().
772 
773    Level: advanced
774 
775 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
776 
777 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
778           PCASMCreateSubdomains2D(),
779 @*/
780 PetscErrorCode PCASMGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[])
781 {
782   PetscErrorCode ierr,(*f)(PC,int*,int*,KSP **);
783 
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
786   PetscValidIntPointer(n_local,2);
787   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
788   if (f) {
789     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
790   } else {
791     SETERRQ(1,"Cannot get subksp for this type of PC");
792   }
793 
794  PetscFunctionReturn(0);
795 }
796 
797 /* -------------------------------------------------------------------------------------*/
798 /*MC
799    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
800            its own KSP object.
801 
802    Options Database Keys:
803 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
804 .  -pc_asm_in_place - Activates in-place factorization
805 .  -pc_asm_blocks <blks> - Sets total blocks
806 .  -pc_asm_overlap <ovl> - Sets overlap
807 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
808 
809      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
810       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
811       -pc_asm_type basic to use the standard ASM.
812 
813    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
814      than one processor. Defaults to one block per processor.
815 
816      To set options on the solvers for each block append -sub_ to all the KSP, and PC
817         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
818 
819      To set the options on the solvers separately for each block call PCASMGetSubKSP()
820          and set the options directly on the resulting KSP object (you can access its PC
821          with KSPGetPC())
822 
823 
824    Level: beginner
825 
826    Concepts: additive Schwarz method
827 
828 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
829            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
830            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
831            PCASMSetUseInPlace()
832 M*/
833 
834 EXTERN_C_BEGIN
835 #undef __FUNCT__
836 #define __FUNCT__ "PCCreate_ASM"
837 PetscErrorCode PCCreate_ASM(PC pc)
838 {
839   PetscErrorCode ierr;
840   PC_ASM *osm;
841 
842   PetscFunctionBegin;
843   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
844   PetscLogObjectMemory(pc,sizeof(PC_ASM));
845   ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
846   osm->n                 = PETSC_DECIDE;
847   osm->n_local           = 0;
848   osm->n_local_true      = PETSC_DECIDE;
849   osm->overlap           = 1;
850   osm->is_flg            = PETSC_FALSE;
851   osm->ksp              = 0;
852   osm->scat              = 0;
853   osm->is                = 0;
854   osm->mat               = 0;
855   osm->pmat              = 0;
856   osm->type              = PC_ASM_RESTRICT;
857   osm->same_local_solves = PETSC_TRUE;
858   osm->inplace           = PETSC_FALSE;
859   pc->data               = (void*)osm;
860 
861   pc->ops->apply             = PCApply_ASM;
862   pc->ops->applytranspose    = PCApplyTranspose_ASM;
863   pc->ops->setup             = PCSetUp_ASM;
864   pc->ops->destroy           = PCDestroy_ASM;
865   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
866   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
867   pc->ops->view              = PCView_ASM;
868   pc->ops->applyrichardson   = 0;
869 
870   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
871                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
872   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
873                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
874   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
875                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
876   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
877                     PCASMSetType_ASM);CHKERRQ(ierr);
878   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
879                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
880 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
881                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 EXTERN_C_END
885 
886 
887 #undef __FUNCT__
888 #define __FUNCT__ "PCASMCreateSubdomains2D"
889 /*@
890    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
891    preconditioner for a two-dimensional problem on a regular grid.
892 
893    Not Collective
894 
895    Input Parameters:
896 +  m, n - the number of mesh points in the x and y directions
897 .  M, N - the number of subdomains in the x and y directions
898 .  dof - degrees of freedom per node
899 -  overlap - overlap in mesh lines
900 
901    Output Parameters:
902 +  Nsub - the number of subdomains created
903 -  is - the array of index sets defining the subdomains
904 
905    Note:
906    Presently PCASMCreateSubdomains2D() is valid only for sequential
907    preconditioners.  More general related routines are
908    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
909 
910    Level: advanced
911 
912 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
913 
914 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
915           PCASMSetOverlap()
916 @*/
917 PetscErrorCode PCASMCreateSubdomains2D(int m,int n,int M,int N,int dof,int overlap,int *Nsub,IS **is)
918 {
919   int i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
920   PetscErrorCode ierr;
921   int nidx,*idx,loc,ii,jj,count;
922 
923   PetscFunctionBegin;
924   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
925 
926   *Nsub = N*M;
927   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
928   ystart = 0;
929   loc_outter = 0;
930   for (i=0; i<N; i++) {
931     height = n/N + ((n % N) > i); /* height of subdomain */
932     if (height < 2) SETERRQ(1,"Too many N subdomains for mesh dimension n");
933     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
934     yright = ystart + height + overlap; if (yright > n) yright = n;
935     xstart = 0;
936     for (j=0; j<M; j++) {
937       width = m/M + ((m % M) > j); /* width of subdomain */
938       if (width < 2) SETERRQ(1,"Too many M subdomains for mesh dimension m");
939       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
940       xright = xstart + width + overlap; if (xright > m) xright = m;
941       nidx   = (xright - xleft)*(yright - yleft);
942       ierr = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
943       loc    = 0;
944       for (ii=yleft; ii<yright; ii++) {
945         count = m*ii + xleft;
946         for (jj=xleft; jj<xright; jj++) {
947           idx[loc++] = count++;
948         }
949       }
950       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
951       ierr = PetscFree(idx);CHKERRQ(ierr);
952       xstart += width;
953     }
954     ystart += height;
955   }
956   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
957   PetscFunctionReturn(0);
958 }
959 
960 #undef __FUNCT__
961 #define __FUNCT__ "PCASMGetLocalSubdomains"
962 /*@C
963     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
964     only) for the additive Schwarz preconditioner.
965 
966     Collective on PC
967 
968     Input Parameter:
969 .   pc - the preconditioner context
970 
971     Output Parameters:
972 +   n - the number of subdomains for this processor (default value = 1)
973 -   is - the index sets that define the subdomains for this processor
974 
975 
976     Notes:
977     The IS numbering is in the parallel, global numbering of the vector.
978 
979     Level: advanced
980 
981 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
982 
983 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
984           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
985 @*/
986 PetscErrorCode PCASMGetLocalSubdomains(PC pc,int *n,IS *is[])
987 {
988   PC_ASM *osm;
989 
990   PetscFunctionBegin;
991   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
992   PetscValidIntPointer(n,2);
993   if (!pc->setupcalled) {
994     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
995   }
996 
997   osm = (PC_ASM*)pc->data;
998   if (n)  *n = osm->n_local_true;
999   if (is) *is = osm->is;
1000   PetscFunctionReturn(0);
1001 }
1002 
1003 #undef __FUNCT__
1004 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1005 /*@C
1006     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1007     only) for the additive Schwarz preconditioner.
1008 
1009     Collective on PC
1010 
1011     Input Parameter:
1012 .   pc - the preconditioner context
1013 
1014     Output Parameters:
1015 +   n - the number of matrices for this processor (default value = 1)
1016 -   mat - the matrices
1017 
1018 
1019     Level: advanced
1020 
1021 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1022 
1023 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1024           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1025 @*/
1026 PetscErrorCode PCASMGetLocalSubmatrices(PC pc,int *n,Mat *mat[])
1027 {
1028   PC_ASM *osm;
1029 
1030   PetscFunctionBegin;
1031   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1032   PetscValidPointer(n,2);
1033   if (!pc->setupcalled) {
1034     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1035   }
1036 
1037   osm = (PC_ASM*)pc->data;
1038   if (n)   *n   = osm->n_local_true;
1039   if (mat) *mat = osm->pmat;
1040   PetscFunctionReturn(0);
1041 }
1042 
1043