xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision d41222bbcdea31b88e23614a3c2b1a0fe84fa572)
1 /*$Id: asm.c,v 1.131 2001/08/07 03:03:38 balay Exp $*/
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
14 
typedef struct {
  int        n,n_local,n_local_true; /* n = total subdomains over all processes;
                                        n_local_true = subdomains actually owned by this process;
                                        n_local = max over all processes of n_local_true
                                        (see the file header comment above) */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup
                                     (then this PC owns and must free the IS array) */
  int        overlap;             /* overlap requested by user */
  KSP       *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* per-block work vectors (x: restricted RHS, y: local solution) */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_ASM"
34 static int PCView_ASM(PC pc,PetscViewer viewer)
35 {
36   PC_ASM      *jac = (PC_ASM*)pc->data;
37   int         rank,ierr,i;
38   const char  *cstring = 0;
39   PetscTruth  isascii,isstring;
40   PetscViewer sviewer;
41 
42 
43   PetscFunctionBegin;
44   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
46   if (isascii) {
47     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %d, amount of overlap = %d\n",jac->n,jac->overlap);CHKERRQ(ierr);
48     if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
49     else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
50     else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
51     else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
52     else                                      cstring = "Unknown ASM type";
53     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
54     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
55     if (jac->same_local_solves) {
56       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
57       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
58       if (!rank && jac->ksp) {
59         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
60         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
61         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
62       }
63       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
64     } else {
65       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
66       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d\n",rank,jac->n_local);CHKERRQ(ierr);
67       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
68       for (i=0; i<jac->n_local; i++) {
69         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr);
70         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
71         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
72         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
73         if (i != jac->n_local-1) {
74           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
75         }
76       }
77       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
78       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
79     }
80   } else if (isstring) {
81     ierr = PetscViewerStringSPrintf(viewer," blks=%d, overlap=%d, type=%d",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
82     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
83       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
84     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
85   } else {
86     SETERRQ1(1,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
87   }
88   PetscFunctionReturn(0);
89 }
90 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds the ASM data structures: determines the subdomain
   partition, creates index sets when the user supplied none, allocates
   per-block work vectors/scatters, creates one (sequential) KSP per local
   block, then extracts the submatrices and passes them to the block solvers.
*/
static int PCSetUp_ASM(PC pc)
{
  PC_ASM   *osm  = (PC_ASM*)pc->data;
  int      i,ierr,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  int      start,start_val,end_val,size,sz,bs;
  MatReuse scall = MAT_REUSE_MATRIX;
  IS       isl;
  KSP      ksp;
  PC       subpc;
  char     *prefix,*pprefix;
  Vec      vec;

  PetscFunctionBegin;
  /* template vector matching the pmat row layout, used to build scatters */
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      int inwork[2],outwork[2];
      /* NOTE(review): PetscMaxSum_Op presumably yields max in outwork[0] and
         sum in outwork[1] over all ranks -- confirm against its definition */
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPI_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets */
      /* NOTE(review): sizeof(IS **) happens to equal sizeof(IS) since IS is a
         pointer type, but sizeof(IS) would be the clearer spelling */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      /* the local ownership range must align with the matrix block size */
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
      for (i=0; i<n_local_true; i++){
        /* split the sz/bs blocks as evenly as possible; the first
           (sz/bs)%n_local_true subdomains get one extra block */
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      /* mark that we created the IS array so PCDestroy_ASM() frees it */
      osm->is_flg = PETSC_TRUE;
    }

    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
    /* x and y share one allocation: y points at the second half */
    ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    /* processes with fewer than n_local true subdomains get empty
       vectors/scatters so the collective scatter calls still match up */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

   /*
       Create the local solvers.
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      PetscLogObjectParent(pc,ksp);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      /* block solvers pick up options under the "<pc prefix>sub_" prefix */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    PetscLogObjectParent(pc,osm->pmat[i]);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
210 
211 #undef __FUNCT__
212 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
213 static int PCSetUpOnBlocks_ASM(PC pc)
214 {
215   PC_ASM *osm = (PC_ASM*)pc->data;
216   int    i,ierr;
217 
218   PetscFunctionBegin;
219   for (i=0; i<osm->n_local_true; i++) {
220     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
221   }
222   /*
223      If inplace flag is set, then destroy the matrix after the setup
224      on blocks is done.
225   */
226   if (osm->inplace && osm->n_local_true > 0) {
227     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
228   }
229   PetscFunctionReturn(0);
230 }
231 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the ASM preconditioner: restrict x to each
   subdomain, solve each block with its KSP, and add the (possibly
   overlapping) local solutions back into y.
*/
static int PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions first so communication can overlap */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions below, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete the accumulation of every subdomain's contribution into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
277 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner;
   same structure as PCApply_ASM() but uses KSPSolveTranspose() and swaps
   the roles of the restrict/interpolate limiting flags.
*/
static int PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions first so communication can overlap */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions below, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete the accumulation of every subdomain's contribution into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
326 
327 #undef __FUNCT__
328 #define __FUNCT__ "PCDestroy_ASM"
329 static int PCDestroy_ASM(PC pc)
330 {
331   PC_ASM *osm = (PC_ASM*)pc->data;
332   int    i,ierr;
333 
334   PetscFunctionBegin;
335   for (i=0; i<osm->n_local; i++) {
336     ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
337     ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
338     ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
339   }
340   if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
341     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
342   }
343   if (osm->ksp) {
344     for (i=0; i<osm->n_local_true; i++) {
345       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
346     }
347   }
348   if (osm->is_flg) {
349     for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
350     ierr = PetscFree(osm->is);CHKERRQ(ierr);
351   }
352   if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
353   if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
354   if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
355   ierr = PetscFree(osm);CHKERRQ(ierr);
356   PetscFunctionReturn(0);
357 }
358 
359 #undef __FUNCT__
360 #define __FUNCT__ "PCSetFromOptions_ASM"
361 static int PCSetFromOptions_ASM(PC pc)
362 {
363   PC_ASM     *osm = (PC_ASM*)pc->data;
364   int        blocks,ovl,ierr,indx;
365   PetscTruth flg,set,sym;
366   const char *type[] = {"none","restrict","interpolate","basic"};
367 
368   PetscFunctionBegin;
369   /* set the type to symmetric if matrix is symmetric */
370   if (pc->pmat && !osm->type_set) {
371     ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
372     if (set && sym) {
373       osm->type = PC_ASM_BASIC;
374     }
375   }
376   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
377     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
378     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
379     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
380     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
381     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
382     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
383     ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
384     if (flg) {
385       ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
386     }
387   ierr = PetscOptionsTail();CHKERRQ(ierr);
388   PetscFunctionReturn(0);
389 }
390 
391 /*------------------------------------------------------------------------------------*/
392 
393 EXTERN_C_BEGIN
394 #undef __FUNCT__
395 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
396 int PCASMSetLocalSubdomains_ASM(PC pc,int n,IS is[])
397 {
398   PC_ASM *osm = (PC_ASM*)pc->data;
399 
400   PetscFunctionBegin;
401   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
402 
403   if (pc->setupcalled && n != osm->n_local_true) {
404     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
405   }
406   if (!pc->setupcalled){
407     osm->n_local_true = n;
408     osm->is           = is;
409   }
410   PetscFunctionReturn(0);
411 }
412 EXTERN_C_END
413 
414 EXTERN_C_BEGIN
415 #undef __FUNCT__
416 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
417 int PCASMSetTotalSubdomains_ASM(PC pc,int N,IS *is)
418 {
419   PC_ASM *osm = (PC_ASM*)pc->data;
420   int    rank,size,ierr,n;
421 
422   PetscFunctionBegin;
423 
424   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
425 they cannot be set globally yet.");
426 
427   /*
428      Split the subdomains equally amoung all processors
429   */
430   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
431   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
432   n = N/size + ((N % size) > rank);
433   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
434   if (!pc->setupcalled){
435     osm->n_local_true = n;
436     osm->is           = 0;
437   }
438   PetscFunctionReturn(0);
439 }
440 EXTERN_C_END
441 
442 EXTERN_C_BEGIN
443 #undef __FUNCT__
444 #define __FUNCT__ "PCASMSetOverlap_ASM"
445 int PCASMSetOverlap_ASM(PC pc,int ovl)
446 {
447   PC_ASM *osm;
448 
449   PetscFunctionBegin;
450   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
451 
452   osm               = (PC_ASM*)pc->data;
453   osm->overlap      = ovl;
454   PetscFunctionReturn(0);
455 }
456 EXTERN_C_END
457 
458 EXTERN_C_BEGIN
459 #undef __FUNCT__
460 #define __FUNCT__ "PCASMSetType_ASM"
461 int PCASMSetType_ASM(PC pc,PCASMType type)
462 {
463   PC_ASM *osm;
464 
465   PetscFunctionBegin;
466   osm        = (PC_ASM*)pc->data;
467   osm->type     = type;
468   osm->type_set = PETSC_TRUE;
469   PetscFunctionReturn(0);
470 }
471 EXTERN_C_END
472 
473 EXTERN_C_BEGIN
474 #undef __FUNCT__
475 #define __FUNCT__ "PCASMGetSubKSP_ASM"
476 int PCASMGetSubKSP_ASM(PC pc,int *n_local,int *first_local,KSP **ksp)
477 {
478   PC_ASM   *jac = (PC_ASM*)pc->data;
479   int      ierr;
480 
481   PetscFunctionBegin;
482   if (jac->n_local_true < 0) {
483     SETERRQ(1,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
484   }
485 
486   if (n_local)     *n_local     = jac->n_local_true;
487   if (first_local) {
488     ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
489     *first_local -= jac->n_local_true;
490   }
491   *ksp                         = jac->ksp;
492   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
493                                       not necessarily true though!  This flag is
494                                       used only for PCView_ASM() */
495   PetscFunctionReturn(0);
496 }
497 EXTERN_C_END
498 
499 EXTERN_C_BEGIN
500 #undef __FUNCT__
501 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
502 int PCASMSetUseInPlace_ASM(PC pc)
503 {
504   PC_ASM *dir;
505 
506   PetscFunctionBegin;
507   dir          = (PC_ASM*)pc->data;
508   dir->inplace = PETSC_TRUE;
509   PetscFunctionReturn(0);
510 }
511 EXTERN_C_END
512 
513 /*----------------------------------------------------------------------------*/
514 #undef __FUNCT__
515 #define __FUNCT__ "PCASMSetUseInPlace"
516 /*@
517    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
518 
519    Collective on PC
520 
521    Input Parameters:
522 .  pc - the preconditioner context
523 
524    Options Database Key:
525 .  -pc_asm_in_place - Activates in-place factorization
526 
527    Note:
528    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
529    when the original matrix is not required during the Solve process.
530    This destroys the matrix, early thus, saving on memory usage.
531 
532    Level: intermediate
533 
534 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
535 
536 .seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace ()
537 @*/
538 int PCASMSetUseInPlace(PC pc)
539 {
540   int ierr,(*f)(PC);
541 
542   PetscFunctionBegin;
543   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
544   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
545   if (f) {
546     ierr = (*f)(pc);CHKERRQ(ierr);
547   }
548   PetscFunctionReturn(0);
549 }
550 /*----------------------------------------------------------------------------*/
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "PCASMSetLocalSubdomains"
554 /*@C
555     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
556     only) for the additive Schwarz preconditioner.
557 
558     Collective on PC
559 
560     Input Parameters:
561 +   pc - the preconditioner context
562 .   n - the number of subdomains for this processor (default value = 1)
563 -   is - the index sets that define the subdomains for this processor
564          (or PETSC_NULL for PETSc to determine subdomains)
565 
566     Notes:
567     The IS numbering is in the parallel, global numbering of the vector.
568 
569     By default the ASM preconditioner uses 1 block per processor.
570 
571     These index sets cannot be destroyed until after completion of the
572     linear solves for which the ASM preconditioner is being used.
573 
574     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
575 
576     Level: advanced
577 
578 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
579 
580 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
581           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
582 @*/
583 int PCASMSetLocalSubdomains(PC pc,int n,IS is[])
584 {
585   int ierr,(*f)(PC,int,IS[]);
586 
587   PetscFunctionBegin;
588   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
589   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
590   if (f) {
591     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
592   }
593   PetscFunctionReturn(0);
594 }
595 
596 #undef __FUNCT__
597 #define __FUNCT__ "PCASMSetTotalSubdomains"
598 /*@C
599     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
600     additive Schwarz preconditioner.  Either all or no processors in the
601     PC communicator must call this routine, with the same index sets.
602 
603     Collective on PC
604 
605     Input Parameters:
606 +   pc - the preconditioner context
607 .   n - the number of subdomains for all processors
608 -   is - the index sets that define the subdomains for all processor
609          (or PETSC_NULL for PETSc to determine subdomains)
610 
611     Options Database Key:
612     To set the total number of subdomain blocks rather than specify the
613     index sets, use the option
614 .    -pc_asm_blocks <blks> - Sets total blocks
615 
616     Notes:
617     Currently you cannot use this to set the actual subdomains with the argument is.
618 
619     By default the ASM preconditioner uses 1 block per processor.
620 
621     These index sets cannot be destroyed until after completion of the
622     linear solves for which the ASM preconditioner is being used.
623 
624     Use PCASMSetLocalSubdomains() to set local subdomains.
625 
626     Level: advanced
627 
628 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
629 
630 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
631           PCASMCreateSubdomains2D()
632 @*/
633 int PCASMSetTotalSubdomains(PC pc,int N,IS *is)
634 {
635   int ierr,(*f)(PC,int,IS *);
636 
637   PetscFunctionBegin;
638   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
639   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
640   if (f) {
641     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
642   }
643   PetscFunctionReturn(0);
644 }
645 
646 #undef __FUNCT__
647 #define __FUNCT__ "PCASMSetOverlap"
648 /*@
649     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
650     additive Schwarz preconditioner.  Either all or no processors in the
651     PC communicator must call this routine.
652 
653     Collective on PC
654 
655     Input Parameters:
656 +   pc  - the preconditioner context
657 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
658 
659     Options Database Key:
660 .   -pc_asm_overlap <ovl> - Sets overlap
661 
662     Notes:
663     By default the ASM preconditioner uses 1 block per processor.  To use
664     multiple blocks per perocessor, see PCASMSetTotalSubdomains() and
665     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
666 
667     The overlap defaults to 1, so if one desires that no additional
668     overlap be computed beyond what may have been set with a call to
669     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
670     must be set to be 0.  In particular, if one does not explicitly set
671     the subdomains an application code, then all overlap would be computed
672     internally by PETSc, and using an overlap of 0 would result in an ASM
673     variant that is equivalent to the block Jacobi preconditioner.
674 
675     Note that one can define initial index sets with any overlap via
676     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
677     PCASMSetOverlap() merely allows PETSc to extend that overlap further
678     if desired.
679 
680     Level: intermediate
681 
682 .keywords: PC, ASM, set, overlap
683 
684 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
685           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
686 @*/
687 int PCASMSetOverlap(PC pc,int ovl)
688 {
689   int ierr,(*f)(PC,int);
690 
691   PetscFunctionBegin;
692   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
693   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
694   if (f) {
695     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
696   }
697   PetscFunctionReturn(0);
698 }
699 
700 #undef __FUNCT__
701 #define __FUNCT__ "PCASMSetType"
702 /*@
703     PCASMSetType - Sets the type of restriction and interpolation used
704     for local problems in the additive Schwarz method.
705 
706     Collective on PC
707 
708     Input Parameters:
709 +   pc  - the preconditioner context
710 -   type - variant of ASM, one of
711 .vb
712       PC_ASM_BASIC       - full interpolation and restriction
713       PC_ASM_RESTRICT    - full restriction, local processor interpolation
714       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
715       PC_ASM_NONE        - local processor restriction and interpolation
716 .ve
717 
718     Options Database Key:
719 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
720 
721     Level: intermediate
722 
723 .keywords: PC, ASM, set, type
724 
725 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
726           PCASMCreateSubdomains2D()
727 @*/
728 int PCASMSetType(PC pc,PCASMType type)
729 {
730   int ierr,(*f)(PC,PCASMType);
731 
732   PetscFunctionBegin;
733   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
734   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
735   if (f) {
736     ierr = (*f)(pc,type);CHKERRQ(ierr);
737   }
738   PetscFunctionReturn(0);
739 }
740 
741 #undef __FUNCT__
742 #define __FUNCT__ "PCASMGetSubKSP"
743 /*@C
744    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
745    this processor.
746 
747    Collective on PC iff first_local is requested
748 
749    Input Parameter:
750 .  pc - the preconditioner context
751 
752    Output Parameters:
753 +  n_local - the number of blocks on this processor or PETSC_NULL
754 .  first_local - the global number of the first block on this processor or PETSC_NULL,
755                  all processors must request or all must pass PETSC_NULL
756 -  ksp - the array of KSP contexts
757 
758    Note:
759    After PCASMGetSubKSP() the array of KSPes is not to be freed
760 
761    Currently for some matrix implementations only 1 block per processor
762    is supported.
763 
764    You must call KSPSetUp() before calling PCASMGetSubKSP().
765 
766    Level: advanced
767 
768 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
769 
770 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
771           PCASMCreateSubdomains2D(),
772 @*/
773 int PCASMGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[])
774 {
775   int ierr,(*f)(PC,int*,int*,KSP **);
776 
777   PetscFunctionBegin;
778   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
779   PetscValidIntPointer(n_local,2);
780   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
781   if (f) {
782     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
783   } else {
784     SETERRQ(1,"Cannot get subksp for this type of PC");
785   }
786 
787  PetscFunctionReturn(0);
788 }
789 
790 /* -------------------------------------------------------------------------------------*/
791 /*MC
792    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
793            its own KSP object.
794 
795    Options Database Keys:
796 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
797 .  -pc_asm_in_place - Activates in-place factorization
798 .  -pc_asm_blocks <blks> - Sets total blocks
799 .  -pc_asm_overlap <ovl> - Sets overlap
800 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
801 
802      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
803       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
804       -pc_asm_type basic to use the standard ASM.
805 
806    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
807      than one processor. Defaults to one block per processor.
808 
809      To set options on the solvers for each block append -sub_ to all the KSP, and PC
810         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
811 
     To set the options on the solvers separately for each block call PCASMGetSubKSP()
813          and set the options directly on the resulting KSP object (you can access its PC
814          with KSPGetPC())
815 
816 
817    Level: beginner
818 
819    Concepts: additive Schwarz method
820 
821 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
822            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
823            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
824            PCASMSetUseInPlace()
825 M*/
826 
827 EXTERN_C_BEGIN
828 #undef __FUNCT__
829 #define __FUNCT__ "PCCreate_ASM"
830 int PCCreate_ASM(PC pc)
831 {
832   int    ierr;
833   PC_ASM *osm;
834 
835   PetscFunctionBegin;
836   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
837   PetscLogObjectMemory(pc,sizeof(PC_ASM));
838   ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
839   osm->n                 = PETSC_DECIDE;
840   osm->n_local           = 0;
841   osm->n_local_true      = PETSC_DECIDE;
842   osm->overlap           = 1;
843   osm->is_flg            = PETSC_FALSE;
844   osm->ksp              = 0;
845   osm->scat              = 0;
846   osm->is                = 0;
847   osm->mat               = 0;
848   osm->pmat              = 0;
849   osm->type              = PC_ASM_RESTRICT;
850   osm->same_local_solves = PETSC_TRUE;
851   osm->inplace           = PETSC_FALSE;
852   pc->data               = (void*)osm;
853 
854   pc->ops->apply             = PCApply_ASM;
855   pc->ops->applytranspose    = PCApplyTranspose_ASM;
856   pc->ops->setup             = PCSetUp_ASM;
857   pc->ops->destroy           = PCDestroy_ASM;
858   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
859   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
860   pc->ops->view              = PCView_ASM;
861   pc->ops->applyrichardson   = 0;
862 
863   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
864                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
865   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
866                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
867   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
868                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
869   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
870                     PCASMSetType_ASM);CHKERRQ(ierr);
871   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
872                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
873 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
874                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
875   PetscFunctionReturn(0);
876 }
877 EXTERN_C_END
878 
879 
880 #undef __FUNCT__
881 #define __FUNCT__ "PCASMCreateSubdomains2D"
882 /*@
883    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
884    preconditioner for a two-dimensional problem on a regular grid.
885 
886    Not Collective
887 
888    Input Parameters:
889 +  m, n - the number of mesh points in the x and y directions
890 .  M, N - the number of subdomains in the x and y directions
891 .  dof - degrees of freedom per node
892 -  overlap - overlap in mesh lines
893 
894    Output Parameters:
895 +  Nsub - the number of subdomains created
896 -  is - the array of index sets defining the subdomains
897 
898    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
900    preconditioners.  More general related routines are
901    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
902 
903    Level: advanced
904 
905 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
906 
907 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
908           PCASMSetOverlap()
909 @*/
910 int PCASMCreateSubdomains2D(int m,int n,int M,int N,int dof,int overlap,int *Nsub,IS **is)
911 {
912   int i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
913   int nidx,*idx,loc,ii,jj,ierr,count;
914 
915   PetscFunctionBegin;
916   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
917 
918   *Nsub = N*M;
919   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
920   ystart = 0;
921   loc_outter = 0;
922   for (i=0; i<N; i++) {
923     height = n/N + ((n % N) > i); /* height of subdomain */
924     if (height < 2) SETERRQ(1,"Too many N subdomains for mesh dimension n");
925     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
926     yright = ystart + height + overlap; if (yright > n) yright = n;
927     xstart = 0;
928     for (j=0; j<M; j++) {
929       width = m/M + ((m % M) > j); /* width of subdomain */
930       if (width < 2) SETERRQ(1,"Too many M subdomains for mesh dimension m");
931       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
932       xright = xstart + width + overlap; if (xright > m) xright = m;
933       nidx   = (xright - xleft)*(yright - yleft);
934       ierr = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
935       loc    = 0;
936       for (ii=yleft; ii<yright; ii++) {
937         count = m*ii + xleft;
938         for (jj=xleft; jj<xright; jj++) {
939           idx[loc++] = count++;
940         }
941       }
942       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
943       ierr = PetscFree(idx);CHKERRQ(ierr);
944       xstart += width;
945     }
946     ystart += height;
947   }
948   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
949   PetscFunctionReturn(0);
950 }
951 
952 #undef __FUNCT__
953 #define __FUNCT__ "PCASMGetLocalSubdomains"
954 /*@C
955     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
956     only) for the additive Schwarz preconditioner.
957 
958     Collective on PC
959 
960     Input Parameter:
961 .   pc - the preconditioner context
962 
963     Output Parameters:
964 +   n - the number of subdomains for this processor (default value = 1)
965 -   is - the index sets that define the subdomains for this processor
966 
967 
968     Notes:
969     The IS numbering is in the parallel, global numbering of the vector.
970 
971     Level: advanced
972 
973 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
974 
975 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
976           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
977 @*/
978 int PCASMGetLocalSubdomains(PC pc,int *n,IS *is[])
979 {
980   PC_ASM *osm;
981 
982   PetscFunctionBegin;
983   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
984   PetscValidIntPointer(n,2);
985   if (!pc->setupcalled) {
986     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
987   }
988 
989   osm = (PC_ASM*)pc->data;
990   if (n)  *n = osm->n_local_true;
991   if (is) *is = osm->is;
992   PetscFunctionReturn(0);
993 }
994 
995 #undef __FUNCT__
996 #define __FUNCT__ "PCASMGetLocalSubmatrices"
997 /*@C
998     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
999     only) for the additive Schwarz preconditioner.
1000 
1001     Collective on PC
1002 
1003     Input Parameter:
1004 .   pc - the preconditioner context
1005 
1006     Output Parameters:
1007 +   n - the number of matrices for this processor (default value = 1)
1008 -   mat - the matrices
1009 
1010 
1011     Level: advanced
1012 
1013 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1014 
1015 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1016           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1017 @*/
1018 int PCASMGetLocalSubmatrices(PC pc,int *n,Mat *mat[])
1019 {
1020   PC_ASM *osm;
1021 
1022   PetscFunctionBegin;
1023   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1024   PetscValidPointer(n,2);
1025   if (!pc->setupcalled) {
1026     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1027   }
1028 
1029   osm = (PC_ASM*)pc->data;
1030   if (n)   *n   = osm->n_local_true;
1031   if (mat) *mat = osm->pmat;
1032   PetscFunctionReturn(0);
1033 }
1034 
1035