xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 2fc52814d27bf1f4e71021c1c3ebb532b583ed60)
1 /*$Id: asm.c,v 1.131 2001/08/07 03:03:38 balay Exp $*/
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
14 
/*
   PC_ASM - Private context for the additive Schwarz preconditioner.

   Each processor owns n_local_true subdomains; to keep the VecScatter
   bookkeeping uniform, arrays are padded out to n_local (the maximum
   number of subdomains on any processor) with empty entries.
*/
typedef struct {
  int        n,n_local,n_local_true; /* n: total subdomains over all procs; n_local: max per proc; n_local_true: actual on this proc */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup */
  int        overlap;             /* overlap requested by user */
  KSP       *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* work vectors: subdomain RHS (x) and solution (y), length n_local */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCView_ASM"
33 static int PCView_ASM(PC pc,PetscViewer viewer)
34 {
35   PC_ASM      *jac = (PC_ASM*)pc->data;
36   int         rank,ierr,i;
37   const char  *cstring = 0;
38   PetscTruth  isascii,isstring;
39   PetscViewer sviewer;
40 
41 
42   PetscFunctionBegin;
43   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
44   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
45   if (isascii) {
46     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %d, amount of overlap = %d\n",jac->n,jac->overlap);CHKERRQ(ierr);
47     if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
48     else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
49     else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
50     else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
51     else                                      cstring = "Unknown ASM type";
52     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
53     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
54     if (jac->same_local_solves) {
55       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
56       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
57       if (!rank && jac->ksp) {
58         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
59         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
60         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
61       }
62       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
63     } else {
64       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
65       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d\n",rank,jac->n_local);CHKERRQ(ierr);
66       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
67       for (i=0; i<jac->n_local; i++) {
68         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr);
69         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
70         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
71         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
72         if (i != jac->n_local-1) {
73           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
74         }
75       }
76       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
77       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
78     }
79   } else if (isstring) {
80     ierr = PetscViewerStringSPrintf(viewer," blks=%d, overlap=%d, type=%d",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
81     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
82       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
83     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
84   } else {
85     SETERRQ1(1,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
86   }
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "PCSetUp_ASM"
92 static int PCSetUp_ASM(PC pc)
93 {
94   PC_ASM   *osm  = (PC_ASM*)pc->data;
95   int      i,ierr,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
96   int      start,start_val,end_val,size,sz,bs;
97   MatReuse scall = MAT_REUSE_MATRIX;
98   IS       isl;
99   KSP      ksp;
100   PC       subpc;
101   char     *prefix,*pprefix;
102 
103   PetscFunctionBegin;
104   if (!pc->setupcalled) {
105     if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
106       /* no subdomains given, use one per processor */
107       osm->n_local_true = osm->n_local = 1;
108       ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
109       osm->n = size;
110     } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
111       int inwork[2],outwork[2];
112       inwork[0] = inwork[1] = osm->n_local_true;
113       ierr = MPI_Allreduce(inwork,outwork,1,MPI_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
114       osm->n_local = outwork[0];
115       osm->n       = outwork[1];
116     }
117     n_local      = osm->n_local;
118     n_local_true = osm->n_local_true;
119     if (!osm->is){ /* build the index sets */
120       ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
121       ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
122       ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
123       sz    = end_val - start_val;
124       start = start_val;
125       if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
126         SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
127       }
128       for (i=0; i<n_local_true; i++){
129         size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
130         ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
131         start      += size;
132         osm->is[i] =  isl;
133       }
134       osm->is_flg = PETSC_TRUE;
135     }
136 
137     ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
138     ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
139     ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
140     osm->y = osm->x + n_local;
141 
142     /*  Extend the "overlapping" regions by a number of steps  */
143     ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
144     for (i=0; i<n_local_true; i++) {
145       ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
146     }
147 
148     /* create the local work vectors and scatter contexts */
149     for (i=0; i<n_local_true; i++) {
150       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
151       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
152       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
153       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
154       ierr = VecScatterCreate(pc->vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
155       ierr = ISDestroy(isl);CHKERRQ(ierr);
156     }
157     for (i=n_local_true; i<n_local; i++) {
158       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
159       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
160       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
161       ierr = VecScatterCreate(pc->vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
162       ierr = ISDestroy(isl);CHKERRQ(ierr);
163     }
164 
165    /*
166        Create the local solvers.
167     */
168     for (i=0; i<n_local_true; i++) {
169       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
170       PetscLogObjectParent(pc,ksp);
171       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
172       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
173       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
174       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
175       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
176       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
177       ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
178       osm->ksp[i] = ksp;
179     }
180     scall = MAT_INITIAL_MATRIX;
181   } else {
182     /*
183        Destroy the blocks from the previous iteration
184     */
185     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
186       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
187       scall = MAT_INITIAL_MATRIX;
188     }
189   }
190 
191   /* extract out the submatrices */
192   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
193 
194   /* Return control to the user so that the submatrices can be modified (e.g., to apply
195      different boundary conditions for the submatrices than for the global problem) */
196   ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
197 
198   /* loop over subdomains putting them into local ksp */
199   ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
200   for (i=0; i<n_local_true; i++) {
201     ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
202     PetscLogObjectParent(pc,osm->pmat[i]);
203     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
204   }
205   PetscFunctionReturn(0);
206 }
207 
208 #undef __FUNCT__
209 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
210 static int PCSetUpOnBlocks_ASM(PC pc)
211 {
212   PC_ASM *osm = (PC_ASM*)pc->data;
213   int    i,ierr;
214 
215   PetscFunctionBegin;
216   for (i=0; i<osm->n_local_true; i++) {
217     ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
218     ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
219     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
220   }
221   /*
222      If inplace flag is set, then destroy the matrix after the setup
223      on blocks is done.
224   */
225   if (osm->inplace && osm->n_local_true > 0) {
226     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
227   }
228   PetscFunctionReturn(0);
229 }
230 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: gathers each
   subdomain's piece of x, solves the block systems, and sums the block
   solutions back into y.  Scatters are begun early and completed late so
   communication overlaps the local solves.
*/
static int PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all forward scatters (x -> subdomain work vectors) at once */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions below, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i]);CHKERRQ(ierr);
    /* begin adding this block's solution into y while other blocks solve */
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  /* (padding entries i >= n_local_true are zero-length but must still
     participate to keep the collective scatters balanced) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters (subdomain solutions -> y) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
278 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner.  Mirrors PCApply_ASM() but uses KSPSolveTranspose() for
   the block solves, and swaps the roles of the RESTRICT/INTERPOLATE flags
   since the three factors (restriction, solve, interpolation) appear
   transposed and in reverse order.
*/
static int PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all forward scatters (x -> subdomain work vectors) at once */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions below, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  /* (zero-length padding entries keep the collective scatters balanced) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters (subdomain solutions -> y) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
329 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Frees all resources owned by the ASM context: scatters,
   work vectors, submatrices, block solvers, and (only if PCSetUp_ASM()
   created them, i.e. is_flg is set) the subdomain index sets.
*/
static int PCDestroy_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    i,ierr;

  PetscFunctionBegin;
  /* n_local is 0 until setup ran, so these loops are safely empty then */
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  /* with the inplace option the submatrices were already freed in
     PCSetUpOnBlocks_ASM() */
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* only free the index sets if this PC created them; user-supplied sets
     remain the user's responsibility */
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  /* x and y share one allocation (y = x + n_local), so free only x */
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
361 
362 #undef __FUNCT__
363 #define __FUNCT__ "PCSetFromOptions_ASM"
364 static int PCSetFromOptions_ASM(PC pc)
365 {
366   PC_ASM     *osm = (PC_ASM*)pc->data;
367   int        blocks,ovl,ierr,indx;
368   PetscTruth flg;
369   const char *type[] = {"basic","restrict","interpolate","none"};
370 
371   PetscFunctionBegin;
372   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
373     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
374     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
375     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
376     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
377     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
378     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
379     ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
380     if (flg) {
381       ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
382     }
383   ierr = PetscOptionsTail();CHKERRQ(ierr);
384   PetscFunctionReturn(0);
385 }
386 
387 /*------------------------------------------------------------------------------------*/
388 
389 EXTERN_C_BEGIN
390 #undef __FUNCT__
391 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
392 int PCASMSetLocalSubdomains_ASM(PC pc,int n,IS is[])
393 {
394   PC_ASM *osm = (PC_ASM*)pc->data;
395 
396   PetscFunctionBegin;
397   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
398 
399   if (pc->setupcalled && n != osm->n_local_true) {
400     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
401   }
402   if (!pc->setupcalled){
403     osm->n_local_true = n;
404     osm->is           = is;
405   }
406   PetscFunctionReturn(0);
407 }
408 EXTERN_C_END
409 
410 EXTERN_C_BEGIN
411 #undef __FUNCT__
412 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
/*
   PCASMSetTotalSubdomains_ASM - Implementation of PCASMSetTotalSubdomains()
   for the ASM preconditioner: splits the N global subdomains as evenly as
   possible over the processors.  Supplying explicit index sets globally is
   not supported (use PCASMSetLocalSubdomains() for that).
*/
int PCASMSetTotalSubdomains_ASM(PC pc,int N,IS *is)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    rank,size,ierr,n;

  PetscFunctionBegin;

  if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
they cannot be set globally yet.");

  /*
     Split the subdomains equally amoung all processors
  */
  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
  /* the first N%size ranks get one extra subdomain */
  n = N/size + ((N % size) > rank);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
  if (!pc->setupcalled){
    osm->n_local_true = n;
    /* clear any previous index sets so PCSetUp_ASM() builds fresh ones */
    osm->is           = 0;
  }
  PetscFunctionReturn(0);
}
436 EXTERN_C_END
437 
438 EXTERN_C_BEGIN
439 #undef __FUNCT__
440 #define __FUNCT__ "PCASMSetOverlap_ASM"
441 int PCASMSetOverlap_ASM(PC pc,int ovl)
442 {
443   PC_ASM *osm;
444 
445   PetscFunctionBegin;
446   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
447 
448   osm               = (PC_ASM*)pc->data;
449   osm->overlap      = ovl;
450   PetscFunctionReturn(0);
451 }
452 EXTERN_C_END
453 
454 EXTERN_C_BEGIN
455 #undef __FUNCT__
456 #define __FUNCT__ "PCASMSetType_ASM"
457 int PCASMSetType_ASM(PC pc,PCASMType type)
458 {
459   PC_ASM *osm;
460 
461   PetscFunctionBegin;
462   osm        = (PC_ASM*)pc->data;
463   osm->type  = type;
464   PetscFunctionReturn(0);
465 }
466 EXTERN_C_END
467 
468 EXTERN_C_BEGIN
469 #undef __FUNCT__
470 #define __FUNCT__ "PCASMGetSubKSP_ASM"
/*
   PCASMGetSubKSP_ASM - Implementation of PCASMGetSubKSP() for the ASM
   preconditioner: returns the local block count, the global index of the
   first local block, and the array of block KSPs.  Collective when
   first_local is requested (uses MPI_Scan).
*/
int PCASMGetSubKSP_ASM(PC pc,int *n_local,int *first_local,KSP **ksp)
{
  PC_ASM   *jac = (PC_ASM*)pc->data;
  int      ierr;

  PetscFunctionBegin;
  /* n_local_true stays at PETSC_DECIDE (< 0) until PCSetUp_ASM() runs */
  if (jac->n_local_true < 0) {
    SETERRQ(1,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local)     *n_local     = jac->n_local_true;
  if (first_local) {
    /* inclusive prefix sum of block counts, minus own count, gives the
       global index of this processor's first block */
    ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
    *first_local -= jac->n_local_true;
  }
  *ksp                         = jac->ksp;
  jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
                                      not necessarily true though!  This flag is
                                      used only for PCView_ASM() */
  PetscFunctionReturn(0);
}
492 EXTERN_C_END
493 
494 EXTERN_C_BEGIN
495 #undef __FUNCT__
496 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
497 int PCASMSetUseInPlace_ASM(PC pc)
498 {
499   PC_ASM *dir;
500 
501   PetscFunctionBegin;
502   dir          = (PC_ASM*)pc->data;
503   dir->inplace = PETSC_TRUE;
504   PetscFunctionReturn(0);
505 }
506 EXTERN_C_END
507 
508 /*----------------------------------------------------------------------------*/
509 #undef __FUNCT__
510 #define __FUNCT__ "PCASMSetUseInPlace"
511 /*@
512    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
513 
514    Collective on PC
515 
516    Input Parameters:
517 .  pc - the preconditioner context
518 
519    Options Database Key:
520 .  -pc_asm_in_place - Activates in-place factorization
521 
522    Note:
523    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
524    when the original matrix is not required during the Solve process.
   This destroys the matrix early, thus saving on memory usage.
526 
527    Level: intermediate
528 
529 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
530 
531 .seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace ()
532 @*/
533 int PCASMSetUseInPlace(PC pc)
534 {
535   int ierr,(*f)(PC);
536 
537   PetscFunctionBegin;
538   PetscValidHeaderSpecific(pc,PC_COOKIE);
539   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
540   if (f) {
541     ierr = (*f)(pc);CHKERRQ(ierr);
542   }
543   PetscFunctionReturn(0);
544 }
545 /*----------------------------------------------------------------------------*/
546 
547 #undef __FUNCT__
548 #define __FUNCT__ "PCASMSetLocalSubdomains"
549 /*@C
550     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
551     only) for the additive Schwarz preconditioner.
552 
553     Collective on PC
554 
555     Input Parameters:
556 +   pc - the preconditioner context
557 .   n - the number of subdomains for this processor (default value = 1)
558 -   is - the index sets that define the subdomains for this processor
559          (or PETSC_NULL for PETSc to determine subdomains)
560 
561     Notes:
562     The IS numbering is in the parallel, global numbering of the vector.
563 
564     By default the ASM preconditioner uses 1 block per processor.
565 
566     These index sets cannot be destroyed until after completion of the
567     linear solves for which the ASM preconditioner is being used.
568 
569     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
570 
571     Level: advanced
572 
573 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
574 
575 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
576           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
577 @*/
578 int PCASMSetLocalSubdomains(PC pc,int n,IS is[])
579 {
580   int ierr,(*f)(PC,int,IS[]);
581 
582   PetscFunctionBegin;
583   PetscValidHeaderSpecific(pc,PC_COOKIE);
584   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
585   if (f) {
586     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
587   }
588   PetscFunctionReturn(0);
589 }
590 
591 #undef __FUNCT__
592 #define __FUNCT__ "PCASMSetTotalSubdomains"
593 /*@C
594     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
595     additive Schwarz preconditioner.  Either all or no processors in the
596     PC communicator must call this routine, with the same index sets.
597 
598     Collective on PC
599 
600     Input Parameters:
601 +   pc - the preconditioner context
602 .   n - the number of subdomains for all processors
603 -   is - the index sets that define the subdomains for all processor
604          (or PETSC_NULL for PETSc to determine subdomains)
605 
606     Options Database Key:
607     To set the total number of subdomain blocks rather than specify the
608     index sets, use the option
609 .    -pc_asm_blocks <blks> - Sets total blocks
610 
611     Notes:
612     Currently you cannot use this to set the actual subdomains with the argument is.
613 
614     By default the ASM preconditioner uses 1 block per processor.
615 
616     These index sets cannot be destroyed until after completion of the
617     linear solves for which the ASM preconditioner is being used.
618 
619     Use PCASMSetLocalSubdomains() to set local subdomains.
620 
621     Level: advanced
622 
623 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
624 
625 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
626           PCASMCreateSubdomains2D()
627 @*/
628 int PCASMSetTotalSubdomains(PC pc,int N,IS *is)
629 {
630   int ierr,(*f)(PC,int,IS *);
631 
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(pc,PC_COOKIE);
634   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
635   if (f) {
636     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
637   }
638   PetscFunctionReturn(0);
639 }
640 
641 #undef __FUNCT__
642 #define __FUNCT__ "PCASMSetOverlap"
643 /*@
644     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
645     additive Schwarz preconditioner.  Either all or no processors in the
646     PC communicator must call this routine.
647 
648     Collective on PC
649 
650     Input Parameters:
651 +   pc  - the preconditioner context
652 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
653 
654     Options Database Key:
655 .   -pc_asm_overlap <ovl> - Sets overlap
656 
657     Notes:
658     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
660     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
661 
662     The overlap defaults to 1, so if one desires that no additional
663     overlap be computed beyond what may have been set with a call to
664     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
665     must be set to be 0.  In particular, if one does not explicitly set
666     the subdomains an application code, then all overlap would be computed
667     internally by PETSc, and using an overlap of 0 would result in an ASM
668     variant that is equivalent to the block Jacobi preconditioner.
669 
670     Note that one can define initial index sets with any overlap via
671     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
672     PCASMSetOverlap() merely allows PETSc to extend that overlap further
673     if desired.
674 
675     Level: intermediate
676 
677 .keywords: PC, ASM, set, overlap
678 
679 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
680           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
681 @*/
682 int PCASMSetOverlap(PC pc,int ovl)
683 {
684   int ierr,(*f)(PC,int);
685 
686   PetscFunctionBegin;
687   PetscValidHeaderSpecific(pc,PC_COOKIE);
688   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
689   if (f) {
690     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
691   }
692   PetscFunctionReturn(0);
693 }
694 
695 #undef __FUNCT__
696 #define __FUNCT__ "PCASMSetType"
697 /*@
698     PCASMSetType - Sets the type of restriction and interpolation used
699     for local problems in the additive Schwarz method.
700 
701     Collective on PC
702 
703     Input Parameters:
704 +   pc  - the preconditioner context
705 -   type - variant of ASM, one of
706 .vb
707       PC_ASM_BASIC       - full interpolation and restriction
708       PC_ASM_RESTRICT    - full restriction, local processor interpolation
709       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
710       PC_ASM_NONE        - local processor restriction and interpolation
711 .ve
712 
713     Options Database Key:
714 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
715 
716     Level: intermediate
717 
718 .keywords: PC, ASM, set, type
719 
720 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
721           PCASMCreateSubdomains2D()
722 @*/
723 int PCASMSetType(PC pc,PCASMType type)
724 {
725   int ierr,(*f)(PC,PCASMType);
726 
727   PetscFunctionBegin;
728   PetscValidHeaderSpecific(pc,PC_COOKIE);
729   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
730   if (f) {
731     ierr = (*f)(pc,type);CHKERRQ(ierr);
732   }
733   PetscFunctionReturn(0);
734 }
735 
736 #undef __FUNCT__
737 #define __FUNCT__ "PCASMGetSubKSP"
738 /*@C
739    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
740    this processor.
741 
742    Collective on PC iff first_local is requested
743 
744    Input Parameter:
745 .  pc - the preconditioner context
746 
747    Output Parameters:
748 +  n_local - the number of blocks on this processor or PETSC_NULL
749 .  first_local - the global number of the first block on this processor or PETSC_NULL,
750                  all processors must request or all must pass PETSC_NULL
751 -  ksp - the array of KSP contexts
752 
753    Note:
754    After PCASMGetSubKSP() the array of KSPes is not to be freed
755 
756    Currently for some matrix implementations only 1 block per processor
757    is supported.
758 
759    You must call KSPSetUp() before calling PCASMGetSubKSP().
760 
761    Level: advanced
762 
763 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
764 
765 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
766           PCASMCreateSubdomains2D(),
767 @*/
768 int PCASMGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[])
769 {
770   int ierr,(*f)(PC,int*,int*,KSP **);
771 
772   PetscFunctionBegin;
773   PetscValidHeaderSpecific(pc,PC_COOKIE);
774   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
775   if (f) {
776     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
777   } else {
778     SETERRQ(1,"Cannot get subksp for this type of PC");
779   }
780 
781  PetscFunctionReturn(0);
782 }
783 
784 /* -------------------------------------------------------------------------------------*/
785 /*MC
786    PCASM - Use the additive Schwarz method, each block is (approximately) solved with
787            its own KSP object.
788 
789    Options Database Keys:
790 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
791 .  -pc_asm_in_place - Activates in-place factorization
792 .  -pc_asm_blocks <blks> - Sets total blocks
793 .  -pc_asm_overlap <ovl> - Sets overlap
794 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
795 
796    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
797      than one processor. Defaults to one block per processor.
798 
799      To set options on the solvers for each block append -sub_ to all the KSP and PC
800         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
801 
802      To set the options on the solvers separately for each block call PCASMGetSubKSP()
803          and set the options directly on the resulting KSP object (you can access its PC
804          with KSPGetPC())
805 
806    Level: beginner
807 
808    Concepts: additive Schwarz method
809 
810 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
811            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
812            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
813            PCASMSetUseInPlace()
814 M*/
815 
816 EXTERN_C_BEGIN
817 #undef __FUNCT__
818 #define __FUNCT__ "PCCreate_ASM"
819 int PCCreate_ASM(PC pc)
820 {
821   int    ierr;
822   PC_ASM *osm;
823 
824   PetscFunctionBegin;
825   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
826   PetscLogObjectMemory(pc,sizeof(PC_ASM));
827   ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
828   osm->n                 = PETSC_DECIDE;
829   osm->n_local           = 0;
830   osm->n_local_true      = PETSC_DECIDE;
831   osm->overlap           = 1;
832   osm->is_flg            = PETSC_FALSE;
833   osm->ksp              = 0;
834   osm->scat              = 0;
835   osm->is                = 0;
836   osm->mat               = 0;
837   osm->pmat              = 0;
838   osm->type              = PC_ASM_RESTRICT;
839   osm->same_local_solves = PETSC_TRUE;
840   osm->inplace           = PETSC_FALSE;
841   pc->data               = (void*)osm;
842 
843   pc->ops->apply             = PCApply_ASM;
844   pc->ops->applytranspose    = PCApplyTranspose_ASM;
845   pc->ops->setup             = PCSetUp_ASM;
846   pc->ops->destroy           = PCDestroy_ASM;
847   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
848   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
849   pc->ops->view              = PCView_ASM;
850   pc->ops->applyrichardson   = 0;
851 
852   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
853                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
854   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
855                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
856   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
857                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
858   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
859                     PCASMSetType_ASM);CHKERRQ(ierr);
860   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
861                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
862 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
863                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
864   PetscFunctionReturn(0);
865 }
866 EXTERN_C_END
867 
868 
869 #undef __FUNCT__
870 #define __FUNCT__ "PCASMCreateSubdomains2D"
871 /*@
872    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
873    preconditioner for a two-dimensional problem on a regular grid.
874 
875    Not Collective
876 
877    Input Parameters:
878 +  m, n - the number of mesh points in the x and y directions
879 .  M, N - the number of subdomains in the x and y directions
880 .  dof - degrees of freedom per node
881 -  overlap - overlap in mesh lines
882 
883    Output Parameters:
884 +  Nsub - the number of subdomains created
885 -  is - the array of index sets defining the subdomains
886 
887    Note:
888    Presently PCASMCreateSubdomains2D() is valid only for sequential
889    preconditioners.  More general related routines are
890    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
891 
892    Level: advanced
893 
894 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
895 
896 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
897           PCASMSetOverlap()
898 @*/
899 int PCASMCreateSubdomains2D(int m,int n,int M,int N,int dof,int overlap,int *Nsub,IS **is)
900 {
901   int i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
902   int nidx,*idx,loc,ii,jj,ierr,count;
903 
904   PetscFunctionBegin;
905   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
906 
907   *Nsub = N*M;
908   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
909   ystart = 0;
910   loc_outter = 0;
911   for (i=0; i<N; i++) {
912     height = n/N + ((n % N) > i); /* height of subdomain */
913     if (height < 2) SETERRQ(1,"Too many N subdomains for mesh dimension n");
914     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
915     yright = ystart + height + overlap; if (yright > n) yright = n;
916     xstart = 0;
917     for (j=0; j<M; j++) {
918       width = m/M + ((m % M) > j); /* width of subdomain */
919       if (width < 2) SETERRQ(1,"Too many M subdomains for mesh dimension m");
920       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
921       xright = xstart + width + overlap; if (xright > m) xright = m;
922       nidx   = (xright - xleft)*(yright - yleft);
923       ierr = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
924       loc    = 0;
925       for (ii=yleft; ii<yright; ii++) {
926         count = m*ii + xleft;
927         for (jj=xleft; jj<xright; jj++) {
928           idx[loc++] = count++;
929         }
930       }
931       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
932       ierr = PetscFree(idx);CHKERRQ(ierr);
933       xstart += width;
934     }
935     ystart += height;
936   }
937   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
938   PetscFunctionReturn(0);
939 }
940 
941 #undef __FUNCT__
942 #define __FUNCT__ "PCASMGetLocalSubdomains"
943 /*@C
944     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
945     only) for the additive Schwarz preconditioner.
946 
947     Collective on PC
948 
949     Input Parameter:
950 .   pc - the preconditioner context
951 
952     Output Parameters:
953 +   n - the number of subdomains for this processor (default value = 1)
954 -   is - the index sets that define the subdomains for this processor
955 
956 
957     Notes:
958     The IS numbering is in the parallel, global numbering of the vector.
959 
960     Level: advanced
961 
962 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
963 
964 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
965           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
966 @*/
967 int PCASMGetLocalSubdomains(PC pc,int *n,IS *is[])
968 {
969   PC_ASM *osm;
970 
971   PetscFunctionBegin;
972   PetscValidHeaderSpecific(pc,PC_COOKIE);
973   if (!pc->setupcalled) {
974     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
975   }
976 
977   osm = (PC_ASM*)pc->data;
978   if (n)  *n = osm->n_local_true;
979   if (is) *is = osm->is;
980   PetscFunctionReturn(0);
981 }
982 
983 #undef __FUNCT__
984 #define __FUNCT__ "PCASMGetLocalSubmatrices"
985 /*@C
986     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
987     only) for the additive Schwarz preconditioner.
988 
989     Collective on PC
990 
991     Input Parameter:
992 .   pc - the preconditioner context
993 
994     Output Parameters:
995 +   n - the number of matrices for this processor (default value = 1)
996 -   mat - the matrices
997 
998 
999     Level: advanced
1000 
1001 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1002 
1003 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1004           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1005 @*/
1006 int PCASMGetLocalSubmatrices(PC pc,int *n,Mat *mat[])
1007 {
1008   PC_ASM *osm;
1009 
1010   PetscFunctionBegin;
1011   PetscValidHeaderSpecific(pc,PC_COOKIE);
1012   if (!pc->setupcalled) {
1013     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1014   }
1015 
1016   osm = (PC_ASM*)pc->data;
1017   if (n)   *n   = osm->n_local_true;
1018   if (mat) *mat = osm->pmat;
1019   PetscFunctionReturn(0);
1020 }
1021 
1022