xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 4482741e5b2e2bbc854fb1f8dba65221386520f2)
1 /*$Id: asm.c,v 1.131 2001/08/07 03:03:38 balay Exp $*/
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
14 
/*
   Private context for the additive Schwarz preconditioner (PCASM).
*/
typedef struct {
  int        n,n_local,n_local_true; /* subdomain counts; see the comment at the top of this file */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup */
  int        overlap;             /* overlap requested by user */
  KSP       *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* per-subdomain work vectors: rhs (x) and solution (y);
                                     y shares x's allocation (see PCSetUp_ASM()) */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCView_ASM"
33 static int PCView_ASM(PC pc,PetscViewer viewer)
34 {
35   PC_ASM      *jac = (PC_ASM*)pc->data;
36   int         rank,ierr,i;
37   const char  *cstring = 0;
38   PetscTruth  isascii,isstring;
39   PetscViewer sviewer;
40 
41 
42   PetscFunctionBegin;
43   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
44   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
45   if (isascii) {
46     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %d, amount of overlap = %d\n",jac->n,jac->overlap);CHKERRQ(ierr);
47     if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
48     else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
49     else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
50     else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
51     else                                      cstring = "Unknown ASM type";
52     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
53     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
54     if (jac->same_local_solves) {
55       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
56       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
57       if (!rank && jac->ksp) {
58         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
59         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
60         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
61       }
62       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
63     } else {
64       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
65       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d\n",rank,jac->n_local);CHKERRQ(ierr);
66       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
67       for (i=0; i<jac->n_local; i++) {
68         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr);
69         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
70         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
71         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
72         if (i != jac->n_local-1) {
73           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
74         }
75       }
76       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
77       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
78     }
79   } else if (isstring) {
80     ierr = PetscViewerStringSPrintf(viewer," blks=%d, overlap=%d, type=%d",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
81     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
82       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
83     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
84   } else {
85     SETERRQ1(1,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
86   }
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "PCSetUp_ASM"
92 static int PCSetUp_ASM(PC pc)
93 {
94   PC_ASM   *osm  = (PC_ASM*)pc->data;
95   int      i,ierr,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
96   int      start,start_val,end_val,size,sz,bs;
97   MatReuse scall = MAT_REUSE_MATRIX;
98   IS       isl;
99   KSP      ksp;
100   PC       subpc;
101   char     *prefix,*pprefix;
102 
103   PetscFunctionBegin;
104   if (!pc->setupcalled) {
105     if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
106       /* no subdomains given, use one per processor */
107       osm->n_local_true = osm->n_local = 1;
108       ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
109       osm->n = size;
110     } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
111       int inwork[2],outwork[2];
112       inwork[0] = inwork[1] = osm->n_local_true;
113       ierr = MPI_Allreduce(inwork,outwork,1,MPI_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
114       osm->n_local = outwork[0];
115       osm->n       = outwork[1];
116     }
117     n_local      = osm->n_local;
118     n_local_true = osm->n_local_true;
119     if (!osm->is){ /* build the index sets */
120       ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
121       ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
122       ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
123       sz    = end_val - start_val;
124       start = start_val;
125       if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
126         SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
127       }
128       for (i=0; i<n_local_true; i++){
129         size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
130         ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
131         start      += size;
132         osm->is[i] =  isl;
133       }
134       osm->is_flg = PETSC_TRUE;
135     }
136 
137     ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
138     ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
139     ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
140     osm->y = osm->x + n_local;
141 
142     /*  Extend the "overlapping" regions by a number of steps  */
143     ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
144     for (i=0; i<n_local_true; i++) {
145       ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
146     }
147 
148     /* create the local work vectors and scatter contexts */
149     for (i=0; i<n_local_true; i++) {
150       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
151       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
152       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
153       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
154       ierr = VecScatterCreate(pc->vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
155       ierr = ISDestroy(isl);CHKERRQ(ierr);
156     }
157     for (i=n_local_true; i<n_local; i++) {
158       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
159       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
160       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
161       ierr = VecScatterCreate(pc->vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
162       ierr = ISDestroy(isl);CHKERRQ(ierr);
163     }
164 
165    /*
166        Create the local solvers.
167     */
168     for (i=0; i<n_local_true; i++) {
169       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
170       PetscLogObjectParent(pc,ksp);
171       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
172       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
173       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
174       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
175       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
176       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
177       ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
178       osm->ksp[i] = ksp;
179     }
180     scall = MAT_INITIAL_MATRIX;
181   } else {
182     /*
183        Destroy the blocks from the previous iteration
184     */
185     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
186       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
187       scall = MAT_INITIAL_MATRIX;
188     }
189   }
190 
191   /* extract out the submatrices */
192   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
193 
194   /* Return control to the user so that the submatrices can be modified (e.g., to apply
195      different boundary conditions for the submatrices than for the global problem) */
196   ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
197 
198   /* loop over subdomains putting them into local ksp */
199   ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
200   for (i=0; i<n_local_true; i++) {
201     ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
202     PetscLogObjectParent(pc,osm->pmat[i]);
203     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
204   }
205   PetscFunctionReturn(0);
206 }
207 
208 #undef __FUNCT__
209 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
210 static int PCSetUpOnBlocks_ASM(PC pc)
211 {
212   PC_ASM *osm = (PC_ASM*)pc->data;
213   int    i,ierr;
214 
215   PetscFunctionBegin;
216   for (i=0; i<osm->n_local_true; i++) {
217     ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
218     ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
219     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
220   }
221   /*
222      If inplace flag is set, then destroy the matrix after the setup
223      on blocks is done.
224   */
225   if (osm->inplace && osm->n_local_true > 0) {
226     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
227   }
228   PetscFunctionReturn(0);
229 }
230 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restrict x to
   each (overlapping) subdomain, solve with that subdomain's KSP, and add
   the local solutions back into y.

   The scatter begins/ends are interleaved with the solves so that
   communication can overlap computation.  Slots in [n_local_true,n_local)
   are empty scatters that exist only so all processes participate in the
   same number of scatters.
*/
static int PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions of x onto the subdomain work vectors */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates subdomain contributions with ADD_VALUES, so zero it first */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i]);CHKERRQ(ierr);
    /* start pushing this subdomain's solution back into y */
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
278 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner:
   same structure as PCApply_ASM() but using KSPSolveTranspose() for the
   local solves, and with the roles of the RESTRICT/INTERPOLATE flags
   swapped (see the note below).
*/
static int PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions of x onto the subdomain work vectors */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates subdomain contributions with ADD_VALUES, so zero it first */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
329 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Frees everything allocated in PCSetUp_ASM() plus the
   PC_ASM context itself.

   Notes:
   Scatters and work vectors exist for all n_local slots, while the
   submatrices, KSP solvers, and (internally generated) index sets exist
   only for the n_local_true real subdomains.
*/
static int PCDestroy_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    i,ierr;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  /* in in-place mode the submatrices were already freed in PCSetUpOnBlocks_ASM() */
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* only destroy index sets this PC created itself (is_flg set in PCSetUp_ASM);
     user-supplied index sets remain the user's responsibility */
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  /* osm->y points into the osm->x allocation, so only x is freed */
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
361 
362 #undef __FUNCT__
363 #define __FUNCT__ "PCSetFromOptions_ASM"
364 static int PCSetFromOptions_ASM(PC pc)
365 {
366   PC_ASM     *osm = (PC_ASM*)pc->data;
367   int        blocks,ovl,ierr,indx;
368   PetscTruth flg;
369   const char *type[] = {"none","restrict","interpolate","basic"};
370 
371   PetscFunctionBegin;
372   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
373     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
374     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
375     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
376     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
377     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
378     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
379     ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
380     if (flg) {
381       ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
382     }
383   ierr = PetscOptionsTail();CHKERRQ(ierr);
384   PetscFunctionReturn(0);
385 }
386 
387 /*------------------------------------------------------------------------------------*/
388 
389 EXTERN_C_BEGIN
390 #undef __FUNCT__
391 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
392 int PCASMSetLocalSubdomains_ASM(PC pc,int n,IS is[])
393 {
394   PC_ASM *osm = (PC_ASM*)pc->data;
395 
396   PetscFunctionBegin;
397   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
398 
399   if (pc->setupcalled && n != osm->n_local_true) {
400     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
401   }
402   if (!pc->setupcalled){
403     osm->n_local_true = n;
404     osm->is           = is;
405   }
406   PetscFunctionReturn(0);
407 }
408 EXTERN_C_END
409 
410 EXTERN_C_BEGIN
411 #undef __FUNCT__
412 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
413 int PCASMSetTotalSubdomains_ASM(PC pc,int N,IS *is)
414 {
415   PC_ASM *osm = (PC_ASM*)pc->data;
416   int    rank,size,ierr,n;
417 
418   PetscFunctionBegin;
419 
420   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
421 they cannot be set globally yet.");
422 
423   /*
424      Split the subdomains equally amoung all processors
425   */
426   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
427   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
428   n = N/size + ((N % size) > rank);
429   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
430   if (!pc->setupcalled){
431     osm->n_local_true = n;
432     osm->is           = 0;
433   }
434   PetscFunctionReturn(0);
435 }
436 EXTERN_C_END
437 
438 EXTERN_C_BEGIN
439 #undef __FUNCT__
440 #define __FUNCT__ "PCASMSetOverlap_ASM"
441 int PCASMSetOverlap_ASM(PC pc,int ovl)
442 {
443   PC_ASM *osm;
444 
445   PetscFunctionBegin;
446   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
447 
448   osm               = (PC_ASM*)pc->data;
449   osm->overlap      = ovl;
450   PetscFunctionReturn(0);
451 }
452 EXTERN_C_END
453 
454 EXTERN_C_BEGIN
455 #undef __FUNCT__
456 #define __FUNCT__ "PCASMSetType_ASM"
457 int PCASMSetType_ASM(PC pc,PCASMType type)
458 {
459   PC_ASM *osm;
460 
461   PetscFunctionBegin;
462   osm        = (PC_ASM*)pc->data;
463   osm->type  = type;
464   PetscFunctionReturn(0);
465 }
466 EXTERN_C_END
467 
468 EXTERN_C_BEGIN
469 #undef __FUNCT__
470 #define __FUNCT__ "PCASMGetSubKSP_ASM"
471 int PCASMGetSubKSP_ASM(PC pc,int *n_local,int *first_local,KSP **ksp)
472 {
473   PC_ASM   *jac = (PC_ASM*)pc->data;
474   int      ierr;
475 
476   PetscFunctionBegin;
477   if (jac->n_local_true < 0) {
478     SETERRQ(1,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
479   }
480 
481   if (n_local)     *n_local     = jac->n_local_true;
482   if (first_local) {
483     ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
484     *first_local -= jac->n_local_true;
485   }
486   *ksp                         = jac->ksp;
487   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
488                                       not necessarily true though!  This flag is
489                                       used only for PCView_ASM() */
490   PetscFunctionReturn(0);
491 }
492 EXTERN_C_END
493 
494 EXTERN_C_BEGIN
495 #undef __FUNCT__
496 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
497 int PCASMSetUseInPlace_ASM(PC pc)
498 {
499   PC_ASM *dir;
500 
501   PetscFunctionBegin;
502   dir          = (PC_ASM*)pc->data;
503   dir->inplace = PETSC_TRUE;
504   PetscFunctionReturn(0);
505 }
506 EXTERN_C_END
507 
508 /*----------------------------------------------------------------------------*/
509 #undef __FUNCT__
510 #define __FUNCT__ "PCASMSetUseInPlace"
511 /*@
512    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
513 
514    Collective on PC
515 
516    Input Parameters:
517 .  pc - the preconditioner context
518 
519    Options Database Key:
520 .  -pc_asm_in_place - Activates in-place factorization
521 
522    Note:
523    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
524    when the original matrix is not required during the Solve process.
   This destroys the matrix early, thus saving on memory usage.
526 
527    Level: intermediate
528 
529 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
530 
531 .seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace ()
532 @*/
533 int PCASMSetUseInPlace(PC pc)
534 {
535   int ierr,(*f)(PC);
536 
537   PetscFunctionBegin;
538   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
539   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
540   if (f) {
541     ierr = (*f)(pc);CHKERRQ(ierr);
542   }
543   PetscFunctionReturn(0);
544 }
545 /*----------------------------------------------------------------------------*/
546 
547 #undef __FUNCT__
548 #define __FUNCT__ "PCASMSetLocalSubdomains"
549 /*@C
550     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
551     only) for the additive Schwarz preconditioner.
552 
553     Collective on PC
554 
555     Input Parameters:
556 +   pc - the preconditioner context
557 .   n - the number of subdomains for this processor (default value = 1)
558 -   is - the index sets that define the subdomains for this processor
559          (or PETSC_NULL for PETSc to determine subdomains)
560 
561     Notes:
562     The IS numbering is in the parallel, global numbering of the vector.
563 
564     By default the ASM preconditioner uses 1 block per processor.
565 
566     These index sets cannot be destroyed until after completion of the
567     linear solves for which the ASM preconditioner is being used.
568 
569     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
570 
571     Level: advanced
572 
573 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
574 
575 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
576           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
577 @*/
578 int PCASMSetLocalSubdomains(PC pc,int n,IS is[])
579 {
580   int ierr,(*f)(PC,int,IS[]);
581 
582   PetscFunctionBegin;
583   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
584   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
585   if (f) {
586     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
587   }
588   PetscFunctionReturn(0);
589 }
590 
591 #undef __FUNCT__
592 #define __FUNCT__ "PCASMSetTotalSubdomains"
593 /*@C
594     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
595     additive Schwarz preconditioner.  Either all or no processors in the
596     PC communicator must call this routine, with the same index sets.
597 
598     Collective on PC
599 
600     Input Parameters:
601 +   pc - the preconditioner context
602 .   n - the number of subdomains for all processors
603 -   is - the index sets that define the subdomains for all processor
604          (or PETSC_NULL for PETSc to determine subdomains)
605 
606     Options Database Key:
607     To set the total number of subdomain blocks rather than specify the
608     index sets, use the option
609 .    -pc_asm_blocks <blks> - Sets total blocks
610 
611     Notes:
612     Currently you cannot use this to set the actual subdomains with the argument is.
613 
614     By default the ASM preconditioner uses 1 block per processor.
615 
616     These index sets cannot be destroyed until after completion of the
617     linear solves for which the ASM preconditioner is being used.
618 
619     Use PCASMSetLocalSubdomains() to set local subdomains.
620 
621     Level: advanced
622 
623 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
624 
625 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
626           PCASMCreateSubdomains2D()
627 @*/
628 int PCASMSetTotalSubdomains(PC pc,int N,IS *is)
629 {
630   int ierr,(*f)(PC,int,IS *);
631 
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
634   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
635   if (f) {
636     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
637   }
638   PetscFunctionReturn(0);
639 }
640 
641 #undef __FUNCT__
642 #define __FUNCT__ "PCASMSetOverlap"
643 /*@
644     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
645     additive Schwarz preconditioner.  Either all or no processors in the
646     PC communicator must call this routine.
647 
648     Collective on PC
649 
650     Input Parameters:
651 +   pc  - the preconditioner context
652 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
653 
654     Options Database Key:
655 .   -pc_asm_overlap <ovl> - Sets overlap
656 
657     Notes:
658     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
660     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
661 
662     The overlap defaults to 1, so if one desires that no additional
663     overlap be computed beyond what may have been set with a call to
664     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
665     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
667     internally by PETSc, and using an overlap of 0 would result in an ASM
668     variant that is equivalent to the block Jacobi preconditioner.
669 
670     Note that one can define initial index sets with any overlap via
671     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
672     PCASMSetOverlap() merely allows PETSc to extend that overlap further
673     if desired.
674 
675     Level: intermediate
676 
677 .keywords: PC, ASM, set, overlap
678 
679 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
680           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
681 @*/
682 int PCASMSetOverlap(PC pc,int ovl)
683 {
684   int ierr,(*f)(PC,int);
685 
686   PetscFunctionBegin;
687   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
688   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
689   if (f) {
690     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
691   }
692   PetscFunctionReturn(0);
693 }
694 
695 #undef __FUNCT__
696 #define __FUNCT__ "PCASMSetType"
697 /*@
698     PCASMSetType - Sets the type of restriction and interpolation used
699     for local problems in the additive Schwarz method.
700 
701     Collective on PC
702 
703     Input Parameters:
704 +   pc  - the preconditioner context
705 -   type - variant of ASM, one of
706 .vb
707       PC_ASM_BASIC       - full interpolation and restriction
708       PC_ASM_RESTRICT    - full restriction, local processor interpolation
709       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
710       PC_ASM_NONE        - local processor restriction and interpolation
711 .ve
712 
713     Options Database Key:
714 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
715 
716     Level: intermediate
717 
718 .keywords: PC, ASM, set, type
719 
720 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
721           PCASMCreateSubdomains2D()
722 @*/
723 int PCASMSetType(PC pc,PCASMType type)
724 {
725   int ierr,(*f)(PC,PCASMType);
726 
727   PetscFunctionBegin;
728   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
729   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
730   if (f) {
731     ierr = (*f)(pc,type);CHKERRQ(ierr);
732   }
733   PetscFunctionReturn(0);
734 }
735 
736 #undef __FUNCT__
737 #define __FUNCT__ "PCASMGetSubKSP"
738 /*@C
739    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
740    this processor.
741 
742    Collective on PC iff first_local is requested
743 
744    Input Parameter:
745 .  pc - the preconditioner context
746 
747    Output Parameters:
748 +  n_local - the number of blocks on this processor or PETSC_NULL
749 .  first_local - the global number of the first block on this processor or PETSC_NULL,
750                  all processors must request or all must pass PETSC_NULL
751 -  ksp - the array of KSP contexts
752 
753    Note:
754    After PCASMGetSubKSP() the array of KSPes is not to be freed
755 
756    Currently for some matrix implementations only 1 block per processor
757    is supported.
758 
759    You must call KSPSetUp() before calling PCASMGetSubKSP().
760 
761    Level: advanced
762 
763 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
764 
765 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
766           PCASMCreateSubdomains2D(),
767 @*/
768 int PCASMGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[])
769 {
770   int ierr,(*f)(PC,int*,int*,KSP **);
771 
772   PetscFunctionBegin;
773   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
774   PetscValidIntPointer(n_local,2);
775   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
776   if (f) {
777     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
778   } else {
779     SETERRQ(1,"Cannot get subksp for this type of PC");
780   }
781 
782  PetscFunctionReturn(0);
783 }
784 
785 /* -------------------------------------------------------------------------------------*/
786 /*MC
787    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
788            its own KSP object.
789 
790    Options Database Keys:
791 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
792 .  -pc_asm_in_place - Activates in-place factorization
793 .  -pc_asm_blocks <blks> - Sets total blocks
794 .  -pc_asm_overlap <ovl> - Sets overlap
795 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
796 
797      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
798       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
799       -pc_asm_type basic to use the standard ASM.
800 
801    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
802      than one processor. Defaults to one block per processor.
803 
804      To set options on the solvers for each block append -sub_ to all the KSP, and PC
805         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
806 
     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())
810 
811 
812    Level: beginner
813 
814    Concepts: additive Schwarz method
815 
816 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
817            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
818            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
819            PCASMSetUseInPlace()
820 M*/
821 
822 EXTERN_C_BEGIN
823 #undef __FUNCT__
824 #define __FUNCT__ "PCCreate_ASM"
825 int PCCreate_ASM(PC pc)
826 {
827   int    ierr;
828   PC_ASM *osm;
829 
830   PetscFunctionBegin;
831   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
832   PetscLogObjectMemory(pc,sizeof(PC_ASM));
833   ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
834   osm->n                 = PETSC_DECIDE;
835   osm->n_local           = 0;
836   osm->n_local_true      = PETSC_DECIDE;
837   osm->overlap           = 1;
838   osm->is_flg            = PETSC_FALSE;
839   osm->ksp              = 0;
840   osm->scat              = 0;
841   osm->is                = 0;
842   osm->mat               = 0;
843   osm->pmat              = 0;
844   osm->type              = PC_ASM_RESTRICT;
845   osm->same_local_solves = PETSC_TRUE;
846   osm->inplace           = PETSC_FALSE;
847   pc->data               = (void*)osm;
848 
849   pc->ops->apply             = PCApply_ASM;
850   pc->ops->applytranspose    = PCApplyTranspose_ASM;
851   pc->ops->setup             = PCSetUp_ASM;
852   pc->ops->destroy           = PCDestroy_ASM;
853   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
854   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
855   pc->ops->view              = PCView_ASM;
856   pc->ops->applyrichardson   = 0;
857 
858   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
859                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
860   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
861                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
862   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
863                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
864   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
865                     PCASMSetType_ASM);CHKERRQ(ierr);
866   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
867                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
868 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
869                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
870   PetscFunctionReturn(0);
871 }
872 EXTERN_C_END
873 
874 
875 #undef __FUNCT__
876 #define __FUNCT__ "PCASMCreateSubdomains2D"
877 /*@
878    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
879    preconditioner for a two-dimensional problem on a regular grid.
880 
881    Not Collective
882 
883    Input Parameters:
884 +  m, n - the number of mesh points in the x and y directions
885 .  M, N - the number of subdomains in the x and y directions
886 .  dof - degrees of freedom per node
887 -  overlap - overlap in mesh lines
888 
889    Output Parameters:
890 +  Nsub - the number of subdomains created
891 -  is - the array of index sets defining the subdomains
892 
893    Note:
894    Presently PCAMSCreateSubdomains2d() is valid only for sequential
895    preconditioners.  More general related routines are
896    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
897 
898    Level: advanced
899 
900 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
901 
902 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
903           PCASMSetOverlap()
904 @*/
905 int PCASMCreateSubdomains2D(int m,int n,int M,int N,int dof,int overlap,int *Nsub,IS **is)
906 {
907   int i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
908   int nidx,*idx,loc,ii,jj,ierr,count;
909 
910   PetscFunctionBegin;
911   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
912 
913   *Nsub = N*M;
914   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
915   ystart = 0;
916   loc_outter = 0;
917   for (i=0; i<N; i++) {
918     height = n/N + ((n % N) > i); /* height of subdomain */
919     if (height < 2) SETERRQ(1,"Too many N subdomains for mesh dimension n");
920     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
921     yright = ystart + height + overlap; if (yright > n) yright = n;
922     xstart = 0;
923     for (j=0; j<M; j++) {
924       width = m/M + ((m % M) > j); /* width of subdomain */
925       if (width < 2) SETERRQ(1,"Too many M subdomains for mesh dimension m");
926       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
927       xright = xstart + width + overlap; if (xright > m) xright = m;
928       nidx   = (xright - xleft)*(yright - yleft);
929       ierr = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
930       loc    = 0;
931       for (ii=yleft; ii<yright; ii++) {
932         count = m*ii + xleft;
933         for (jj=xleft; jj<xright; jj++) {
934           idx[loc++] = count++;
935         }
936       }
937       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
938       ierr = PetscFree(idx);CHKERRQ(ierr);
939       xstart += width;
940     }
941     ystart += height;
942   }
943   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
944   PetscFunctionReturn(0);
945 }
946 
947 #undef __FUNCT__
948 #define __FUNCT__ "PCASMGetLocalSubdomains"
949 /*@C
950     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
951     only) for the additive Schwarz preconditioner.
952 
953     Collective on PC
954 
955     Input Parameter:
956 .   pc - the preconditioner context
957 
958     Output Parameters:
959 +   n - the number of subdomains for this processor (default value = 1)
960 -   is - the index sets that define the subdomains for this processor
961 
962 
963     Notes:
964     The IS numbering is in the parallel, global numbering of the vector.
965 
966     Level: advanced
967 
968 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
969 
970 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
971           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
972 @*/
973 int PCASMGetLocalSubdomains(PC pc,int *n,IS *is[])
974 {
975   PC_ASM *osm;
976 
977   PetscFunctionBegin;
978   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
979   PetscValidIntPointer(n,2);
980   if (!pc->setupcalled) {
981     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
982   }
983 
984   osm = (PC_ASM*)pc->data;
985   if (n)  *n = osm->n_local_true;
986   if (is) *is = osm->is;
987   PetscFunctionReturn(0);
988 }
989 
990 #undef __FUNCT__
991 #define __FUNCT__ "PCASMGetLocalSubmatrices"
992 /*@C
993     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
994     only) for the additive Schwarz preconditioner.
995 
996     Collective on PC
997 
998     Input Parameter:
999 .   pc - the preconditioner context
1000 
1001     Output Parameters:
1002 +   n - the number of matrices for this processor (default value = 1)
1003 -   mat - the matrices
1004 
1005 
1006     Level: advanced
1007 
1008 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1009 
1010 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1011           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1012 @*/
1013 int PCASMGetLocalSubmatrices(PC pc,int *n,Mat *mat[])
1014 {
1015   PC_ASM *osm;
1016 
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1019   PetscValidPointer(n,2);
1020   if (!pc->setupcalled) {
1021     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1022   }
1023 
1024   osm = (PC_ASM*)pc->data;
1025   if (n)   *n   = osm->n_local_true;
1026   if (mat) *mat = osm->pmat;
1027   PetscFunctionReturn(0);
1028 }
1029 
1030