xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 3ab0aad5e591026ee6fbe8b8c4e8eb05f49d9dc6)
1 /*$Id: asm.c,v 1.131 2001/08/07 03:03:38 balay Exp $*/
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
14 
/*
   Private context for the additive Schwarz preconditioner.
   Each processor owns n_local_true subdomains; all processors are padded
   up to n_local dummy subdomains so the VecScatter logic is uniform.
*/
typedef struct {
  int        n,n_local,n_local_true; /* n: total true subdomains over all procs; n_local: max over procs of n_local_true; n_local_true: subdomains on this proc */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup (so PCDestroy_ASM must free them) */
  int        overlap;             /* overlap requested by user */
  KSP       *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* per-block work vectors: x[i] local rhs, y[i] local solution (y shares x's allocation) */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_ASM"
34 static int PCView_ASM(PC pc,PetscViewer viewer)
35 {
36   PC_ASM      *jac = (PC_ASM*)pc->data;
37   int         rank,ierr,i;
38   const char  *cstring = 0;
39   PetscTruth  isascii,isstring;
40   PetscViewer sviewer;
41 
42 
43   PetscFunctionBegin;
44   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
46   if (isascii) {
47     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %d, amount of overlap = %d\n",jac->n,jac->overlap);CHKERRQ(ierr);
48     if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
49     else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
50     else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
51     else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
52     else                                      cstring = "Unknown ASM type";
53     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
54     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
55     if (jac->same_local_solves) {
56       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
57       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
58       if (!rank && jac->ksp) {
59         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
60         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
61         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
62       }
63       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
64     } else {
65       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
66       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d\n",rank,jac->n_local);CHKERRQ(ierr);
67       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
68       for (i=0; i<jac->n_local; i++) {
69         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr);
70         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
71         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
72         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
73         if (i != jac->n_local-1) {
74           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
75         }
76       }
77       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
78       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
79     }
80   } else if (isstring) {
81     ierr = PetscViewerStringSPrintf(viewer," blks=%d, overlap=%d, type=%d",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
82     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
83       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
84     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
85   } else {
86     SETERRQ1(1,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
87   }
88   PetscFunctionReturn(0);
89 }
90 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds the data structures for the additive Schwarz method:
   decides the subdomain partition, creates the (overlapping) index sets,
   work vectors, scatters, one KSP per local block, and extracts the
   submatrices used as the block operators.

   Collective on PC (MPI_Allreduce, MatIncreaseOverlap, MatGetSubMatrices).
*/
static int PCSetUp_ASM(PC pc)
{
  PC_ASM   *osm  = (PC_ASM*)pc->data;
  int      i,ierr,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  int      start,start_val,end_val,size,sz,bs;
  MatReuse scall = MAT_REUSE_MATRIX;
  IS       isl;
  KSP      ksp;
  PC       subpc;
  char     *prefix,*pprefix;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    /* ---- first setup: determine the subdomain counts ---- */
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      /* single reduction computes both max (padding count) and sum (global total) */
      int inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPI_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets: split the local rows into contiguous,
                      block-size-aligned strides, one per true local subdomain */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      /* the ownership range must fall on block-size boundaries */
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
      for (i=0; i<n_local_true; i++){
        /* distribute the local blocks as evenly as possible; the first
           (sz/bs) % n_local_true subdomains get one extra matrix block */
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      osm->is_flg = PETSC_TRUE; /* we own these IS; PCDestroy_ASM frees them */
    }

    /* y shares x's allocation (second half); only x is freed in PCDestroy_ASM */
    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
    ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(pc->vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    /* empty (zero-length) vectors/scatters pad this process up to n_local so
       the collective scatter calls in PCApply_ASM stay load-balanced */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(pc->vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

   /*
       Create the local solvers: one sequential KSP per true local block,
       defaulting to KSPPREONLY, inheriting the PC's options prefix + "sub_".
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      PetscLogObjectParent(pc,ksp);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    PetscLogObjectParent(pc,osm->pmat[i]);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
207 
208 #undef __FUNCT__
209 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
210 static int PCSetUpOnBlocks_ASM(PC pc)
211 {
212   PC_ASM *osm = (PC_ASM*)pc->data;
213   int    i,ierr;
214 
215   PetscFunctionBegin;
216   for (i=0; i<osm->n_local_true; i++) {
217     ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
218     ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
219     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
220   }
221   /*
222      If inplace flag is set, then destroy the matrix after the setup
223      on blocks is done.
224   */
225   if (osm->inplace && osm->n_local_true > 0) {
226     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
227   }
228   PetscFunctionReturn(0);
229 }
230 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: scatters the
   global vector x onto each (overlapping) subdomain, solves each block
   system, and adds the local solutions back into the global vector y.

   The scatter begin/end calls are deliberately interleaved with the local
   solves so communication overlaps computation; do not reorder them.
*/
static int PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all forward scatters (including the zero-length padding ones,
     which must participate for collective correctness) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr); /* results are ADDed below, so clear y first */
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters: y now holds the sum of block solutions */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
278 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner.  Structure mirrors PCApply_ASM(), but with transposed
   block solves and with the RESTRICT/INTERPOLATE roles exchanged.
*/
static int PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all forward scatters (padding scatters included for collectivity) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr); /* results are ADDed below, so clear y first */
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
329 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Frees everything created by PCSetUp_ASM(): scatters,
   work vectors, submatrices (unless already freed in in-place mode),
   block solvers, and (if we created them) the subdomain index sets.
*/
static int PCDestroy_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    i,ierr;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  /* in in-place mode the submatrices were already destroyed in PCSetUpOnBlocks_ASM() */
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* only destroy the index sets if PCSetUp_ASM() created them (is_flg);
     user-supplied IS remain the caller's responsibility */
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  /* osm->y points into the osm->x allocation, so freeing x frees both */
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
361 
#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
/*
   PCSetFromOptions_ASM - Processes the ASM options database keys
   (-pc_asm_blocks, -pc_asm_overlap, -pc_asm_in_place, -pc_asm_type).
*/
static int PCSetFromOptions_ASM(PC pc)
{
  PC_ASM     *osm = (PC_ASM*)pc->data;
  int        blocks,ovl,ierr,indx;
  PetscTruth flg,set,sym;
  /* order matches the PCASMType enumeration, so the EList index below can be
     cast directly to PCASMType */
  const char *type[] = {"none","restrict","interpolate","basic"};

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric (only when the user has
     not chosen a type explicitly; see osm->type_set) */
  if (pc->pmat && !osm->type_set) {
    ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
    if (set && sym) {
      osm->type = PC_ASM_BASIC;
    }
  }
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
    ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
    ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
    /* default displayed is "restrict" (type[1]) */
    ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
    }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
393 
394 /*------------------------------------------------------------------------------------*/
395 
396 EXTERN_C_BEGIN
397 #undef __FUNCT__
398 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
399 int PCASMSetLocalSubdomains_ASM(PC pc,int n,IS is[])
400 {
401   PC_ASM *osm = (PC_ASM*)pc->data;
402 
403   PetscFunctionBegin;
404   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
405 
406   if (pc->setupcalled && n != osm->n_local_true) {
407     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
408   }
409   if (!pc->setupcalled){
410     osm->n_local_true = n;
411     osm->is           = is;
412   }
413   PetscFunctionReturn(0);
414 }
415 EXTERN_C_END
416 
417 EXTERN_C_BEGIN
418 #undef __FUNCT__
419 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
420 int PCASMSetTotalSubdomains_ASM(PC pc,int N,IS *is)
421 {
422   PC_ASM *osm = (PC_ASM*)pc->data;
423   int    rank,size,ierr,n;
424 
425   PetscFunctionBegin;
426 
427   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
428 they cannot be set globally yet.");
429 
430   /*
431      Split the subdomains equally amoung all processors
432   */
433   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
434   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
435   n = N/size + ((N % size) > rank);
436   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
437   if (!pc->setupcalled){
438     osm->n_local_true = n;
439     osm->is           = 0;
440   }
441   PetscFunctionReturn(0);
442 }
443 EXTERN_C_END
444 
445 EXTERN_C_BEGIN
446 #undef __FUNCT__
447 #define __FUNCT__ "PCASMSetOverlap_ASM"
448 int PCASMSetOverlap_ASM(PC pc,int ovl)
449 {
450   PC_ASM *osm;
451 
452   PetscFunctionBegin;
453   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
454 
455   osm               = (PC_ASM*)pc->data;
456   osm->overlap      = ovl;
457   PetscFunctionReturn(0);
458 }
459 EXTERN_C_END
460 
461 EXTERN_C_BEGIN
462 #undef __FUNCT__
463 #define __FUNCT__ "PCASMSetType_ASM"
464 int PCASMSetType_ASM(PC pc,PCASMType type)
465 {
466   PC_ASM *osm;
467 
468   PetscFunctionBegin;
469   osm        = (PC_ASM*)pc->data;
470   osm->type     = type;
471   osm->type_set = PETSC_TRUE;
472   PetscFunctionReturn(0);
473 }
474 EXTERN_C_END
475 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
/*
   PCASMGetSubKSP_ASM - Type-specific implementation of PCASMGetSubKSP();
   returns the per-block KSP array created in PCSetUp_ASM().
*/
int PCASMGetSubKSP_ASM(PC pc,int *n_local,int *first_local,KSP **ksp)
{
  PC_ASM   *jac = (PC_ASM*)pc->data;
  int      ierr;

  PetscFunctionBegin;
  /* n_local_true stays negative (PETSC_DECIDE) until setup has run */
  if (jac->n_local_true < 0) {
    SETERRQ(1,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local)     *n_local     = jac->n_local_true;
  if (first_local) {
    /* exclusive prefix sum of block counts = global index of this process's
       first block; MPI_Scan is collective, so either every process requests
       first_local or none may */
    ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
    *first_local -= jac->n_local_true;
  }
  *ksp                         = jac->ksp;
  jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
                                      not necessarily true though!  This flag is
                                      used only for PCView_ASM() */
  PetscFunctionReturn(0);
}
EXTERN_C_END
501 
502 EXTERN_C_BEGIN
503 #undef __FUNCT__
504 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
505 int PCASMSetUseInPlace_ASM(PC pc)
506 {
507   PC_ASM *dir;
508 
509   PetscFunctionBegin;
510   dir          = (PC_ASM*)pc->data;
511   dir->inplace = PETSC_TRUE;
512   PetscFunctionReturn(0);
513 }
514 EXTERN_C_END
515 
516 /*----------------------------------------------------------------------------*/
517 #undef __FUNCT__
518 #define __FUNCT__ "PCASMSetUseInPlace"
519 /*@
520    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
521 
522    Collective on PC
523 
524    Input Parameters:
525 .  pc - the preconditioner context
526 
527    Options Database Key:
528 .  -pc_asm_in_place - Activates in-place factorization
529 
530    Note:
531    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
532    when the original matrix is not required during the Solve process.
533    This destroys the matrix, early thus, saving on memory usage.
534 
535    Level: intermediate
536 
537 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
538 
539 .seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace ()
540 @*/
541 int PCASMSetUseInPlace(PC pc)
542 {
543   int ierr,(*f)(PC);
544 
545   PetscFunctionBegin;
546   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
547   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
548   if (f) {
549     ierr = (*f)(pc);CHKERRQ(ierr);
550   }
551   PetscFunctionReturn(0);
552 }
553 /*----------------------------------------------------------------------------*/
554 
555 #undef __FUNCT__
556 #define __FUNCT__ "PCASMSetLocalSubdomains"
557 /*@C
558     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
559     only) for the additive Schwarz preconditioner.
560 
561     Collective on PC
562 
563     Input Parameters:
564 +   pc - the preconditioner context
565 .   n - the number of subdomains for this processor (default value = 1)
566 -   is - the index sets that define the subdomains for this processor
567          (or PETSC_NULL for PETSc to determine subdomains)
568 
569     Notes:
570     The IS numbering is in the parallel, global numbering of the vector.
571 
572     By default the ASM preconditioner uses 1 block per processor.
573 
574     These index sets cannot be destroyed until after completion of the
575     linear solves for which the ASM preconditioner is being used.
576 
577     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
578 
579     Level: advanced
580 
581 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
582 
583 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
584           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
585 @*/
586 int PCASMSetLocalSubdomains(PC pc,int n,IS is[])
587 {
588   int ierr,(*f)(PC,int,IS[]);
589 
590   PetscFunctionBegin;
591   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
592   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
593   if (f) {
594     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
595   }
596   PetscFunctionReturn(0);
597 }
598 
599 #undef __FUNCT__
600 #define __FUNCT__ "PCASMSetTotalSubdomains"
601 /*@C
602     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
603     additive Schwarz preconditioner.  Either all or no processors in the
604     PC communicator must call this routine, with the same index sets.
605 
606     Collective on PC
607 
608     Input Parameters:
609 +   pc - the preconditioner context
610 .   n - the number of subdomains for all processors
611 -   is - the index sets that define the subdomains for all processor
612          (or PETSC_NULL for PETSc to determine subdomains)
613 
614     Options Database Key:
615     To set the total number of subdomain blocks rather than specify the
616     index sets, use the option
617 .    -pc_asm_blocks <blks> - Sets total blocks
618 
619     Notes:
620     Currently you cannot use this to set the actual subdomains with the argument is.
621 
622     By default the ASM preconditioner uses 1 block per processor.
623 
624     These index sets cannot be destroyed until after completion of the
625     linear solves for which the ASM preconditioner is being used.
626 
627     Use PCASMSetLocalSubdomains() to set local subdomains.
628 
629     Level: advanced
630 
631 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
632 
633 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
634           PCASMCreateSubdomains2D()
635 @*/
636 int PCASMSetTotalSubdomains(PC pc,int N,IS *is)
637 {
638   int ierr,(*f)(PC,int,IS *);
639 
640   PetscFunctionBegin;
641   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
642   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
643   if (f) {
644     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
645   }
646   PetscFunctionReturn(0);
647 }
648 
649 #undef __FUNCT__
650 #define __FUNCT__ "PCASMSetOverlap"
651 /*@
652     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
653     additive Schwarz preconditioner.  Either all or no processors in the
654     PC communicator must call this routine.
655 
656     Collective on PC
657 
658     Input Parameters:
659 +   pc  - the preconditioner context
660 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
661 
662     Options Database Key:
663 .   -pc_asm_overlap <ovl> - Sets overlap
664 
665     Notes:
666     By default the ASM preconditioner uses 1 block per processor.  To use
667     multiple blocks per perocessor, see PCASMSetTotalSubdomains() and
668     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
669 
670     The overlap defaults to 1, so if one desires that no additional
671     overlap be computed beyond what may have been set with a call to
672     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
673     must be set to be 0.  In particular, if one does not explicitly set
674     the subdomains an application code, then all overlap would be computed
675     internally by PETSc, and using an overlap of 0 would result in an ASM
676     variant that is equivalent to the block Jacobi preconditioner.
677 
678     Note that one can define initial index sets with any overlap via
679     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
680     PCASMSetOverlap() merely allows PETSc to extend that overlap further
681     if desired.
682 
683     Level: intermediate
684 
685 .keywords: PC, ASM, set, overlap
686 
687 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
688           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
689 @*/
690 int PCASMSetOverlap(PC pc,int ovl)
691 {
692   int ierr,(*f)(PC,int);
693 
694   PetscFunctionBegin;
695   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
696   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
697   if (f) {
698     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
699   }
700   PetscFunctionReturn(0);
701 }
702 
703 #undef __FUNCT__
704 #define __FUNCT__ "PCASMSetType"
705 /*@
706     PCASMSetType - Sets the type of restriction and interpolation used
707     for local problems in the additive Schwarz method.
708 
709     Collective on PC
710 
711     Input Parameters:
712 +   pc  - the preconditioner context
713 -   type - variant of ASM, one of
714 .vb
715       PC_ASM_BASIC       - full interpolation and restriction
716       PC_ASM_RESTRICT    - full restriction, local processor interpolation
717       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
718       PC_ASM_NONE        - local processor restriction and interpolation
719 .ve
720 
721     Options Database Key:
722 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
723 
724     Level: intermediate
725 
726 .keywords: PC, ASM, set, type
727 
728 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
729           PCASMCreateSubdomains2D()
730 @*/
731 int PCASMSetType(PC pc,PCASMType type)
732 {
733   int ierr,(*f)(PC,PCASMType);
734 
735   PetscFunctionBegin;
736   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
737   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
738   if (f) {
739     ierr = (*f)(pc,type);CHKERRQ(ierr);
740   }
741   PetscFunctionReturn(0);
742 }
743 
744 #undef __FUNCT__
745 #define __FUNCT__ "PCASMGetSubKSP"
746 /*@C
747    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
748    this processor.
749 
750    Collective on PC iff first_local is requested
751 
752    Input Parameter:
753 .  pc - the preconditioner context
754 
755    Output Parameters:
756 +  n_local - the number of blocks on this processor or PETSC_NULL
757 .  first_local - the global number of the first block on this processor or PETSC_NULL,
758                  all processors must request or all must pass PETSC_NULL
759 -  ksp - the array of KSP contexts
760 
761    Note:
762    After PCASMGetSubKSP() the array of KSPes is not to be freed
763 
764    Currently for some matrix implementations only 1 block per processor
765    is supported.
766 
767    You must call KSPSetUp() before calling PCASMGetSubKSP().
768 
769    Level: advanced
770 
771 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
772 
773 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
774           PCASMCreateSubdomains2D(),
775 @*/
776 int PCASMGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[])
777 {
778   int ierr,(*f)(PC,int*,int*,KSP **);
779 
780   PetscFunctionBegin;
781   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
782   PetscValidIntPointer(n_local,2);
783   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
784   if (f) {
785     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
786   } else {
787     SETERRQ(1,"Cannot get subksp for this type of PC");
788   }
789 
790  PetscFunctionReturn(0);
791 }
792 
793 /* -------------------------------------------------------------------------------------*/
794 /*MC
795    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
796            its own KSP object.
797 
798    Options Database Keys:
799 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
800 .  -pc_asm_in_place - Activates in-place factorization
801 .  -pc_asm_blocks <blks> - Sets total blocks
802 .  -pc_asm_overlap <ovl> - Sets overlap
803 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
804 
805      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
806       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
807       -pc_asm_type basic to use the standard ASM.
808 
809    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
810      than one processor. Defaults to one block per processor.
811 
812      To set options on the solvers for each block append -sub_ to all the KSP, and PC
813         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
814 
815      To set the options on the solvers separately for each block call PCASMGetSubKSP()
816          and set the options directly on the resulting KSP object (you can access its PC
817          with KSPGetPC())
818 
819 
820    Level: beginner
821 
822    Concepts: additive Schwarz method
823 
824 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
825            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
826            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
827            PCASMSetUseInPlace()
828 M*/
829 
830 EXTERN_C_BEGIN
831 #undef __FUNCT__
832 #define __FUNCT__ "PCCreate_ASM"
833 /*
834    PCCreate_ASM - Allocates the private PC_ASM context, installs the default
835    parameter values, fills in the PC operations table, and registers the
836    PCASMXXX() customization routines for this preconditioner type.
837 */
838 int PCCreate_ASM(PC pc)
839 {
840   PC_ASM *osm;
841   int    ierr;
842 
843   PetscFunctionBegin;
844   /* Allocate and zero the private context */
845   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
846   PetscLogObjectMemory(pc,sizeof(PC_ASM));
847   ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
848 
849   /* Defaults: subdomain counts decided at setup time, overlap of one
850      mesh line, restricted additive Schwarz */
851   osm->n                 = PETSC_DECIDE;
852   osm->n_local           = 0;
853   osm->n_local_true      = PETSC_DECIDE;
854   osm->overlap           = 1;
855   osm->is_flg            = PETSC_FALSE;
856   osm->ksp               = 0;
857   osm->scat              = 0;
858   osm->is                = 0;
859   osm->mat               = 0;
860   osm->pmat              = 0;
861   osm->type              = PC_ASM_RESTRICT;
862   osm->same_local_solves = PETSC_TRUE;
863   osm->inplace           = PETSC_FALSE;
864   pc->data               = (void*)osm;
865 
866   /* Function table for this PC implementation */
867   pc->ops->apply           = PCApply_ASM;
868   pc->ops->applytranspose  = PCApplyTranspose_ASM;
869   pc->ops->setup           = PCSetUp_ASM;
870   pc->ops->destroy         = PCDestroy_ASM;
871   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
872   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
873   pc->ops->view            = PCView_ASM;
874   pc->ops->applyrichardson = 0;
875 
876   /* Compose the type-specific setter/getter routines so the generic
877      PCASMXXX() interface functions can locate them by name */
878   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
879                                            PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
880   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
881                                            PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
882   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
883                                            PCASMSetOverlap_ASM);CHKERRQ(ierr);
884   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
885                                            PCASMSetType_ASM);CHKERRQ(ierr);
886   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
887                                            PCASMGetSubKSP_ASM);CHKERRQ(ierr);
888   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
889                                            PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 EXTERN_C_END
881 
882 
883 #undef __FUNCT__
884 #define __FUNCT__ "PCASMCreateSubdomains2D"
885 /*@
886    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
887    preconditioner for a two-dimensional problem on a regular grid.
888 
889    Not Collective
890 
891    Input Parameters:
892 +  m, n - the number of mesh points in the x and y directions
893 .  M, N - the number of subdomains in the x and y directions
894 .  dof - degrees of freedom per node
895 -  overlap - overlap in mesh lines
896 
897    Output Parameters:
898 +  Nsub - the number of subdomains created
899 -  is - the array of index sets defining the subdomains
900 
901    Note:
902    Presently PCAMSCreateSubdomains2d() is valid only for sequential
903    preconditioners.  More general related routines are
904    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
905 
906    Level: advanced
907 
908 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
909 
910 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
911           PCASMSetOverlap()
912 @*/
913 int PCASMCreateSubdomains2D(int m,int n,int M,int N,int dof,int overlap,int *Nsub,IS **is)
914 {
915   int i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
916   int nidx,*idx,loc,ii,jj,ierr,count;
917 
918   PetscFunctionBegin;
919   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
920 
921   *Nsub = N*M;
922   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
923   ystart = 0;
924   loc_outter = 0;
925   for (i=0; i<N; i++) {
926     height = n/N + ((n % N) > i); /* height of subdomain */
927     if (height < 2) SETERRQ(1,"Too many N subdomains for mesh dimension n");
928     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
929     yright = ystart + height + overlap; if (yright > n) yright = n;
930     xstart = 0;
931     for (j=0; j<M; j++) {
932       width = m/M + ((m % M) > j); /* width of subdomain */
933       if (width < 2) SETERRQ(1,"Too many M subdomains for mesh dimension m");
934       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
935       xright = xstart + width + overlap; if (xright > m) xright = m;
936       nidx   = (xright - xleft)*(yright - yleft);
937       ierr = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
938       loc    = 0;
939       for (ii=yleft; ii<yright; ii++) {
940         count = m*ii + xleft;
941         for (jj=xleft; jj<xright; jj++) {
942           idx[loc++] = count++;
943         }
944       }
945       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
946       ierr = PetscFree(idx);CHKERRQ(ierr);
947       xstart += width;
948     }
949     ystart += height;
950   }
951   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
952   PetscFunctionReturn(0);
953 }
954 
955 #undef __FUNCT__
956 #define __FUNCT__ "PCASMGetLocalSubdomains"
957 /*@C
958     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
959     only) for the additive Schwarz preconditioner.
960 
961     Collective on PC
962 
963     Input Parameter:
964 .   pc - the preconditioner context
965 
966     Output Parameters:
967 +   n - the number of subdomains for this processor (default value = 1)
968 -   is - the index sets that define the subdomains for this processor
969 
970 
971     Notes:
972     The IS numbering is in the parallel, global numbering of the vector.
973 
974     Level: advanced
975 
976 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
977 
978 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
979           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
980 @*/
981 int PCASMGetLocalSubdomains(PC pc,int *n,IS *is[])
982 {
983   PC_ASM *osm;
984 
985   PetscFunctionBegin;
986   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
987   PetscValidIntPointer(n,2);
988   /* the subdomain index sets are only built during setup */
989   if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
990   osm = (PC_ASM*)pc->data;
991   /* NOTE(review): n is validated non-null above yet still guarded here;
992      one of the two checks is redundant -- confirm intended contract */
993   if (n)  *n  = osm->n_local_true;
994   if (is) *is = osm->is;
995   PetscFunctionReturn(0);
996 }
997 
998 #undef __FUNCT__
999 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1000 /*@C
1001     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1002     only) for the additive Schwarz preconditioner.
1003 
1004     Collective on PC
1005 
1006     Input Parameter:
1007 .   pc - the preconditioner context
1008 
1009     Output Parameters:
1010 +   n - the number of matrices for this processor (default value = 1)
1011 -   mat - the matrices
1012 
1013 
1014     Level: advanced
1015 
1016 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1017 
1018 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1019           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1020 @*/
1021 int PCASMGetLocalSubmatrices(PC pc,int *n,Mat *mat[])
1022 {
1023   PC_ASM *osm;
1024 
1025   PetscFunctionBegin;
1026   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1027   PetscValidPointer(n,2);
1028   /* the local submatrices are only extracted during setup */
1029   if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1030   osm = (PC_ASM*)pc->data;
1031   if (n)   *n   = osm->n_local_true;
1032   if (mat) *mat = osm->pmat;
1033   PetscFunctionReturn(0);
1034 }
1037 
1038