xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision be1d678a52e6eff2808b2fa31ae986cdbf03c9fe)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
13 
typedef struct {
  PetscInt   n,n_local,n_local_true; /* n: total subdomains over all processes; n_local_true: subdomains
                                        actually on this process; n_local: max of n_local_true over all
                                        processes (processes are padded to this count for the scatters) */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;               /* linear solvers for each block (n_local_true entries) */
  VecScatter *scat;               /* mapping to subregion (n_local entries) */
  Vec        *x,*y;               /* work vectors: subdomain RHS and solution (n_local each) */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same; only used by PCView_ASM() */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCView_ASM"
33 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
34 {
35   PC_ASM         *jac = (PC_ASM*)pc->data;
36   PetscErrorCode ierr;
37   PetscMPIInt    rank;
38   PetscInt       i;
39   const char     *cstring = 0;
40   PetscTruth     iascii,isstring;
41   PetscViewer    sviewer;
42 
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
47   if (iascii) {
48     if (jac->n > 0) {
49       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",jac->n,jac->overlap);CHKERRQ(ierr);
50     } else {
51       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",jac->overlap);CHKERRQ(ierr);
52     }
53     if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
54     else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
55     else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
56     else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
57     else                                      cstring = "Unknown ASM type";
58     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
59     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
60     if (jac->same_local_solves) {
61       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
62       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
63       if (!rank && jac->ksp) {
64         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
65         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
66         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
67       }
68       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
69     } else {
70       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
71       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D\n",rank,jac->n_local_true);CHKERRQ(ierr);
72       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
73       for (i=0; i<jac->n_local_true; i++) {
74         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
75         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
76         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
77         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
78         if (i != jac->n_local_true-1) {
79           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
80         }
81       }
82       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
83       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
84     }
85   } else if (isstring) {
86     ierr = PetscViewerStringSPrintf(viewer," blks=%D, overlap=%D, type=%D",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
87     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
88       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
89     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
90   } else {
91     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
92   }
93   PetscFunctionReturn(0);
94 }
95 
96 #undef __FUNCT__
97 #define __FUNCT__ "PCSetUp_ASM"
98 static PetscErrorCode PCSetUp_ASM(PC pc)
99 {
100   PC_ASM         *osm  = (PC_ASM*)pc->data;
101   PetscErrorCode ierr;
102   PetscInt       i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
103   PetscInt       start,start_val,end_val,sz,bs;
104   PetscMPIInt    size;
105   MatReuse       scall = MAT_REUSE_MATRIX;
106   IS             isl;
107   KSP            ksp;
108   PC             subpc;
109   char           *prefix,*pprefix;
110   Vec            vec;
111 
112   PetscFunctionBegin;
113   ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
114   if (!pc->setupcalled) {
115     if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
116       /* no subdomains given, use one per processor */
117       osm->n_local_true = osm->n_local = 1;
118       ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
119       osm->n = size;
120     } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
121       PetscInt inwork[2],outwork[2];
122       inwork[0] = inwork[1] = osm->n_local_true;
123       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
124       osm->n_local = outwork[0];
125       osm->n       = outwork[1];
126     }
127     n_local      = osm->n_local;
128     n_local_true = osm->n_local_true;
129     if (!osm->is){ /* build the index sets */
130       ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
131       ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
132       ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
133       sz    = end_val - start_val;
134       start = start_val;
135       if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
136         SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
137       }
138       for (i=0; i<n_local_true; i++){
139         size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
140         ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
141         start      += size;
142         osm->is[i] =  isl;
143       }
144       osm->is_flg = PETSC_TRUE;
145     }
146 
147     ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
148     ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
149     ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
150     osm->y = osm->x + n_local;
151 
152     /*  Extend the "overlapping" regions by a number of steps  */
153     ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
154     for (i=0; i<n_local_true; i++) {
155       ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
156     }
157 
158     /* create the local work vectors and scatter contexts */
159     for (i=0; i<n_local_true; i++) {
160       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
161       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
162       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
163       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
164       ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
165       ierr = ISDestroy(isl);CHKERRQ(ierr);
166     }
167     for (i=n_local_true; i<n_local; i++) {
168       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
169       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
170       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
171       ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
172       ierr = ISDestroy(isl);CHKERRQ(ierr);
173     }
174 
175    /*
176        Create the local solvers.
177     */
178     for (i=0; i<n_local_true; i++) {
179       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
180       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
181       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
182       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
183       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
184       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
185       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
186       osm->ksp[i] = ksp;
187     }
188     scall = MAT_INITIAL_MATRIX;
189   } else {
190     /*
191        Destroy the blocks from the previous iteration
192     */
193     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
194       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
195       scall = MAT_INITIAL_MATRIX;
196     }
197   }
198 
199   /* extract out the submatrices */
200   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
201 
202   /* Return control to the user so that the submatrices can be modified (e.g., to apply
203      different boundary conditions for the submatrices than for the global problem) */
204   ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
205 
206   /* loop over subdomains putting them into local ksp */
207   ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
208   for (i=0; i<n_local_true; i++) {
209     ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
210     ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
211     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
212     ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
213   }
214   ierr = VecDestroy(vec);CHKERRQ(ierr);
215   PetscFunctionReturn(0);
216 }
217 
218 #undef __FUNCT__
219 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
220 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
221 {
222   PC_ASM         *osm = (PC_ASM*)pc->data;
223   PetscErrorCode ierr;
224   PetscInt       i;
225 
226   PetscFunctionBegin;
227   for (i=0; i<osm->n_local_true; i++) {
228     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
229   }
230   /*
231      If inplace flag is set, then destroy the matrix after the setup
232      on blocks is done.
233   */
234   if (osm->inplace && osm->n_local_true > 0) {
235     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
236   }
237   PetscFunctionReturn(0);
238 }
239 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: each
   subdomain's piece of x is gathered, solved with that block's KSP, and
   the local solutions are summed (ADD_VALUES) back into y.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all gathers first so communication overlaps the local solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves
     (this process was padded up to the global maximum block count n_local) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
286 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner; identical to PCApply_ASM() except that the block solves
   use KSPSolveTranspose() and the RESTRICT/INTERPOLATE roles of the
   forward and reverse scatters are swapped.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all gathers first so communication overlaps the local solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves
     (this process was padded up to the global maximum block count n_local) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
336 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Frees everything created in PCSetUp_ASM(): scatters and
   work vectors, extracted submatrices (unless the in-place option already
   freed them), block solvers, the index sets (only if this PC created
   them), and finally the holding arrays and the PC_ASM context itself.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* scatters and work vectors exist for all n_local (padded) slots */
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  /* with the in-place option the submatrices were already freed in PCSetUpOnBlocks_ASM() */
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* only destroy the index sets we created ourselves; user-supplied IS are owned by the caller */
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  /* osm->y shares osm->x's allocation, so freeing x frees both */
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
369 
370 #undef __FUNCT__
371 #define __FUNCT__ "PCSetFromOptions_ASM"
372 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
373 {
374   PC_ASM         *osm = (PC_ASM*)pc->data;
375   PetscErrorCode ierr;
376   PetscInt       blocks,ovl,indx;
377   PetscTruth     flg,set,sym;
378   const char     *type[] = {"none","restrict","interpolate","basic"};
379 
380   PetscFunctionBegin;
381   /* set the type to symmetric if matrix is symmetric */
382   if (pc->pmat && !osm->type_set) {
383     ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
384     if (set && sym) {
385       osm->type = PC_ASM_BASIC;
386     }
387   }
388   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
389     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
390     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
391     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
392     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
393     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
394     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
395     ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
396     if (flg) {
397       ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
398     }
399   ierr = PetscOptionsTail();CHKERRQ(ierr);
400   PetscFunctionReturn(0);
401 }
402 
403 /*------------------------------------------------------------------------------------*/
404 
405 EXTERN_C_BEGIN
406 #undef __FUNCT__
407 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
408 PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[])
409 {
410   PC_ASM *osm = (PC_ASM*)pc->data;
411 
412   PetscFunctionBegin;
413   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
414 
415   if (pc->setupcalled && n != osm->n_local_true) {
416     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
417   }
418   if (!pc->setupcalled){
419     osm->n_local_true = n;
420     osm->is           = is;
421   }
422   PetscFunctionReturn(0);
423 }
424 EXTERN_C_END
425 
426 EXTERN_C_BEGIN
427 #undef __FUNCT__
428 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
429 PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
430 {
431   PC_ASM         *osm = (PC_ASM*)pc->data;
432   PetscErrorCode ierr;
433   PetscMPIInt    rank,size;
434   PetscInt       n;
435 
436   PetscFunctionBegin;
437 
438   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
439 they cannot be set globally yet.");
440 
441   /*
442      Split the subdomains equally amoung all processors
443   */
444   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
445   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
446   n = N/size + ((N % size) > rank);
447   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
448   if (!pc->setupcalled){
449     osm->n_local_true = n;
450     osm->is           = 0;
451   }
452   PetscFunctionReturn(0);
453 }
454 EXTERN_C_END
455 
456 EXTERN_C_BEGIN
457 #undef __FUNCT__
458 #define __FUNCT__ "PCASMSetOverlap_ASM"
459 PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
460 {
461   PC_ASM *osm;
462 
463   PetscFunctionBegin;
464   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
465 
466   osm               = (PC_ASM*)pc->data;
467   osm->overlap      = ovl;
468   PetscFunctionReturn(0);
469 }
470 EXTERN_C_END
471 
472 EXTERN_C_BEGIN
473 #undef __FUNCT__
474 #define __FUNCT__ "PCASMSetType_ASM"
475 PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
476 {
477   PC_ASM *osm;
478 
479   PetscFunctionBegin;
480   osm           = (PC_ASM*)pc->data;
481   osm->type     = type;
482   osm->type_set = PETSC_TRUE;
483   PetscFunctionReturn(0);
484 }
485 EXTERN_C_END
486 
487 EXTERN_C_BEGIN
488 #undef __FUNCT__
489 #define __FUNCT__ "PCASMGetSubKSP_ASM"
490 PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
491 {
492   PC_ASM         *jac = (PC_ASM*)pc->data;
493   PetscErrorCode ierr;
494 
495   PetscFunctionBegin;
496   if (jac->n_local_true < 0) {
497     SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
498   }
499 
500   if (n_local)     *n_local     = jac->n_local_true;
501   if (first_local) {
502     ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPIU_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
503     *first_local -= jac->n_local_true;
504   }
505   *ksp                         = jac->ksp;
506   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
507                                       not necessarily true though!  This flag is
508                                       used only for PCView_ASM() */
509   PetscFunctionReturn(0);
510 }
511 EXTERN_C_END
512 
513 EXTERN_C_BEGIN
514 #undef __FUNCT__
515 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
516 PetscErrorCode PCASMSetUseInPlace_ASM(PC pc)
517 {
518   PC_ASM *dir;
519 
520   PetscFunctionBegin;
521   dir          = (PC_ASM*)pc->data;
522   dir->inplace = PETSC_TRUE;
523   PetscFunctionReturn(0);
524 }
525 EXTERN_C_END
526 
527 /*----------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace"
/*@
   PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_asm_in_place - Activates in-place factorization

   Note:
   PCASMSetUseInPlace() can only be used with the KSP method KSPPREONLY, and
   when the original matrix is not required during the Solve process.
   This destroys the matrix early, thus saving on memory usage.

   Level: intermediate

.keywords: PC, set, factorization, direct, inplace, in-place, ASM

.seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace()
@*/
PetscErrorCode PCASMSetUseInPlace(PC pc)
{
  PetscErrorCode ierr,(*f)(PC);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  /* dispatch to the ASM implementation if this PC is of type ASM; no-op otherwise */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
564 /*----------------------------------------------------------------------------*/
565 
566 #undef __FUNCT__
567 #define __FUNCT__ "PCASMSetLocalSubdomains"
568 /*@C
569     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
570     only) for the additive Schwarz preconditioner.
571 
572     Collective on PC
573 
574     Input Parameters:
575 +   pc - the preconditioner context
576 .   n - the number of subdomains for this processor (default value = 1)
577 -   is - the index sets that define the subdomains for this processor
578          (or PETSC_NULL for PETSc to determine subdomains)
579 
580     Notes:
581     The IS numbering is in the parallel, global numbering of the vector.
582 
583     By default the ASM preconditioner uses 1 block per processor.
584 
585     These index sets cannot be destroyed until after completion of the
586     linear solves for which the ASM preconditioner is being used.
587 
588     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
589 
590     Level: advanced
591 
592 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
593 
594 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
595           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
596 @*/
597 PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[])
598 {
599   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[]);
600 
601   PetscFunctionBegin;
602   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
603   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
604   if (f) {
605     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
606   }
607   PetscFunctionReturn(0);
608 }
609 
610 #undef __FUNCT__
611 #define __FUNCT__ "PCASMSetTotalSubdomains"
612 /*@C
613     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
614     additive Schwarz preconditioner.  Either all or no processors in the
615     PC communicator must call this routine, with the same index sets.
616 
617     Collective on PC
618 
619     Input Parameters:
620 +   pc - the preconditioner context
621 .   n - the number of subdomains for all processors
622 -   is - the index sets that define the subdomains for all processor
623          (or PETSC_NULL for PETSc to determine subdomains)
624 
625     Options Database Key:
626     To set the total number of subdomain blocks rather than specify the
627     index sets, use the option
628 .    -pc_asm_blocks <blks> - Sets total blocks
629 
630     Notes:
631     Currently you cannot use this to set the actual subdomains with the argument is.
632 
633     By default the ASM preconditioner uses 1 block per processor.
634 
635     These index sets cannot be destroyed until after completion of the
636     linear solves for which the ASM preconditioner is being used.
637 
638     Use PCASMSetLocalSubdomains() to set local subdomains.
639 
640     Level: advanced
641 
642 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
643 
644 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
645           PCASMCreateSubdomains2D()
646 @*/
647 PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS *is)
648 {
649   PetscErrorCode ierr,(*f)(PC,PetscInt,IS *);
650 
651   PetscFunctionBegin;
652   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
653   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
654   if (f) {
655     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
656   }
657   PetscFunctionReturn(0);
658 }
659 
660 #undef __FUNCT__
661 #define __FUNCT__ "PCASMSetOverlap"
662 /*@
663     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
664     additive Schwarz preconditioner.  Either all or no processors in the
665     PC communicator must call this routine.
666 
667     Collective on PC
668 
669     Input Parameters:
670 +   pc  - the preconditioner context
671 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
672 
673     Options Database Key:
674 .   -pc_asm_overlap <ovl> - Sets overlap
675 
676     Notes:
677     By default the ASM preconditioner uses 1 block per processor.  To use
678     multiple blocks per perocessor, see PCASMSetTotalSubdomains() and
679     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
680 
681     The overlap defaults to 1, so if one desires that no additional
682     overlap be computed beyond what may have been set with a call to
683     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
684     must be set to be 0.  In particular, if one does not explicitly set
685     the subdomains an application code, then all overlap would be computed
686     internally by PETSc, and using an overlap of 0 would result in an ASM
687     variant that is equivalent to the block Jacobi preconditioner.
688 
689     Note that one can define initial index sets with any overlap via
690     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
691     PCASMSetOverlap() merely allows PETSc to extend that overlap further
692     if desired.
693 
694     Level: intermediate
695 
696 .keywords: PC, ASM, set, overlap
697 
698 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
699           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
700 @*/
701 PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
702 {
703   PetscErrorCode ierr,(*f)(PC,PetscInt);
704 
705   PetscFunctionBegin;
706   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
707   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
708   if (f) {
709     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
710   }
711   PetscFunctionReturn(0);
712 }
713 
714 #undef __FUNCT__
715 #define __FUNCT__ "PCASMSetType"
716 /*@
717     PCASMSetType - Sets the type of restriction and interpolation used
718     for local problems in the additive Schwarz method.
719 
720     Collective on PC
721 
722     Input Parameters:
723 +   pc  - the preconditioner context
724 -   type - variant of ASM, one of
725 .vb
726       PC_ASM_BASIC       - full interpolation and restriction
727       PC_ASM_RESTRICT    - full restriction, local processor interpolation
728       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
729       PC_ASM_NONE        - local processor restriction and interpolation
730 .ve
731 
732     Options Database Key:
733 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
734 
735     Level: intermediate
736 
737 .keywords: PC, ASM, set, type
738 
739 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
740           PCASMCreateSubdomains2D()
741 @*/
742 PetscErrorCode PCASMSetType(PC pc,PCASMType type)
743 {
744   PetscErrorCode ierr,(*f)(PC,PCASMType);
745 
746   PetscFunctionBegin;
747   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
748   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
749   if (f) {
750     ierr = (*f)(pc,type);CHKERRQ(ierr);
751   }
752   PetscFunctionReturn(0);
753 }
754 
755 #undef __FUNCT__
756 #define __FUNCT__ "PCASMGetSubKSP"
757 /*@C
758    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
759    this processor.
760 
761    Collective on PC iff first_local is requested
762 
763    Input Parameter:
764 .  pc - the preconditioner context
765 
766    Output Parameters:
767 +  n_local - the number of blocks on this processor or PETSC_NULL
768 .  first_local - the global number of the first block on this processor or PETSC_NULL,
769                  all processors must request or all must pass PETSC_NULL
770 -  ksp - the array of KSP contexts
771 
772    Note:
773    After PCASMGetSubKSP() the array of KSPes is not to be freed
774 
775    Currently for some matrix implementations only 1 block per processor
776    is supported.
777 
778    You must call KSPSetUp() before calling PCASMGetSubKSP().
779 
780    Level: advanced
781 
782 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
783 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
786 @*/
787 PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
788 {
789   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
790 
791   PetscFunctionBegin;
792   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
793   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
794   if (f) {
795     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
796   } else {
797     SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
798   }
799 
800  PetscFunctionReturn(0);
801 }
802 
803 /* -------------------------------------------------------------------------------------*/
804 /*MC
805    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
806            its own KSP object.
807 
808    Options Database Keys:
809 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
810 .  -pc_asm_in_place - Activates in-place factorization
811 .  -pc_asm_blocks <blks> - Sets total blocks
812 .  -pc_asm_overlap <ovl> - Sets overlap
813 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
814 
815      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
816       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
817       -pc_asm_type basic to use the standard ASM.
818 
819    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
820      than one processor. Defaults to one block per processor.
821 
822      To set options on the solvers for each block append -sub_ to all the KSP, and PC
823         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
824 
     To set the options on the solvers separately for each block call PCASMGetSubKSP()
826          and set the options directly on the resulting KSP object (you can access its PC
827          with KSPGetPC())
828 
829 
830    Level: beginner
831 
832    Concepts: additive Schwarz method
833 
834 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
835            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
836            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
837            PCASMSetUseInPlace()
838 M*/
839 
840 EXTERN_C_BEGIN
841 #undef __FUNCT__
842 #define __FUNCT__ "PCCreate_ASM"
843 PetscErrorCode PCCreate_ASM(PC pc)
844 {
845   PetscErrorCode ierr;
846   PC_ASM         *osm;
847 
848   PetscFunctionBegin;
849   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
850   ierr = PetscLogObjectMemory(pc,sizeof(PC_ASM));CHKERRQ(ierr);
851   osm->n                 = PETSC_DECIDE;
852   osm->n_local           = 0;
853   osm->n_local_true      = PETSC_DECIDE;
854   osm->overlap           = 1;
855   osm->is_flg            = PETSC_FALSE;
856   osm->ksp              = 0;
857   osm->scat              = 0;
858   osm->is                = 0;
859   osm->mat               = 0;
860   osm->pmat              = 0;
861   osm->type              = PC_ASM_RESTRICT;
862   osm->same_local_solves = PETSC_TRUE;
863   osm->inplace           = PETSC_FALSE;
864   pc->data               = (void*)osm;
865 
866   pc->ops->apply             = PCApply_ASM;
867   pc->ops->applytranspose    = PCApplyTranspose_ASM;
868   pc->ops->setup             = PCSetUp_ASM;
869   pc->ops->destroy           = PCDestroy_ASM;
870   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
871   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
872   pc->ops->view              = PCView_ASM;
873   pc->ops->applyrichardson   = 0;
874 
875   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
876                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
877   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
878                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
879   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
880                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
881   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
882                     PCASMSetType_ASM);CHKERRQ(ierr);
883   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
884                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
885 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
886                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
887   PetscFunctionReturn(0);
888 }
889 EXTERN_C_END
890 
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "PCASMCreateSubdomains2D"
894 /*@
895    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
896    preconditioner for a two-dimensional problem on a regular grid.
897 
898    Not Collective
899 
900    Input Parameters:
901 +  m, n - the number of mesh points in the x and y directions
902 .  M, N - the number of subdomains in the x and y directions
903 .  dof - degrees of freedom per node
904 -  overlap - overlap in mesh lines
905 
906    Output Parameters:
907 +  Nsub - the number of subdomains created
908 -  is - the array of index sets defining the subdomains
909 
910    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
912    preconditioners.  More general related routines are
913    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
914 
915    Level: advanced
916 
917 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
918 
919 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
920           PCASMSetOverlap()
921 @*/
922 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
923 {
924   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
925   PetscErrorCode ierr;
926   PetscInt       nidx,*idx,loc,ii,jj,count;
927 
928   PetscFunctionBegin;
929   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
930 
931   *Nsub = N*M;
932   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
933   ystart = 0;
934   loc_outter = 0;
935   for (i=0; i<N; i++) {
936     height = n/N + ((n % N) > i); /* height of subdomain */
937     if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
938     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
939     yright = ystart + height + overlap; if (yright > n) yright = n;
940     xstart = 0;
941     for (j=0; j<M; j++) {
942       width = m/M + ((m % M) > j); /* width of subdomain */
943       if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
944       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
945       xright = xstart + width + overlap; if (xright > m) xright = m;
946       nidx   = (xright - xleft)*(yright - yleft);
947       ierr = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
948       loc    = 0;
949       for (ii=yleft; ii<yright; ii++) {
950         count = m*ii + xleft;
951         for (jj=xleft; jj<xright; jj++) {
952           idx[loc++] = count++;
953         }
954       }
955       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
956       ierr = PetscFree(idx);CHKERRQ(ierr);
957       xstart += width;
958     }
959     ystart += height;
960   }
961   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNCT__
966 #define __FUNCT__ "PCASMGetLocalSubdomains"
967 /*@C
968     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
969     only) for the additive Schwarz preconditioner.
970 
971     Collective on PC
972 
973     Input Parameter:
974 .   pc - the preconditioner context
975 
976     Output Parameters:
977 +   n - the number of subdomains for this processor (default value = 1)
978 -   is - the index sets that define the subdomains for this processor
979 
980 
981     Notes:
982     The IS numbering is in the parallel, global numbering of the vector.
983 
984     Level: advanced
985 
986 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
987 
988 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
989           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
990 @*/
991 PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[])
992 {
993   PC_ASM *osm;
994 
995   PetscFunctionBegin;
996   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
997   PetscValidIntPointer(n,2);
998   if (!pc->setupcalled) {
999     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1000   }
1001 
1002   osm = (PC_ASM*)pc->data;
1003   if (n)  *n = osm->n_local_true;
1004   if (is) *is = osm->is;
1005   PetscFunctionReturn(0);
1006 }
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1010 /*@C
1011     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1012     only) for the additive Schwarz preconditioner.
1013 
1014     Collective on PC
1015 
1016     Input Parameter:
1017 .   pc - the preconditioner context
1018 
1019     Output Parameters:
1020 +   n - the number of matrices for this processor (default value = 1)
1021 -   mat - the matrices
1022 
1023 
1024     Level: advanced
1025 
1026 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1027 
1028 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1029           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1030 @*/
1031 PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1032 {
1033   PC_ASM *osm;
1034 
1035   PetscFunctionBegin;
1036   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1037   PetscValidPointer(n,2);
1038   if (!pc->setupcalled) {
1039     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1040   }
1041 
1042   osm = (PC_ASM*)pc->data;
1043   if (n)   *n   = osm->n_local_true;
1044   if (mat) *mat = osm->pmat;
1045   PetscFunctionReturn(0);
1046 }
1047 
1048