/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains.  But in order to
  deal easily with the VecScatter(), we treat each processor as if it had the
  same number of subdomains.

       n            - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local      - maximum over all processors of n_local_true
*/
#include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  PetscInt   n,n_local,n_local_true;
  PetscTruth is_flg;              /* set to PETSC_TRUE if the index sets were created in PCSetUp_ASM() */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done; similar to in-place
                                     factorization in the case of LU and ILU */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *jac = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i;
  const char     *cstring = 0;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",jac->n,jac->overlap);CHKERRQ(ierr);
    if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
    else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
    else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
    else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
    else                                      cstring = "Unknown ASM type";
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
      if (!rank && jac->ksp) {
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D\n",rank,jac->n_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i != jac->n_local-1) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D, overlap=%D, type=%D",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscInt       start,start_val,end_val,sz,bs;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  char           *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
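      /* PetscMaxSum_Op reduces a pair of entries to (max, sum), so a single
         MPI_Allreduce() yields both n_local (the max) and n (the global total) */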
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
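      /* split the sz/bs blocks of rows as evenly as possible: the first
         (sz/bs) % n_local_true subdomains each receive one extra block of bs rows */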
      for (i=0; i<n_local_true; i++){
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      osm->is_flg = PETSC_TRUE;
    }

    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter),&osm->scat);CHKERRQ(ierr);
    ierr   = PetscMalloc(2*n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

    /*
       Create the local solvers.
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      PetscLogObjectParent(pc,ksp);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    PetscLogObjectParent(pc,osm->pmat[i]);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  /*
     If the inplace flag is set, destroy the submatrices after the setup
     on blocks is done.
  */
  if (osm->inplace && osm->n_local_true > 0) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
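  /* PCASMType is used as a bit mask here: PC_ASM_BASIC has both the restrict and
     interpolate bits set, PC_ASM_NONE has neither */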
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

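  /* begin all the forward scatters first so that the communication overlaps
     the local solves below */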
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of the three terms.
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static PetscErrorCode PCSetFromOptions_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl,indx;
  PetscTruth     flg,set,sym;
  const char     *type[] = {"none","restrict","interpolate","basic"};

  PetscFunctionBegin;
  /* set the type to basic if the matrix is known to be symmetric */
  if (pc->pmat && !osm->type_set) {
    ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
    if (set && sym) {
      osm->type = PC_ASM_BASIC;
    }
  }
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr);}
    ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);}
    ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization in-place","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr);}
    ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
    }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[])
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");

  if (pc->setupcalled && n != osm->n_local_true) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before PCSetUp().");
  }
  if (!pc->setupcalled){
    osm->n_local_true = n;
    osm->is           = is;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
  n = N/size + ((N % size) > rank);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled){
    osm->n_local_true = n;
    osm->is           = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");

  osm          = (PC_ASM*)pc->data;
  osm->overlap = ovl;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  osm           = (PC_ASM*)pc->data;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *jac = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (jac->n_local_true < 0) {
    SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local) *n_local = jac->n_local_true;
  if (first_local) {
    ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPIU_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
    *first_local -= jac->n_local_true;
  }
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE; /* assume that the local solves are now different;
                                           not necessarily true, though!  This flag is
                                           used only by PCView_ASM() */
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace_ASM"
PetscErrorCode PCASMSetUseInPlace_ASM(PC pc)
{
  PC_ASM *dir;

  PetscFunctionBegin;
  dir          = (PC_ASM*)pc->data;
  dir->inplace = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*----------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace"
/*@
   PCASMSetUseInPlace - Tells the preconditioner to destroy the submatrices once the setup on blocks is done.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_asm_in_place - Activates in-place factorization

   Note:
   PCASMSetUseInPlace() can only be used with the KSP method KSPPREONLY, and
   only when the original matrix is not required during the solve process.
   This destroys the matrix early and thus saves memory.
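
   Sample usage (a sketch; assumes ksp is a KSP whose operators have been set):
.vb
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCASM);
      PCASMSetUseInPlace(pc);
.ve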

   Level: intermediate

.keywords: PC, set, factorization, direct, inplace, in-place, ASM

.seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace()
@*/
PetscErrorCode PCASMSetUseInPlace(PC pc)
{
  PetscErrorCode ierr,(*f)(PC);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*----------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
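
    Sample usage (a sketch; assumes is1 and is2 are index sets previously built
    with ISCreateGeneral() in the global numbering):
.vb
      IS is[2] = {is1,is2};
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCASM);
      PCASMSetLocalSubdomains(pc,2,is);
.ve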

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N - the number of subdomains for all processors
-   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot set the actual subdomains via the is argument.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.
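
    Sample usage (a sketch; asks for eight subdomains in total and lets PETSc
    build the index sets):
.vb
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCASM);
      PCASMSetTotalSubdomains(pc,8,PETSC_NULL);
.ve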

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS *is)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS *);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,N,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap is computed
    internally by PETSc, and using an overlap of 0 results in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.
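
    Sample usage (a sketch; with PETSc-generated subdomains an overlap of 0
    gives the block Jacobi equivalent mentioned above):
.vb
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCASM);
      PCASMSetOverlap(pc,0);
.ve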

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,ovl);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr,(*f)(PC,PCASMType);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or PETSC_NULL if not needed
.  first_local - the global number of the first block on this processor, or PETSC_NULL;
                 all processors must request it or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().
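
   Sample usage (a sketch; assumes declarations PetscInt nlocal,first,i; KSP *subksp;
   PC pc,subpc; and sets ILU as the solver on every local block):
.vb
      KSPSetUp(ksp);
      KSPGetPC(ksp,&pc);
      PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
      for (i=0; i<nlocal; i++) {
        KSPGetPC(subksp[i],&subpc);
        PCSetType(subpc,PCILU);
      }
.ve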

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n_local,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
.  -pc_asm_in_place - Activates in-place factorization
.  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
-  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors, you
      will get a different convergence rate due to the default option -pc_asm_type restrict. Use
      -pc_asm_type basic to get the standard ASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1
        -sub_ksp_type preonly (a command-line sketch is shown below).

     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC()).
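
     For example (a command-line sketch; ./app stands for any PETSc application):
.vb
      mpirun -np 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 \
             -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_ilu_levels 1
.ve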

   Level: beginner

   Concepts: additive Schwarz method

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
           PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
           PCASMSetUseInPlace()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
  PetscLogObjectMemory(pc,sizeof(PC_ASM));
  ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->is_flg            = PETSC_FALSE;
  osm->ksp               = 0;
  osm->scat              = 0;
  osm->is                = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->inplace           = PETSC_FALSE;
  pc->data               = (void*)osm;

  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
                    PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
-  is - the array of index sets defining the subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
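
   Sample usage (a sketch for a 64 x 64 grid with one unknown per node, split into
   a 4 x 4 array of subdomains with one extra line of overlap; pc is assumed to be
   a PCASM preconditioner):
.vb
      PetscInt Nsub;
      IS       *is;
      PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is);
      PCASMSetLocalSubdomains(pc,Nsub,is);
.ve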

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_ERR_SUP,"Currently only supports one degree of freedom per node");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outer++);CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubdomains"
/*@C
    PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
@*/
PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[])
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n,2);
  if (!pc->setupcalled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  }

  osm = (PC_ASM*)pc->data;
  if (n)  *n = osm->n_local_true;
  if (is) *is = osm->is;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubmatrices"
/*@C
    PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of matrices for this processor (default value = 1)
-   mat - the matrices

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(n,2);
  if (!pc->setupcalled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  }

  osm = (PC_ASM*)pc->data;
  if (n)   *n   = osm->n_local_true;
  if (mat) *mat = osm->pmat;
  PetscFunctionReturn(0);
}