xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 18be62a5feccf172f7bc80c15c4be8f6d6443e8b)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
15 
/*
   Private context for the additive Schwarz preconditioner; one instance
   hangs off each PC of type PCASM.  See the file header comment for the
   relationship between n, n_local and n_local_true.
*/
typedef struct {
  PetscInt   n,n_local,n_local_true; /* global, padded-local, and actual-local subdomain counts */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup (then this PC owns and must destroy them) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* per-block work vectors (RHS and solution); y aliases the second half of the x allocation */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "PCView_ASM"
35 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
36 {
37   PC_ASM         *jac = (PC_ASM*)pc->data;
38   PetscErrorCode ierr;
39   PetscMPIInt    rank;
40   PetscInt       i;
41   PetscTruth     iascii,isstring;
42   PetscViewer    sviewer;
43 
44 
45   PetscFunctionBegin;
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
48   if (iascii) {
49     if (jac->n > 0) {
50       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",jac->n,jac->overlap);CHKERRQ(ierr);
51     } else {
52       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",jac->overlap);CHKERRQ(ierr);
53     }
54     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[jac->type]);CHKERRQ(ierr);
55     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
56     if (jac->same_local_solves) {
57       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59       if (!rank && jac->ksp) {
60         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
62         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63       }
64       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65     } else {
66       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
67       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D\n",rank,jac->n_local_true);CHKERRQ(ierr);
68       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
69       for (i=0; i<jac->n_local; i++) {
70         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
71         if (i < jac->n_local_true) {
72           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
73           ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
74           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
75         }
76         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
77       }
78       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
79       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
80     }
81   } else if (isstring) {
82     ierr = PetscViewerStringSPrintf(viewer," blks=%D, overlap=%D, type=%D",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
83     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
84       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
85     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
86   } else {
87     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
88   }
89   PetscFunctionReturn(0);
90 }
91 
92 #undef __FUNCT__
93 #define __FUNCT__ "PCSetUp_ASM"
94 static PetscErrorCode PCSetUp_ASM(PC pc)
95 {
96   PC_ASM         *osm  = (PC_ASM*)pc->data;
97   PetscErrorCode ierr;
98   PetscInt       i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
99   PetscInt       start,start_val,end_val,sz,bs;
100   PetscMPIInt    size;
101   MatReuse       scall = MAT_REUSE_MATRIX;
102   IS             isl;
103   KSP            ksp;
104   PC             subpc;
105   const char     *prefix,*pprefix;
106   Vec            vec;
107 
108   PetscFunctionBegin;
109   ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
110   if (!pc->setupcalled) {
111     if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
112       /* no subdomains given, use one per processor */
113       osm->n_local_true = osm->n_local = 1;
114       ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
115       osm->n = size;
116     } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
117       PetscInt inwork[2],outwork[2];
118       inwork[0] = inwork[1] = osm->n_local_true;
119       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
120       osm->n_local = outwork[0];
121       osm->n       = outwork[1];
122     }
123     n_local      = osm->n_local;
124     n_local_true = osm->n_local_true;
125     if (!osm->is){ /* build the index sets */
126       ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
127       ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
128       ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
129       sz    = end_val - start_val;
130       start = start_val;
131       if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
132         SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
133       }
134       for (i=0; i<n_local_true; i++){
135         size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
136         ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
137         start      += size;
138         osm->is[i] =  isl;
139       }
140       osm->is_flg = PETSC_TRUE;
141     }
142 
143     ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
144     ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
145     ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
146     osm->y = osm->x + n_local;
147 
148     /*  Extend the "overlapping" regions by a number of steps  */
149     ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
150     for (i=0; i<n_local_true; i++) {
151       ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
152     }
153 
154     /* create the local work vectors and scatter contexts */
155     for (i=0; i<n_local_true; i++) {
156       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
157       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
158       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
159       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
160       ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
161       ierr = ISDestroy(isl);CHKERRQ(ierr);
162     }
163     for (i=n_local_true; i<n_local; i++) {
164       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
165       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
166       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
167       ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
168       ierr = ISDestroy(isl);CHKERRQ(ierr);
169     }
170 
171    /*
172        Create the local solvers.
173     */
174     for (i=0; i<n_local_true; i++) {
175       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
176       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
177       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
178       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
179       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
180       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
181       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
182       osm->ksp[i] = ksp;
183     }
184     scall = MAT_INITIAL_MATRIX;
185   } else {
186     /*
187        Destroy the blocks from the previous iteration
188     */
189     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
190       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
191       scall = MAT_INITIAL_MATRIX;
192     }
193   }
194 
195   /* extract out the submatrices */
196   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
197 
198   /* Return control to the user so that the submatrices can be modified (e.g., to apply
199      different boundary conditions for the submatrices than for the global problem) */
200   ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
201 
202   /* loop over subdomains putting them into local ksp */
203   ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
204   for (i=0; i<n_local_true; i++) {
205     ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
206     ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
207     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
208     ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
209   }
210   ierr = VecDestroy(vec);CHKERRQ(ierr);
211   PetscFunctionReturn(0);
212 }
213 
214 #undef __FUNCT__
215 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
216 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
217 {
218   PC_ASM         *osm = (PC_ASM*)pc->data;
219   PetscErrorCode ierr;
220   PetscInt       i;
221 
222   PetscFunctionBegin;
223   for (i=0; i<osm->n_local_true; i++) {
224     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
225   }
226   /*
227      If inplace flag is set, then destroy the matrix after the setup
228      on blocks is done.
229   */
230   if (osm->inplace && osm->n_local_true > 0) {
231     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
232   }
233   PetscFunctionReturn(0);
234 }
235 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: gathers each
   subdomain piece of x, solves it with the corresponding block KSP, and sums
   the pieces back into y.

   The scatters are pipelined: all forward scatters are started before any is
   completed, and each reverse scatter starts right after its block solve,
   overlapping communication with computation.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],zero);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start gathering the subdomain pieces of x into the work vectors */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions below, so it must start at zero */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves
     (blocks i >= n_local_true are zero-length padding so every process makes
     the same number of scatter calls) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters, summing the subdomain solutions into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
282 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner; identical structure to PCApply_ASM() except that the block
   solves use KSPSolveTranspose() and the restrict/interpolate roles of the
   scatter modes are swapped.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],zero);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start gathering the subdomain pieces of x into the work vectors */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions below, so it must start at zero */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves
     (blocks i >= n_local_true are zero-length padding so every process makes
     the same number of scatter calls) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* complete all reverse scatters, summing the subdomain solutions into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
332 
333 #undef __FUNCT__
334 #define __FUNCT__ "PCDestroy_ASM"
335 static PetscErrorCode PCDestroy_ASM(PC pc)
336 {
337   PC_ASM         *osm = (PC_ASM*)pc->data;
338   PetscErrorCode ierr;
339   PetscInt       i;
340 
341   PetscFunctionBegin;
342   for (i=0; i<osm->n_local; i++) {
343     ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
344     ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
345     ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
346   }
347   if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
348     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
349   }
350   if (osm->ksp) {
351     for (i=0; i<osm->n_local_true; i++) {
352       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
353     }
354   }
355   if (osm->is_flg) {
356     for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
357     ierr = PetscFree(osm->is);CHKERRQ(ierr);
358   }
359   if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
360   if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
361   if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
362   ierr = PetscFree(osm);CHKERRQ(ierr);
363   PetscFunctionReturn(0);
364 }
365 
366 #undef __FUNCT__
367 #define __FUNCT__ "PCSetFromOptions_ASM"
368 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
369 {
370   PC_ASM         *osm = (PC_ASM*)pc->data;
371   PetscErrorCode ierr;
372   PetscInt       blocks,ovl;
373   PetscTruth     flg,set,sym;
374 
375   PetscFunctionBegin;
376   /* set the type to symmetric if matrix is symmetric */
377   if (pc->pmat && !osm->type_set) {
378     ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
379     if (set && sym) {
380       osm->type = PC_ASM_BASIC;
381     }
382   }
383   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
384     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
385     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
386     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
387     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
388     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
389     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
390     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&osm->type,&flg);CHKERRQ(ierr);
391   ierr = PetscOptionsTail();CHKERRQ(ierr);
392   PetscFunctionReturn(0);
393 }
394 
395 /*------------------------------------------------------------------------------------*/
396 
397 EXTERN_C_BEGIN
398 #undef __FUNCT__
399 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
400 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[])
401 {
402   PC_ASM *osm = (PC_ASM*)pc->data;
403 
404   PetscFunctionBegin;
405   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
406 
407   if (pc->setupcalled && n != osm->n_local_true) {
408     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
409   }
410   if (!pc->setupcalled){
411     osm->n_local_true = n;
412     osm->is           = is;
413   }
414   PetscFunctionReturn(0);
415 }
416 EXTERN_C_END
417 
418 EXTERN_C_BEGIN
419 #undef __FUNCT__
420 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
421 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
422 {
423   PC_ASM         *osm = (PC_ASM*)pc->data;
424   PetscErrorCode ierr;
425   PetscMPIInt    rank,size;
426   PetscInt       n;
427 
428   PetscFunctionBegin;
429 
430   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
431 they cannot be set globally yet.");
432 
433   /*
434      Split the subdomains equally amoung all processors
435   */
436   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
437   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
438   n = N/size + ((N % size) > rank);
439   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
440   if (!pc->setupcalled){
441     osm->n_local_true = n;
442     osm->is           = 0;
443   }
444   PetscFunctionReturn(0);
445 }
446 EXTERN_C_END
447 
448 EXTERN_C_BEGIN
449 #undef __FUNCT__
450 #define __FUNCT__ "PCASMSetOverlap_ASM"
451 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
452 {
453   PC_ASM *osm;
454 
455   PetscFunctionBegin;
456   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
457 
458   osm               = (PC_ASM*)pc->data;
459   osm->overlap      = ovl;
460   PetscFunctionReturn(0);
461 }
462 EXTERN_C_END
463 
464 EXTERN_C_BEGIN
465 #undef __FUNCT__
466 #define __FUNCT__ "PCASMSetType_ASM"
467 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
468 {
469   PC_ASM *osm;
470 
471   PetscFunctionBegin;
472   osm           = (PC_ASM*)pc->data;
473   osm->type     = type;
474   osm->type_set = PETSC_TRUE;
475   PetscFunctionReturn(0);
476 }
477 EXTERN_C_END
478 
479 EXTERN_C_BEGIN
480 #undef __FUNCT__
481 #define __FUNCT__ "PCASMGetSubKSP_ASM"
482 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
483 {
484   PC_ASM         *jac = (PC_ASM*)pc->data;
485   PetscErrorCode ierr;
486 
487   PetscFunctionBegin;
488   if (jac->n_local_true < 0) {
489     SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
490   }
491 
492   if (n_local)     *n_local     = jac->n_local_true;
493   if (first_local) {
494     ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPIU_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
495     *first_local -= jac->n_local_true;
496   }
497   *ksp                         = jac->ksp;
498   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
499                                       not necessarily true though!  This flag is
500                                       used only for PCView_ASM() */
501   PetscFunctionReturn(0);
502 }
503 EXTERN_C_END
504 
505 EXTERN_C_BEGIN
506 #undef __FUNCT__
507 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
508 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace_ASM(PC pc)
509 {
510   PC_ASM *dir;
511 
512   PetscFunctionBegin;
513   dir          = (PC_ASM*)pc->data;
514   dir->inplace = PETSC_TRUE;
515   PetscFunctionReturn(0);
516 }
517 EXTERN_C_END
518 
519 /*----------------------------------------------------------------------------*/
520 #undef __FUNCT__
521 #define __FUNCT__ "PCASMSetUseInPlace"
522 /*@
523    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
524 
525    Collective on PC
526 
527    Input Parameters:
528 .  pc - the preconditioner context
529 
530    Options Database Key:
531 .  -pc_asm_in_place - Activates in-place factorization
532 
533    Note:
534    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
535    when the original matrix is not required during the Solve process.
536    This destroys the matrix, early thus, saving on memory usage.
537 
538    Level: intermediate
539 
540 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
541 
542 .seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace ()
543 @*/
544 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace(PC pc)
545 {
546   PetscErrorCode ierr,(*f)(PC);
547 
548   PetscFunctionBegin;
549   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
550   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
551   if (f) {
552     ierr = (*f)(pc);CHKERRQ(ierr);
553   }
554   PetscFunctionReturn(0);
555 }
556 /*----------------------------------------------------------------------------*/
557 
558 #undef __FUNCT__
559 #define __FUNCT__ "PCASMSetLocalSubdomains"
560 /*@C
561     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
562     only) for the additive Schwarz preconditioner.
563 
564     Collective on PC
565 
566     Input Parameters:
567 +   pc - the preconditioner context
568 .   n - the number of subdomains for this processor (default value = 1)
569 -   is - the index sets that define the subdomains for this processor
570          (or PETSC_NULL for PETSc to determine subdomains)
571 
572     Notes:
573     The IS numbering is in the parallel, global numbering of the vector.
574 
575     By default the ASM preconditioner uses 1 block per processor.
576 
577     These index sets cannot be destroyed until after completion of the
578     linear solves for which the ASM preconditioner is being used.
579 
580     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
581 
582     Level: advanced
583 
584 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
585 
586 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
587           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
588 @*/
589 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[])
590 {
591   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[]);
592 
593   PetscFunctionBegin;
594   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
595   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
596   if (f) {
597     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
598   }
599   PetscFunctionReturn(0);
600 }
601 
602 #undef __FUNCT__
603 #define __FUNCT__ "PCASMSetTotalSubdomains"
604 /*@C
605     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
606     additive Schwarz preconditioner.  Either all or no processors in the
607     PC communicator must call this routine, with the same index sets.
608 
609     Collective on PC
610 
611     Input Parameters:
612 +   pc - the preconditioner context
613 .   n - the number of subdomains for all processors
614 -   is - the index sets that define the subdomains for all processor
615          (or PETSC_NULL for PETSc to determine subdomains)
616 
617     Options Database Key:
618     To set the total number of subdomain blocks rather than specify the
619     index sets, use the option
620 .    -pc_asm_blocks <blks> - Sets total blocks
621 
622     Notes:
623     Currently you cannot use this to set the actual subdomains with the argument is.
624 
625     By default the ASM preconditioner uses 1 block per processor.
626 
627     These index sets cannot be destroyed until after completion of the
628     linear solves for which the ASM preconditioner is being used.
629 
630     Use PCASMSetLocalSubdomains() to set local subdomains.
631 
632     Level: advanced
633 
634 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
635 
636 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
637           PCASMCreateSubdomains2D()
638 @*/
639 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS *is)
640 {
641   PetscErrorCode ierr,(*f)(PC,PetscInt,IS *);
642 
643   PetscFunctionBegin;
644   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
645   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
646   if (f) {
647     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
648   }
649   PetscFunctionReturn(0);
650 }
651 
652 #undef __FUNCT__
653 #define __FUNCT__ "PCASMSetOverlap"
654 /*@
655     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
656     additive Schwarz preconditioner.  Either all or no processors in the
657     PC communicator must call this routine.
658 
659     Collective on PC
660 
661     Input Parameters:
662 +   pc  - the preconditioner context
663 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
664 
665     Options Database Key:
666 .   -pc_asm_overlap <ovl> - Sets overlap
667 
668     Notes:
669     By default the ASM preconditioner uses 1 block per processor.  To use
670     multiple blocks per perocessor, see PCASMSetTotalSubdomains() and
671     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
672 
673     The overlap defaults to 1, so if one desires that no additional
674     overlap be computed beyond what may have been set with a call to
675     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
676     must be set to be 0.  In particular, if one does not explicitly set
677     the subdomains an application code, then all overlap would be computed
678     internally by PETSc, and using an overlap of 0 would result in an ASM
679     variant that is equivalent to the block Jacobi preconditioner.
680 
681     Note that one can define initial index sets with any overlap via
682     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
683     PCASMSetOverlap() merely allows PETSc to extend that overlap further
684     if desired.
685 
686     Level: intermediate
687 
688 .keywords: PC, ASM, set, overlap
689 
690 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
691           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
692 @*/
693 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
694 {
695   PetscErrorCode ierr,(*f)(PC,PetscInt);
696 
697   PetscFunctionBegin;
698   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
699   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
700   if (f) {
701     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
702   }
703   PetscFunctionReturn(0);
704 }
705 
706 #undef __FUNCT__
707 #define __FUNCT__ "PCASMSetType"
708 /*@
709     PCASMSetType - Sets the type of restriction and interpolation used
710     for local problems in the additive Schwarz method.
711 
712     Collective on PC
713 
714     Input Parameters:
715 +   pc  - the preconditioner context
716 -   type - variant of ASM, one of
717 .vb
718       PC_ASM_BASIC       - full interpolation and restriction
719       PC_ASM_RESTRICT    - full restriction, local processor interpolation
720       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
721       PC_ASM_NONE        - local processor restriction and interpolation
722 .ve
723 
724     Options Database Key:
725 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
726 
727     Level: intermediate
728 
729 .keywords: PC, ASM, set, type
730 
731 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
732           PCASMCreateSubdomains2D()
733 @*/
734 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
735 {
736   PetscErrorCode ierr,(*f)(PC,PCASMType);
737 
738   PetscFunctionBegin;
739   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
740   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
741   if (f) {
742     ierr = (*f)(pc,type);CHKERRQ(ierr);
743   }
744   PetscFunctionReturn(0);
745 }
746 
747 #undef __FUNCT__
748 #define __FUNCT__ "PCASMGetSubKSP"
749 /*@C
750    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
751    this processor.
752 
753    Collective on PC iff first_local is requested
754 
755    Input Parameter:
756 .  pc - the preconditioner context
757 
758    Output Parameters:
759 +  n_local - the number of blocks on this processor or PETSC_NULL
760 .  first_local - the global number of the first block on this processor or PETSC_NULL,
761                  all processors must request or all must pass PETSC_NULL
762 -  ksp - the array of KSP contexts
763 
764    Note:
765    After PCASMGetSubKSP() the array of KSPes is not to be freed
766 
767    Currently for some matrix implementations only 1 block per processor
768    is supported.
769 
770    You must call KSPSetUp() before calling PCASMGetSubKSP().
771 
772    Level: advanced
773 
774 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
775 
776 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
777           PCASMCreateSubdomains2D(),
778 @*/
779 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
780 {
781   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
785   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
786   if (f) {
787     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
788   } else {
789     SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
790   }
791 
792  PetscFunctionReturn(0);
793 }
794 
795 /* -------------------------------------------------------------------------------------*/
796 /*MC
797    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
798            its own KSP object.
799 
800    Options Database Keys:
801 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
802 .  -pc_asm_in_place - Activates in-place factorization
803 .  -pc_asm_blocks <blks> - Sets total blocks
804 .  -pc_asm_overlap <ovl> - Sets overlap
805 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
806 
807      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
808       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
809       -pc_asm_type basic to use the standard ASM.
810 
811    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
812      than one processor. Defaults to one block per processor.
813 
814      To set options on the solvers for each block append -sub_ to all the KSP, and PC
815         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
816 
     To set the options on the solvers separately for each block call PCASMGetSubKSP()
818          and set the options directly on the resulting KSP object (you can access its PC
819          with KSPGetPC())
820 
821 
822    Level: beginner
823 
824    Concepts: additive Schwarz method
825 
826 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
827            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
828            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
829            PCASMSetUseInPlace()
830 M*/
831 
832 EXTERN_C_BEGIN
833 #undef __FUNCT__
834 #define __FUNCT__ "PCCreate_ASM"
835 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
836 {
837   PetscErrorCode ierr;
838   PC_ASM         *osm;
839 
840   PetscFunctionBegin;
841   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
842   ierr = PetscLogObjectMemory(pc,sizeof(PC_ASM));CHKERRQ(ierr);
843   osm->n                 = PETSC_DECIDE;
844   osm->n_local           = 0;
845   osm->n_local_true      = PETSC_DECIDE;
846   osm->overlap           = 1;
847   osm->is_flg            = PETSC_FALSE;
848   osm->ksp              = 0;
849   osm->scat              = 0;
850   osm->is                = 0;
851   osm->mat               = 0;
852   osm->pmat              = 0;
853   osm->type              = PC_ASM_RESTRICT;
854   osm->same_local_solves = PETSC_TRUE;
855   osm->inplace           = PETSC_FALSE;
856   pc->data               = (void*)osm;
857 
858   pc->ops->apply             = PCApply_ASM;
859   pc->ops->applytranspose    = PCApplyTranspose_ASM;
860   pc->ops->setup             = PCSetUp_ASM;
861   pc->ops->destroy           = PCDestroy_ASM;
862   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
863   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
864   pc->ops->view              = PCView_ASM;
865   pc->ops->applyrichardson   = 0;
866 
867   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
868                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
869   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
870                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
871   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
872                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
873   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
874                     PCASMSetType_ASM);CHKERRQ(ierr);
875   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
876                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
877 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
878                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
879   PetscFunctionReturn(0);
880 }
881 EXTERN_C_END
882 
883 
884 #undef __FUNCT__
885 #define __FUNCT__ "PCASMCreateSubdomains2D"
886 /*@
887    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
888    preconditioner for a two-dimensional problem on a regular grid.
889 
890    Not Collective
891 
892    Input Parameters:
893 +  m, n - the number of mesh points in the x and y directions
894 .  M, N - the number of subdomains in the x and y directions
895 .  dof - degrees of freedom per node
896 -  overlap - overlap in mesh lines
897 
898    Output Parameters:
899 +  Nsub - the number of subdomains created
900 -  is - the array of index sets defining the subdomains
901 
902    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
904    preconditioners.  More general related routines are
905    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
906 
907    Level: advanced
908 
909 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
910 
911 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
912           PCASMSetOverlap()
913 @*/
914 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
915 {
916   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
917   PetscErrorCode ierr;
918   PetscInt       nidx,*idx,loc,ii,jj,count;
919 
920   PetscFunctionBegin;
921   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
922 
923   *Nsub = N*M;
924   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
925   ystart = 0;
926   loc_outter = 0;
927   for (i=0; i<N; i++) {
928     height = n/N + ((n % N) > i); /* height of subdomain */
929     if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
930     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
931     yright = ystart + height + overlap; if (yright > n) yright = n;
932     xstart = 0;
933     for (j=0; j<M; j++) {
934       width = m/M + ((m % M) > j); /* width of subdomain */
935       if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
936       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
937       xright = xstart + width + overlap; if (xright > m) xright = m;
938       nidx   = (xright - xleft)*(yright - yleft);
939       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
940       loc    = 0;
941       for (ii=yleft; ii<yright; ii++) {
942         count = m*ii + xleft;
943         for (jj=xleft; jj<xright; jj++) {
944           idx[loc++] = count++;
945         }
946       }
947       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
948       ierr = PetscFree(idx);CHKERRQ(ierr);
949       xstart += width;
950     }
951     ystart += height;
952   }
953   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
954   PetscFunctionReturn(0);
955 }
956 
957 #undef __FUNCT__
958 #define __FUNCT__ "PCASMGetLocalSubdomains"
959 /*@C
960     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
961     only) for the additive Schwarz preconditioner.
962 
963     Collective on PC
964 
965     Input Parameter:
966 .   pc - the preconditioner context
967 
968     Output Parameters:
969 +   n - the number of subdomains for this processor (default value = 1)
970 -   is - the index sets that define the subdomains for this processor
971 
972 
973     Notes:
974     The IS numbering is in the parallel, global numbering of the vector.
975 
976     Level: advanced
977 
978 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
979 
980 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
981           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
982 @*/
983 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[])
984 {
985   PC_ASM *osm;
986 
987   PetscFunctionBegin;
988   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
989   PetscValidIntPointer(n,2);
990   if (!pc->setupcalled) {
991     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
992   }
993 
994   osm = (PC_ASM*)pc->data;
995   if (n)  *n = osm->n_local_true;
996   if (is) *is = osm->is;
997   PetscFunctionReturn(0);
998 }
999 
1000 #undef __FUNCT__
1001 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1002 /*@C
1003     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1004     only) for the additive Schwarz preconditioner.
1005 
1006     Collective on PC
1007 
1008     Input Parameter:
1009 .   pc - the preconditioner context
1010 
1011     Output Parameters:
1012 +   n - the number of matrices for this processor (default value = 1)
1013 -   mat - the matrices
1014 
1015 
1016     Level: advanced
1017 
1018 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1019 
1020 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1021           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1022 @*/
1023 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1024 {
1025   PC_ASM *osm;
1026 
1027   PetscFunctionBegin;
1028   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1029   PetscValidPointer(n,2);
1030   if (!pc->setupcalled) {
1031     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1032   }
1033 
1034   osm = (PC_ASM*)pc->data;
1035   if (n)   *n   = osm->n_local_true;
1036   if (mat) *mat = osm->pmat;
1037   PetscFunctionReturn(0);
1038 }
1039 
1040