xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 7c4f633dc6bb6149cca88d301ead35a99e103cbb)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
15 
/* Private context for the additive Schwarz (PCASM) preconditioner. */
typedef struct {
  PetscInt   n,n_local,n_local_true; /* n: total subdomains over all processors; n_local: max per processor; n_local_true: actual count on this processor */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup (then this object owns and must destroy them) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* work vectors: x = restricted RHS, y = local solution (y aliases the second half of the x allocation) */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "PCView_ASM"
35 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
36 {
37   PC_ASM         *jac = (PC_ASM*)pc->data;
38   PetscErrorCode ierr;
39   PetscMPIInt    rank;
40   PetscInt       i;
41   PetscTruth     iascii,isstring;
42   PetscViewer    sviewer;
43 
44 
45   PetscFunctionBegin;
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
48   if (iascii) {
49     if (jac->n > 0) {
50       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",jac->n,jac->overlap);CHKERRQ(ierr);
51     } else {
52       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",jac->overlap);CHKERRQ(ierr);
53     }
54     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[jac->type]);CHKERRQ(ierr);
55     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
56     if (jac->same_local_solves) {
57       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59       if (!rank && jac->ksp) {
60         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
62         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63       }
64       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65     } else {
66       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
67       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D\n",rank,jac->n_local_true);CHKERRQ(ierr);
68       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
69       for (i=0; i<jac->n_local; i++) {
70         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
71         if (i < jac->n_local_true) {
72           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
73           ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
74           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
75         }
76         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
77       }
78       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
79       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
80     }
81   } else if (isstring) {
82     ierr = PetscViewerStringSPrintf(viewer," blks=%D, overlap=%D, type=%D",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
83     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
84       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
85     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
86   } else {
87     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
88   }
89   PetscFunctionReturn(0);
90 }
91 
92 #undef __FUNCT__
93 #define __FUNCT__ "PCSetUp_ASM"
94 static PetscErrorCode PCSetUp_ASM(PC pc)
95 {
96   PC_ASM         *osm  = (PC_ASM*)pc->data;
97   PetscErrorCode ierr;
98   PetscInt       i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
99   PetscInt       start,start_val,end_val,sz,bs;
100   PetscMPIInt    size;
101   MatReuse       scall = MAT_REUSE_MATRIX;
102   IS             isl;
103   KSP            ksp;
104   PC             subpc;
105   const char     *prefix,*pprefix;
106   Vec            vec;
107 
108   PetscFunctionBegin;
109   ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
110   if (!pc->setupcalled) {
111     if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
112       /* no subdomains given, use one per processor */
113       osm->n_local_true = osm->n_local = 1;
114       ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
115       osm->n = size;
116     } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
117       PetscInt inwork[2],outwork[2];
118       inwork[0] = inwork[1] = osm->n_local_true;
119       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
120       osm->n_local = outwork[0];
121       osm->n       = outwork[1];
122     }
123     n_local      = osm->n_local;
124     n_local_true = osm->n_local_true;
125     if (!osm->is){ /* build the index sets */
126       ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
127       ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
128       ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
129       sz    = end_val - start_val;
130       start = start_val;
131       if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
132         SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
133       }
134       for (i=0; i<n_local_true; i++){
135         size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
136         ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
137         start      += size;
138         osm->is[i] =  isl;
139       }
140       osm->is_flg = PETSC_TRUE;
141     }
142 
143     ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
144     ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
145     ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
146     osm->y = osm->x + n_local;
147 
148     /*  Extend the "overlapping" regions by a number of steps  */
149     ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
150     for (i=0; i<n_local_true; i++) {
151       ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
152     }
153 
154     /* create the local work vectors and scatter contexts */
155     for (i=0; i<n_local_true; i++) {
156       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
157       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
158       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
159       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
160       ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
161       ierr = ISDestroy(isl);CHKERRQ(ierr);
162     }
163     for (i=n_local_true; i<n_local; i++) {
164       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
165       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
166       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
167       ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
168       ierr = ISDestroy(isl);CHKERRQ(ierr);
169     }
170 
171    /*
172        Create the local solvers.
173     */
174     for (i=0; i<n_local_true; i++) {
175       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
176       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
177       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
178       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
179       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
180       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
181       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
182       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
183       osm->ksp[i] = ksp;
184     }
185     scall = MAT_INITIAL_MATRIX;
186   } else {
187     /*
188        Destroy the blocks from the previous iteration
189     */
190     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
191       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
192       scall = MAT_INITIAL_MATRIX;
193     }
194   }
195 
196   /* extract out the submatrices */
197   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
198 
199   /* Return control to the user so that the submatrices can be modified (e.g., to apply
200      different boundary conditions for the submatrices than for the global problem) */
201   ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
202 
203   /* loop over subdomains putting them into local ksp */
204   ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
205   for (i=0; i<n_local_true; i++) {
206     ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
207     ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
208     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
209     ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
210   }
211   ierr = VecDestroy(vec);CHKERRQ(ierr);
212   PetscFunctionReturn(0);
213 }
214 
215 #undef __FUNCT__
216 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
217 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
218 {
219   PC_ASM         *osm = (PC_ASM*)pc->data;
220   PetscErrorCode ierr;
221   PetscInt       i;
222 
223   PetscFunctionBegin;
224   for (i=0; i<osm->n_local_true; i++) {
225     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
226   }
227   /*
228      If inplace flag is set, then destroy the matrix after the setup
229      on blocks is done.
230   */
231   if (osm->inplace && osm->n_local_true > 0) {
232     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
233   }
234   PetscFunctionReturn(0);
235 }
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restrict the
   input x to each (overlapping) subdomain, solve each block with its KSP,
   and add the (possibly restricted) local solutions into y.

   The scatter Begin/End calls are deliberately pipelined so communication
   for one block overlaps computation on another; do not reorder them.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions up front so communication can overlap the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves
     (processes with fewer than n_local true subdomains still participate) */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* complete all additions of the local solutions into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
282 
283 #undef __FUNCT__
284 #define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner:
   same structure as PCApply_ASM() but each block is solved with
   KSPSolveTranspose() and the restrict/interpolate roles are swapped.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions up front so communication can overlap the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* complete all additions of the local solutions into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
331 
332 #undef __FUNCT__
333 #define __FUNCT__ "PCDestroy_ASM"
334 static PetscErrorCode PCDestroy_ASM(PC pc)
335 {
336   PC_ASM         *osm = (PC_ASM*)pc->data;
337   PetscErrorCode ierr;
338   PetscInt       i;
339 
340   PetscFunctionBegin;
341   for (i=0; i<osm->n_local; i++) {
342     ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
343     ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
344     ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
345   }
346   if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
347     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
348   }
349   if (osm->ksp) {
350     for (i=0; i<osm->n_local_true; i++) {
351       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
352     }
353   }
354   if (osm->is_flg) {
355     for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
356     ierr = PetscFree(osm->is);CHKERRQ(ierr);
357   }
358   ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
359   ierr = PetscFree(osm->scat);CHKERRQ(ierr);
360   ierr = PetscFree(osm->x);CHKERRQ(ierr);
361   ierr = PetscFree(osm);CHKERRQ(ierr);
362   PetscFunctionReturn(0);
363 }
364 
365 #undef __FUNCT__
366 #define __FUNCT__ "PCSetFromOptions_ASM"
367 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
368 {
369   PC_ASM         *osm = (PC_ASM*)pc->data;
370   PetscErrorCode ierr;
371   PetscInt       blocks,ovl;
372   PetscTruth     flg,set,sym;
373 
374   PetscFunctionBegin;
375   /* set the type to symmetric if matrix is symmetric */
376   if (pc->pmat && !osm->type_set) {
377     ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
378     if (set && sym) {
379       osm->type = PC_ASM_BASIC;
380     }
381   }
382   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
383     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
384     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
385     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
386     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
387     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
388     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
389     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&osm->type,&flg);CHKERRQ(ierr);
390   ierr = PetscOptionsTail();CHKERRQ(ierr);
391   PetscFunctionReturn(0);
392 }
393 
394 /*------------------------------------------------------------------------------------*/
395 
396 EXTERN_C_BEGIN
397 #undef __FUNCT__
398 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
399 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[])
400 {
401   PC_ASM *osm = (PC_ASM*)pc->data;
402 
403   PetscFunctionBegin;
404   if (n < 1) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks");
405 
406   if (pc->setupcalled && n != osm->n_local_true) {
407     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
408   }
409   if (!pc->setupcalled){
410     osm->n_local_true = n;
411     osm->is           = is;
412   }
413   PetscFunctionReturn(0);
414 }
415 EXTERN_C_END
416 
417 EXTERN_C_BEGIN
418 #undef __FUNCT__
419 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
420 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
421 {
422   PC_ASM         *osm = (PC_ASM*)pc->data;
423   PetscErrorCode ierr;
424   PetscMPIInt    rank,size;
425   PetscInt       n;
426 
427   PetscFunctionBegin;
428 
429   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
430 
431   /*
432      Split the subdomains equally amoung all processors
433   */
434   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
435   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
436   n = N/size + ((N % size) > rank);
437   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
438   if (!pc->setupcalled){
439     osm->n_local_true = n;
440     if (!n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have at least one block: total processors %d total blocks %d",size,(int)n);
441     osm->is           = 0;
442   }
443   PetscFunctionReturn(0);
444 }
445 EXTERN_C_END
446 
447 EXTERN_C_BEGIN
448 #undef __FUNCT__
449 #define __FUNCT__ "PCASMSetOverlap_ASM"
450 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
451 {
452   PC_ASM *osm;
453 
454   PetscFunctionBegin;
455   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
456 
457   osm               = (PC_ASM*)pc->data;
458   osm->overlap      = ovl;
459   PetscFunctionReturn(0);
460 }
461 EXTERN_C_END
462 
463 EXTERN_C_BEGIN
464 #undef __FUNCT__
465 #define __FUNCT__ "PCASMSetType_ASM"
466 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
467 {
468   PC_ASM *osm;
469 
470   PetscFunctionBegin;
471   osm           = (PC_ASM*)pc->data;
472   osm->type     = type;
473   osm->type_set = PETSC_TRUE;
474   PetscFunctionReturn(0);
475 }
476 EXTERN_C_END
477 
478 EXTERN_C_BEGIN
479 #undef __FUNCT__
480 #define __FUNCT__ "PCASMGetSubKSP_ASM"
481 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
482 {
483   PC_ASM         *jac = (PC_ASM*)pc->data;
484   PetscErrorCode ierr;
485 
486   PetscFunctionBegin;
487   if (jac->n_local_true < 0) {
488     SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
489   }
490 
491   if (n_local)     *n_local     = jac->n_local_true;
492   if (first_local) {
493     ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
494     *first_local -= jac->n_local_true;
495   }
496   *ksp                         = jac->ksp;
497   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
498                                       not necessarily true though!  This flag is
499                                       used only for PCView_ASM() */
500   PetscFunctionReturn(0);
501 }
502 EXTERN_C_END
503 
504 EXTERN_C_BEGIN
505 #undef __FUNCT__
506 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
507 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace_ASM(PC pc)
508 {
509   PC_ASM *dir;
510 
511   PetscFunctionBegin;
512   dir          = (PC_ASM*)pc->data;
513   dir->inplace = PETSC_TRUE;
514   PetscFunctionReturn(0);
515 }
516 EXTERN_C_END
517 
518 /*----------------------------------------------------------------------------*/
519 #undef __FUNCT__
520 #define __FUNCT__ "PCASMSetUseInPlace"
521 /*@
522    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
523 
524    Collective on PC
525 
526    Input Parameters:
527 .  pc - the preconditioner context
528 
529    Options Database Key:
530 .  -pc_asm_in_place - Activates in-place factorization
531 
532    Note:
   PCASMSetUseInPlace() can only be used with the KSP method KSPPREONLY, and
   when the original matrix is not required during the Solve process.
   This destroys the matrix early, thus saving memory.
536 
537    Level: intermediate
538 
539 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
540 
541 .seealso: PCFactorSetUseInPlace()
542 @*/
543 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace(PC pc)
544 {
545   PetscErrorCode ierr,(*f)(PC);
546 
547   PetscFunctionBegin;
548   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
549   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
550   if (f) {
551     ierr = (*f)(pc);CHKERRQ(ierr);
552   }
553   PetscFunctionReturn(0);
554 }
555 /*----------------------------------------------------------------------------*/
556 
557 #undef __FUNCT__
558 #define __FUNCT__ "PCASMSetLocalSubdomains"
559 /*@C
560     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
561     only) for the additive Schwarz preconditioner.
562 
563     Collective on PC
564 
565     Input Parameters:
566 +   pc - the preconditioner context
567 .   n - the number of subdomains for this processor (default value = 1)
568 -   is - the index sets that define the subdomains for this processor
569          (or PETSC_NULL for PETSc to determine subdomains)
570 
571     Notes:
572     The IS numbering is in the parallel, global numbering of the vector.
573 
574     By default the ASM preconditioner uses 1 block per processor.
575 
576     These index sets cannot be destroyed until after completion of the
577     linear solves for which the ASM preconditioner is being used.
578 
579     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
580 
581     Level: advanced
582 
583 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
584 
585 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
586           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
587 @*/
588 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[])
589 {
590   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[]);
591 
592   PetscFunctionBegin;
593   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
594   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
595   if (f) {
596     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
597   }
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "PCASMSetTotalSubdomains"
603 /*@C
604     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
605     additive Schwarz preconditioner.  Either all or no processors in the
606     PC communicator must call this routine, with the same index sets.
607 
608     Collective on PC
609 
610     Input Parameters:
611 +   pc - the preconditioner context
612 .   n - the number of subdomains for all processors
613 -   is - the index sets that define the subdomains for all processor
614          (or PETSC_NULL for PETSc to determine subdomains)
615 
616     Options Database Key:
617     To set the total number of subdomain blocks rather than specify the
618     index sets, use the option
619 .    -pc_asm_blocks <blks> - Sets total blocks
620 
621     Notes:
622     Currently you cannot use this to set the actual subdomains with the argument is.
623 
624     By default the ASM preconditioner uses 1 block per processor.
625 
626     These index sets cannot be destroyed until after completion of the
627     linear solves for which the ASM preconditioner is being used.
628 
629     Use PCASMSetLocalSubdomains() to set local subdomains.
630 
631     Level: advanced
632 
633 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
634 
635 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
636           PCASMCreateSubdomains2D()
637 @*/
638 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS *is)
639 {
640   PetscErrorCode ierr,(*f)(PC,PetscInt,IS *);
641 
642   PetscFunctionBegin;
643   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
644   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
645   if (f) {
646     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
647   }
648   PetscFunctionReturn(0);
649 }
650 
651 #undef __FUNCT__
652 #define __FUNCT__ "PCASMSetOverlap"
653 /*@
654     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
655     additive Schwarz preconditioner.  Either all or no processors in the
656     PC communicator must call this routine.
657 
658     Collective on PC
659 
660     Input Parameters:
661 +   pc  - the preconditioner context
662 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
663 
664     Options Database Key:
665 .   -pc_asm_overlap <ovl> - Sets overlap
666 
667     Notes:
668     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
670     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
671 
672     The overlap defaults to 1, so if one desires that no additional
673     overlap be computed beyond what may have been set with a call to
674     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
675     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
677     internally by PETSc, and using an overlap of 0 would result in an ASM
678     variant that is equivalent to the block Jacobi preconditioner.
679 
680     Note that one can define initial index sets with any overlap via
681     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
682     PCASMSetOverlap() merely allows PETSc to extend that overlap further
683     if desired.
684 
685     Level: intermediate
686 
687 .keywords: PC, ASM, set, overlap
688 
689 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
690           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
691 @*/
692 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
693 {
694   PetscErrorCode ierr,(*f)(PC,PetscInt);
695 
696   PetscFunctionBegin;
697   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
698   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
699   if (f) {
700     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
701   }
702   PetscFunctionReturn(0);
703 }
704 
705 #undef __FUNCT__
706 #define __FUNCT__ "PCASMSetType"
707 /*@
708     PCASMSetType - Sets the type of restriction and interpolation used
709     for local problems in the additive Schwarz method.
710 
711     Collective on PC
712 
713     Input Parameters:
714 +   pc  - the preconditioner context
715 -   type - variant of ASM, one of
716 .vb
717       PC_ASM_BASIC       - full interpolation and restriction
718       PC_ASM_RESTRICT    - full restriction, local processor interpolation
719       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
720       PC_ASM_NONE        - local processor restriction and interpolation
721 .ve
722 
723     Options Database Key:
724 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
725 
726     Level: intermediate
727 
728 .keywords: PC, ASM, set, type
729 
730 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
731           PCASMCreateSubdomains2D()
732 @*/
733 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
734 {
735   PetscErrorCode ierr,(*f)(PC,PCASMType);
736 
737   PetscFunctionBegin;
738   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
739   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
740   if (f) {
741     ierr = (*f)(pc,type);CHKERRQ(ierr);
742   }
743   PetscFunctionReturn(0);
744 }
745 
746 #undef __FUNCT__
747 #define __FUNCT__ "PCASMGetSubKSP"
748 /*@C
749    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
750    this processor.
751 
752    Collective on PC iff first_local is requested
753 
754    Input Parameter:
755 .  pc - the preconditioner context
756 
757    Output Parameters:
758 +  n_local - the number of blocks on this processor or PETSC_NULL
759 .  first_local - the global number of the first block on this processor or PETSC_NULL,
760                  all processors must request or all must pass PETSC_NULL
761 -  ksp - the array of KSP contexts
762 
763    Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed
765 
766    Currently for some matrix implementations only 1 block per processor
767    is supported.
768 
769    You must call KSPSetUp() before calling PCASMGetSubKSP().
770 
771    Level: advanced
772 
773 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
774 
775 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
776           PCASMCreateSubdomains2D(),
777 @*/
778 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
779 {
780   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
781 
782   PetscFunctionBegin;
783   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
784   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
785   if (f) {
786     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
787   } else {
788     SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
789   }
790 
791  PetscFunctionReturn(0);
792 }
793 
794 /* -------------------------------------------------------------------------------------*/
795 /*MC
796    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
797            its own KSP object.
798 
799    Options Database Keys:
800 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
801 .  -pc_asm_in_place - Activates in-place factorization
802 .  -pc_asm_blocks <blks> - Sets total blocks
803 .  -pc_asm_overlap <ovl> - Sets overlap
804 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
805 
806      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
807       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
808       -pc_asm_type basic to use the standard ASM.
809 
810    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
811      than one processor. Defaults to one block per processor.
812 
813      To set options on the solvers for each block append -sub_ to all the KSP, and PC
814         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
815 
816      To set the options on the solvers separate for each block call PCASMGetSubKSP()
817          and set the options directly on the resulting KSP object (you can access its PC
818          with KSPGetPC())
819 
820 
821    Level: beginner
822 
823    Concepts: additive Schwarz method
824 
825     References:
826     An additive variant of the Schwarz alternating method for the case of many subregions
827     M Dryja, OB Widlund - Courant Institute, New York University Technical report
828 
829     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
830     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
831 
832 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
833            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
834            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
835            PCASMSetUseInPlace()
836 M*/
837 
838 EXTERN_C_BEGIN
839 #undef __FUNCT__
840 #define __FUNCT__ "PCCreate_ASM"
841 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
842 {
843   PetscErrorCode ierr;
844   PC_ASM         *osm;
845 
846   PetscFunctionBegin;
847   ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
848   osm->n                 = PETSC_DECIDE;
849   osm->n_local           = 0;
850   osm->n_local_true      = PETSC_DECIDE;
851   osm->overlap           = 1;
852   osm->is_flg            = PETSC_FALSE;
853   osm->ksp              = 0;
854   osm->scat              = 0;
855   osm->is                = 0;
856   osm->mat               = 0;
857   osm->pmat              = 0;
858   osm->type              = PC_ASM_RESTRICT;
859   osm->same_local_solves = PETSC_TRUE;
860   osm->inplace           = PETSC_FALSE;
861   pc->data               = (void*)osm;
862 
863   pc->ops->apply             = PCApply_ASM;
864   pc->ops->applytranspose    = PCApplyTranspose_ASM;
865   pc->ops->setup             = PCSetUp_ASM;
866   pc->ops->destroy           = PCDestroy_ASM;
867   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
868   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
869   pc->ops->view              = PCView_ASM;
870   pc->ops->applyrichardson   = 0;
871 
872   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
873                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
874   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
875                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
876   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
877                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
878   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
879                     PCASMSetType_ASM);CHKERRQ(ierr);
880   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
881                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
882 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
883                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
884   PetscFunctionReturn(0);
885 }
886 EXTERN_C_END
887 
888 
889 #undef __FUNCT__
890 #define __FUNCT__ "PCASMCreateSubdomains2D"
891 /*@
892    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
893    preconditioner for a two-dimensional problem on a regular grid.
894 
895    Not Collective
896 
897    Input Parameters:
898 +  m, n - the number of mesh points in the x and y directions
899 .  M, N - the number of subdomains in the x and y directions
900 .  dof - degrees of freedom per node
901 -  overlap - overlap in mesh lines
902 
903    Output Parameters:
904 +  Nsub - the number of subdomains created
905 -  is - the array of index sets defining the subdomains
906 
907    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
909    preconditioners.  More general related routines are
910    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
911 
912    Level: advanced
913 
914 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
915 
916 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
917           PCASMSetOverlap()
918 @*/
919 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
920 {
921   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
922   PetscErrorCode ierr;
923   PetscInt       nidx,*idx,loc,ii,jj,count;
924 
925   PetscFunctionBegin;
926   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
927 
928   *Nsub = N*M;
929   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
930   ystart = 0;
931   loc_outter = 0;
932   for (i=0; i<N; i++) {
933     height = n/N + ((n % N) > i); /* height of subdomain */
934     if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
935     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
936     yright = ystart + height + overlap; if (yright > n) yright = n;
937     xstart = 0;
938     for (j=0; j<M; j++) {
939       width = m/M + ((m % M) > j); /* width of subdomain */
940       if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
941       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
942       xright = xstart + width + overlap; if (xright > m) xright = m;
943       nidx   = (xright - xleft)*(yright - yleft);
944       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
945       loc    = 0;
946       for (ii=yleft; ii<yright; ii++) {
947         count = m*ii + xleft;
948         for (jj=xleft; jj<xright; jj++) {
949           idx[loc++] = count++;
950         }
951       }
952       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
953       ierr = PetscFree(idx);CHKERRQ(ierr);
954       xstart += width;
955     }
956     ystart += height;
957   }
958   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
959   PetscFunctionReturn(0);
960 }
961 
962 #undef __FUNCT__
963 #define __FUNCT__ "PCASMGetLocalSubdomains"
964 /*@C
965     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
966     only) for the additive Schwarz preconditioner.
967 
968     Collective on PC
969 
970     Input Parameter:
971 .   pc - the preconditioner context
972 
973     Output Parameters:
974 +   n - the number of subdomains for this processor (default value = 1)
975 -   is - the index sets that define the subdomains for this processor
976 
977 
978     Notes:
979     The IS numbering is in the parallel, global numbering of the vector.
980 
981     Level: advanced
982 
983 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
984 
985 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
986           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
987 @*/
988 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[])
989 {
990   PC_ASM *osm;
991 
992   PetscFunctionBegin;
993   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
994   PetscValidIntPointer(n,2);
995   if (!pc->setupcalled) {
996     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
997   }
998 
999   osm = (PC_ASM*)pc->data;
1000   if (n)  *n = osm->n_local_true;
1001   if (is) *is = osm->is;
1002   PetscFunctionReturn(0);
1003 }
1004 
1005 #undef __FUNCT__
1006 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1007 /*@C
1008     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1009     only) for the additive Schwarz preconditioner.
1010 
1011     Collective on PC
1012 
1013     Input Parameter:
1014 .   pc - the preconditioner context
1015 
1016     Output Parameters:
1017 +   n - the number of matrices for this processor (default value = 1)
1018 -   mat - the matrices
1019 
1020 
1021     Level: advanced
1022 
1023 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1024 
1025 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1026           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1027 @*/
1028 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1029 {
1030   PC_ASM *osm;
1031 
1032   PetscFunctionBegin;
1033   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1034   PetscValidPointer(n,2);
1035   if (!pc->setupcalled) {
1036     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1037   }
1038 
1039   osm = (PC_ASM*)pc->data;
1040   if (n)   *n   = osm->n_local_true;
1041   if (mat) *mat = osm->pmat;
1042   PetscFunctionReturn(0);
1043 }
1044 
1045