xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision da9f1d6b25924a16baf1fafcd5e58fa8eaafd3cf)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
15 
typedef struct {
  PetscInt   n,n_local,n_local_true; /* total, padded-local, and actual-local subdomain counts (see file header comment) */
  PetscTruth is_flg;              /* PETSC_TRUE if the IS's were created in PCSetUp_ASM() (and hence are destroyed by PCDestroy_ASM()) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* per-block work vectors (restricted RHS / local solution); y points into the x allocation */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "PCView_ASM"
35 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
36 {
37   PC_ASM         *jac = (PC_ASM*)pc->data;
38   PetscErrorCode ierr;
39   PetscMPIInt    rank;
40   PetscInt       i;
41   PetscTruth     iascii,isstring;
42   PetscViewer    sviewer;
43 
44 
45   PetscFunctionBegin;
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
48   if (iascii) {
49     if (jac->n > 0) {
50       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",jac->n,jac->overlap);CHKERRQ(ierr);
51     } else {
52       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",jac->overlap);CHKERRQ(ierr);
53     }
54     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[jac->type]);CHKERRQ(ierr);
55     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
56     if (jac->same_local_solves) {
57       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59       if (!rank && jac->ksp) {
60         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
62         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63       }
64       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65     } else {
66       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
67       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D\n",rank,jac->n_local_true);CHKERRQ(ierr);
68       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
69       for (i=0; i<jac->n_local; i++) {
70         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
71         if (i < jac->n_local_true) {
72           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
73           ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
74           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
75         }
76         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
77       }
78       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
79       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
80     }
81   } else if (isstring) {
82     ierr = PetscViewerStringSPrintf(viewer," blks=%D, overlap=%D, type=%D",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
83     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
84       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
85     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
86   } else {
87     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
88   }
89   PetscFunctionReturn(0);
90 }
91 
92 #undef __FUNCT__
93 #define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds everything the ASM preconditioner needs: the subdomain
   index sets (if not user-supplied), the overlap extension, the per-block work
   vectors and scatters, the per-block KSP solvers, and the sub-matrices.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscInt       start,start_val,end_val,sz,bs;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  /* temporary vector compatible with pc->pmat; used only as the scatter source template */
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      /* reduce the pair (count,count): outwork[0] = max over ranks, outwork[1] = sum over ranks
         -- presumably PetscMaxSum_Op computes (max,sum); confirm against its definition */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets */
      /* NOTE(review): sizeof(IS **) happens to equal sizeof(IS) (both are pointers),
         but sizeof(IS) is what is meant; same for the KSP/VecScatter/Vec mallocs below */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      /* the local ownership range must be aligned to the matrix block size */
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
      for (i=0; i<n_local_true; i++){
        /* block-size-aligned near-even split: the first (sz/bs) % n_local_true
           subdomains get one extra matrix block.
           NOTE(review): reuses the PetscMPIInt 'size' (MPI communicator size above)
           to hold a local length -- confirm no overflow for very large local sizes */
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      /* we created the IS's, so PCDestroy_ASM() must free them */
      osm->is_flg = PETSC_TRUE;
    }

    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
    /* x and y share one allocation of 2*n_local vectors */
    ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    /* padding slots (ranks with fewer than n_local true subdomains) get
       zero-length vectors/scatters so collective scatter calls stay matched */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

   /*
       Create the local solvers.
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      /* block solvers inherit the outer prefix plus "sub_" (e.g. -sub_pc_type) */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  /* NOTE(review): passes osm->n_local, but osm->pmat was created with only
     osm->n_local_true entries just above -- looks like this should be
     osm->n_local_true; confirm against PCModifySubMatrices() semantics */
  ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
213 
214 #undef __FUNCT__
215 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
216 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
217 {
218   PC_ASM         *osm = (PC_ASM*)pc->data;
219   PetscErrorCode ierr;
220   PetscInt       i;
221 
222   PetscFunctionBegin;
223   for (i=0; i<osm->n_local_true; i++) {
224     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
225   }
226   /*
227      If inplace flag is set, then destroy the matrix after the setup
228      on blocks is done.
229   */
230   if (osm->inplace && osm->n_local_true > 0) {
231     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
232   }
233   PetscFunctionReturn(0);
234 }
235 
236 #undef __FUNCT__
237 #define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restricts x to
   each subdomain, solves with the block KSP, and adds the interpolated local
   solutions into y.  Depending on osm->type the restriction and/or the
   interpolation scatter is limited to purely local values.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start ALL forward scatters first so communication can overlap the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions, so it must start at zero */
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    /* begin sending this block's solution back while later blocks still solve */
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* finally complete all reverse scatters */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
281 
282 #undef __FUNCT__
283 #define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner.  Structurally identical to PCApply_ASM() except that the
   block solves use KSPSolveTranspose() and the restrict/interpolate roles of
   the two scatter directions are exchanged.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start ALL forward scatters first so communication can overlap the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* y accumulates ADD_VALUES contributions, so it must start at zero */
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* finally complete all reverse scatters */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->scat[i],osm->y[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
330 
331 #undef __FUNCT__
332 #define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Releases everything owned by the ASM preconditioner.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* scatters and work vectors exist for all n_local slots, including padding ones */
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  /* when inplace is set the sub-matrices were already freed in PCSetUpOnBlocks_ASM() */
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* destroy the index sets only if PCSetUp_ASM() created them (is_flg);
     user-supplied IS's remain the caller's responsibility */
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  ierr = PetscFree(osm->scat);CHKERRQ(ierr);
  /* osm->y points into the same allocation as osm->x, so freeing x frees both */
  ierr = PetscFree(osm->x);CHKERRQ(ierr);
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "PCSetFromOptions_ASM"
366 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
367 {
368   PC_ASM         *osm = (PC_ASM*)pc->data;
369   PetscErrorCode ierr;
370   PetscInt       blocks,ovl;
371   PetscTruth     flg,set,sym;
372 
373   PetscFunctionBegin;
374   /* set the type to symmetric if matrix is symmetric */
375   if (pc->pmat && !osm->type_set) {
376     ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
377     if (set && sym) {
378       osm->type = PC_ASM_BASIC;
379     }
380   }
381   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
382     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
383     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
384     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
385     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
386     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
387     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
388     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&osm->type,&flg);CHKERRQ(ierr);
389   ierr = PetscOptionsTail();CHKERRQ(ierr);
390   PetscFunctionReturn(0);
391 }
392 
393 /*------------------------------------------------------------------------------------*/
394 
395 EXTERN_C_BEGIN
396 #undef __FUNCT__
397 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
398 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[])
399 {
400   PC_ASM *osm = (PC_ASM*)pc->data;
401 
402   PetscFunctionBegin;
403   if (n < 1) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks");
404 
405   if (pc->setupcalled && n != osm->n_local_true) {
406     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
407   }
408   if (!pc->setupcalled){
409     osm->n_local_true = n;
410     osm->is           = is;
411   }
412   PetscFunctionReturn(0);
413 }
414 EXTERN_C_END
415 
416 EXTERN_C_BEGIN
417 #undef __FUNCT__
418 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
419 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
420 {
421   PC_ASM         *osm = (PC_ASM*)pc->data;
422   PetscErrorCode ierr;
423   PetscMPIInt    rank,size;
424   PetscInt       n;
425 
426   PetscFunctionBegin;
427 
428   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
429 
430   /*
431      Split the subdomains equally amoung all processors
432   */
433   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
434   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
435   n = N/size + ((N % size) > rank);
436   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
437   if (!pc->setupcalled){
438     osm->n_local_true = n;
439     if (!n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have at least one block: total processors %d total blocks %d",size,(int)n);
440     osm->is           = 0;
441   }
442   PetscFunctionReturn(0);
443 }
444 EXTERN_C_END
445 
446 EXTERN_C_BEGIN
447 #undef __FUNCT__
448 #define __FUNCT__ "PCASMSetOverlap_ASM"
449 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
450 {
451   PC_ASM *osm;
452 
453   PetscFunctionBegin;
454   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
455 
456   osm               = (PC_ASM*)pc->data;
457   osm->overlap      = ovl;
458   PetscFunctionReturn(0);
459 }
460 EXTERN_C_END
461 
462 EXTERN_C_BEGIN
463 #undef __FUNCT__
464 #define __FUNCT__ "PCASMSetType_ASM"
465 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
466 {
467   PC_ASM *osm;
468 
469   PetscFunctionBegin;
470   osm           = (PC_ASM*)pc->data;
471   osm->type     = type;
472   osm->type_set = PETSC_TRUE;
473   PetscFunctionReturn(0);
474 }
475 EXTERN_C_END
476 
477 EXTERN_C_BEGIN
478 #undef __FUNCT__
479 #define __FUNCT__ "PCASMGetSubKSP_ASM"
/*
   PCASMGetSubKSP_ASM - Returns this process's block solvers, its block count,
   and (optionally, collectively) the global index of its first block.
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *jac = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (jac->n_local_true < 0) {
    SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local)     *n_local     = jac->n_local_true;
  if (first_local) {
    /* MPI_Scan gives the inclusive prefix sum of block counts over ranks;
       subtracting our own count yields the global index of our first block.
       Collective, so either all ranks must request first_local or none. */
    ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPIU_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
    *first_local -= jac->n_local_true;
  }
  /* NOTE(review): ksp is dereferenced unconditionally (unlike n_local and
     first_local) -- callers must not pass PETSC_NULL here */
  *ksp                         = jac->ksp;
  jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
                                      not necessarily true though!  This flag is
                                      used only for PCView_ASM() */
  PetscFunctionReturn(0);
}
501 EXTERN_C_END
502 
503 EXTERN_C_BEGIN
504 #undef __FUNCT__
505 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
506 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace_ASM(PC pc)
507 {
508   PC_ASM *dir;
509 
510   PetscFunctionBegin;
511   dir          = (PC_ASM*)pc->data;
512   dir->inplace = PETSC_TRUE;
513   PetscFunctionReturn(0);
514 }
515 EXTERN_C_END
516 
517 /*----------------------------------------------------------------------------*/
518 #undef __FUNCT__
519 #define __FUNCT__ "PCASMSetUseInPlace"
520 /*@
521    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
522 
523    Collective on PC
524 
525    Input Parameters:
526 .  pc - the preconditioner context
527 
528    Options Database Key:
529 .  -pc_asm_in_place - Activates in-place factorization
530 
531    Note:
532    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
533    when the original matrix is not required during the Solve process.
534    This destroys the matrix early, thus saving memory.
535 
536    Level: intermediate
537 
538 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
539 
540 .seealso: PCFactorSetUseInPlace()
541 @*/
542 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace(PC pc)
543 {
544   PetscErrorCode ierr,(*f)(PC);
545 
546   PetscFunctionBegin;
547   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
548   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
549   if (f) {
550     ierr = (*f)(pc);CHKERRQ(ierr);
551   }
552   PetscFunctionReturn(0);
553 }
554 /*----------------------------------------------------------------------------*/
555 
556 #undef __FUNCT__
557 #define __FUNCT__ "PCASMSetLocalSubdomains"
558 /*@C
559     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
560     only) for the additive Schwarz preconditioner.
561 
562     Collective on PC
563 
564     Input Parameters:
565 +   pc - the preconditioner context
566 .   n - the number of subdomains for this processor (default value = 1)
567 -   is - the index sets that define the subdomains for this processor
568          (or PETSC_NULL for PETSc to determine subdomains)
569 
570     Notes:
571     The IS numbering is in the parallel, global numbering of the vector.
572 
573     By default the ASM preconditioner uses 1 block per processor.
574 
575     These index sets cannot be destroyed until after completion of the
576     linear solves for which the ASM preconditioner is being used.
577 
578     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
579 
580     Level: advanced
581 
582 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
583 
584 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
585           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
586 @*/
587 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[])
588 {
589   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[]);
590 
591   PetscFunctionBegin;
592   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
593   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
594   if (f) {
595     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
596   }
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "PCASMSetTotalSubdomains"
602 /*@C
603     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
604     additive Schwarz preconditioner.  Either all or no processors in the
605     PC communicator must call this routine, with the same index sets.
606 
607     Collective on PC
608 
609     Input Parameters:
610 +   pc - the preconditioner context
611 .   n - the number of subdomains for all processors
612 -   is - the index sets that define the subdomains for all processor
613          (or PETSC_NULL for PETSc to determine subdomains)
614 
615     Options Database Key:
616     To set the total number of subdomain blocks rather than specify the
617     index sets, use the option
618 .    -pc_asm_blocks <blks> - Sets total blocks
619 
620     Notes:
621     Currently you cannot use this to set the actual subdomains with the argument is.
622 
623     By default the ASM preconditioner uses 1 block per processor.
624 
625     These index sets cannot be destroyed until after completion of the
626     linear solves for which the ASM preconditioner is being used.
627 
628     Use PCASMSetLocalSubdomains() to set local subdomains.
629 
630     Level: advanced
631 
632 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
633 
634 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
635           PCASMCreateSubdomains2D()
636 @*/
637 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS *is)
638 {
639   PetscErrorCode ierr,(*f)(PC,PetscInt,IS *);
640 
641   PetscFunctionBegin;
642   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
643   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
644   if (f) {
645     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
646   }
647   PetscFunctionReturn(0);
648 }
649 
650 #undef __FUNCT__
651 #define __FUNCT__ "PCASMSetOverlap"
652 /*@
653     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
654     additive Schwarz preconditioner.  Either all or no processors in the
655     PC communicator must call this routine.
656 
657     Collective on PC
658 
659     Input Parameters:
660 +   pc  - the preconditioner context
661 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
662 
663     Options Database Key:
664 .   -pc_asm_overlap <ovl> - Sets overlap
665 
666     Notes:
667     By default the ASM preconditioner uses 1 block per processor.  To use
668     multiple blocks per processor, see PCASMSetTotalSubdomains() and
669     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
670 
671     The overlap defaults to 1, so if one desires that no additional
672     overlap be computed beyond what may have been set with a call to
673     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
674     must be set to be 0.  In particular, if one does not explicitly set
675     the subdomains an application code, then all overlap would be computed
676     internally by PETSc, and using an overlap of 0 would result in an ASM
677     variant that is equivalent to the block Jacobi preconditioner.
678 
679     Note that one can define initial index sets with any overlap via
680     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
681     PCASMSetOverlap() merely allows PETSc to extend that overlap further
682     if desired.
683 
684     Level: intermediate
685 
686 .keywords: PC, ASM, set, overlap
687 
688 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
689           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
690 @*/
691 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
692 {
693   PetscErrorCode ierr,(*f)(PC,PetscInt);
694 
695   PetscFunctionBegin;
696   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
697   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
698   if (f) {
699     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
700   }
701   PetscFunctionReturn(0);
702 }
703 
704 #undef __FUNCT__
705 #define __FUNCT__ "PCASMSetType"
706 /*@
707     PCASMSetType - Sets the type of restriction and interpolation used
708     for local problems in the additive Schwarz method.
709 
710     Collective on PC
711 
712     Input Parameters:
713 +   pc  - the preconditioner context
714 -   type - variant of ASM, one of
715 .vb
716       PC_ASM_BASIC       - full interpolation and restriction
717       PC_ASM_RESTRICT    - full restriction, local processor interpolation
718       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
719       PC_ASM_NONE        - local processor restriction and interpolation
720 .ve
721 
722     Options Database Key:
723 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
724 
725     Level: intermediate
726 
727 .keywords: PC, ASM, set, type
728 
729 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
730           PCASMCreateSubdomains2D()
731 @*/
732 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
733 {
734   PetscErrorCode ierr,(*f)(PC,PCASMType);
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
738   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
739   if (f) {
740     ierr = (*f)(pc,type);CHKERRQ(ierr);
741   }
742   PetscFunctionReturn(0);
743 }
744 
745 #undef __FUNCT__
746 #define __FUNCT__ "PCASMGetSubKSP"
747 /*@C
748    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
749    this processor.
750 
751    Collective on PC iff first_local is requested
752 
753    Input Parameter:
754 .  pc - the preconditioner context
755 
756    Output Parameters:
757 +  n_local - the number of blocks on this processor or PETSC_NULL
758 .  first_local - the global number of the first block on this processor or PETSC_NULL,
759                  all processors must request or all must pass PETSC_NULL
760 -  ksp - the array of KSP contexts
761 
762    Note:
763    After PCASMGetSubKSP() the array of KSPes is not to be freed
764 
765    Currently for some matrix implementations only 1 block per processor
766    is supported.
767 
768    You must call KSPSetUp() before calling PCASMGetSubKSP().
769 
770    Level: advanced
771 
772 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
773 
774 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
775           PCASMCreateSubdomains2D(),
776 @*/
777 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
778 {
779   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
780 
781   PetscFunctionBegin;
782   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
783   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
784   if (f) {
785     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
786   } else {
787     SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
788   }
789 
790  PetscFunctionReturn(0);
791 }
792 
793 /* -------------------------------------------------------------------------------------*/
794 /*MC
795    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
796            its own KSP object.
797 
798    Options Database Keys:
799 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
800 .  -pc_asm_in_place - Activates in-place factorization
801 .  -pc_asm_blocks <blks> - Sets total blocks
802 .  -pc_asm_overlap <ovl> - Sets overlap
803 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
804 
805      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
806       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
807       -pc_asm_type basic to use the standard ASM.
808 
809    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
810      than one processor. Defaults to one block per processor.
811 
812      To set options on the solvers for each block append -sub_ to all the KSP, and PC
813         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
814 
815      To set the options on the solvers separate for each block call PCASMGetSubKSP()
816          and set the options directly on the resulting KSP object (you can access its PC
817          with KSPGetPC())
818 
819 
820    Level: beginner
821 
822    Concepts: additive Schwarz method
823 
824 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
825            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
826            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
827            PCASMSetUseInPlace()
828 M*/
829 
830 EXTERN_C_BEGIN
831 #undef __FUNCT__
832 #define __FUNCT__ "PCCreate_ASM"
833 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
834 {
835   PetscErrorCode ierr;
836   PC_ASM         *osm;
837 
838   PetscFunctionBegin;
839   ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
840   osm->n                 = PETSC_DECIDE;
841   osm->n_local           = 0;
842   osm->n_local_true      = PETSC_DECIDE;
843   osm->overlap           = 1;
844   osm->is_flg            = PETSC_FALSE;
845   osm->ksp              = 0;
846   osm->scat              = 0;
847   osm->is                = 0;
848   osm->mat               = 0;
849   osm->pmat              = 0;
850   osm->type              = PC_ASM_RESTRICT;
851   osm->same_local_solves = PETSC_TRUE;
852   osm->inplace           = PETSC_FALSE;
853   pc->data               = (void*)osm;
854 
855   pc->ops->apply             = PCApply_ASM;
856   pc->ops->applytranspose    = PCApplyTranspose_ASM;
857   pc->ops->setup             = PCSetUp_ASM;
858   pc->ops->destroy           = PCDestroy_ASM;
859   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
860   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
861   pc->ops->view              = PCView_ASM;
862   pc->ops->applyrichardson   = 0;
863 
864   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
865                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
866   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
867                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
868   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
869                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
870   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
871                     PCASMSetType_ASM);CHKERRQ(ierr);
872   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
873                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
874 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
875                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
876   PetscFunctionReturn(0);
877 }
878 EXTERN_C_END
879 
880 
881 #undef __FUNCT__
882 #define __FUNCT__ "PCASMCreateSubdomains2D"
883 /*@
884    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
885    preconditioner for a two-dimensional problem on a regular grid.
886 
887    Not Collective
888 
889    Input Parameters:
890 +  m, n - the number of mesh points in the x and y directions
891 .  M, N - the number of subdomains in the x and y directions
892 .  dof - degrees of freedom per node
893 -  overlap - overlap in mesh lines
894 
895    Output Parameters:
896 +  Nsub - the number of subdomains created
897 -  is - the array of index sets defining the subdomains
898 
899    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
901    preconditioners.  More general related routines are
902    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
903 
904    Level: advanced
905 
906 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
907 
908 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
909           PCASMSetOverlap()
910 @*/
911 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
912 {
913   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
914   PetscErrorCode ierr;
915   PetscInt       nidx,*idx,loc,ii,jj,count;
916 
917   PetscFunctionBegin;
918   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
919 
920   *Nsub = N*M;
921   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
922   ystart = 0;
923   loc_outter = 0;
924   for (i=0; i<N; i++) {
925     height = n/N + ((n % N) > i); /* height of subdomain */
926     if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
927     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
928     yright = ystart + height + overlap; if (yright > n) yright = n;
929     xstart = 0;
930     for (j=0; j<M; j++) {
931       width = m/M + ((m % M) > j); /* width of subdomain */
932       if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
933       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
934       xright = xstart + width + overlap; if (xright > m) xright = m;
935       nidx   = (xright - xleft)*(yright - yleft);
936       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
937       loc    = 0;
938       for (ii=yleft; ii<yright; ii++) {
939         count = m*ii + xleft;
940         for (jj=xleft; jj<xright; jj++) {
941           idx[loc++] = count++;
942         }
943       }
944       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
945       ierr = PetscFree(idx);CHKERRQ(ierr);
946       xstart += width;
947     }
948     ystart += height;
949   }
950   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
951   PetscFunctionReturn(0);
952 }
953 
954 #undef __FUNCT__
955 #define __FUNCT__ "PCASMGetLocalSubdomains"
956 /*@C
957     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
958     only) for the additive Schwarz preconditioner.
959 
960     Collective on PC
961 
962     Input Parameter:
963 .   pc - the preconditioner context
964 
965     Output Parameters:
966 +   n - the number of subdomains for this processor (default value = 1)
967 -   is - the index sets that define the subdomains for this processor
968 
969 
970     Notes:
971     The IS numbering is in the parallel, global numbering of the vector.
972 
973     Level: advanced
974 
975 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
976 
977 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
978           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
979 @*/
980 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[])
981 {
982   PC_ASM *osm;
983 
984   PetscFunctionBegin;
985   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
986   PetscValidIntPointer(n,2);
987   if (!pc->setupcalled) {
988     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
989   }
990 
991   osm = (PC_ASM*)pc->data;
992   if (n)  *n = osm->n_local_true;
993   if (is) *is = osm->is;
994   PetscFunctionReturn(0);
995 }
996 
997 #undef __FUNCT__
998 #define __FUNCT__ "PCASMGetLocalSubmatrices"
999 /*@C
1000     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1001     only) for the additive Schwarz preconditioner.
1002 
1003     Collective on PC
1004 
1005     Input Parameter:
1006 .   pc - the preconditioner context
1007 
1008     Output Parameters:
1009 +   n - the number of matrices for this processor (default value = 1)
1010 -   mat - the matrices
1011 
1012 
1013     Level: advanced
1014 
1015 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1016 
1017 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1018           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1019 @*/
1020 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1021 {
1022   PC_ASM *osm;
1023 
1024   PetscFunctionBegin;
1025   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1026   PetscValidPointer(n,2);
1027   if (!pc->setupcalled) {
1028     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1029   }
1030 
1031   osm = (PC_ASM*)pc->data;
1032   if (n)   *n   = osm->n_local_true;
1033   if (mat) *mat = osm->pmat;
1034   PetscFunctionReturn(0);
1035 }
1036 
1037