xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision dba47a550923b04c7c4ebbb735eb62a1b3e4e9ae) !
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/
15 
typedef struct {
  PetscInt   n,n_local,n_local_true; /* total subdomains; max per process; actual count on this process (see header comment) */
  PetscTruth is_flg;              /* flg set to 1 if the IS created in pcsetup (so PCDestroy must free them) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;               /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;               /* work vectors: subdomain rhs and solution; y aliases the second half of the x allocation */
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same (used only by PCView_ASM) */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done. Similar to inplace
                                     factorization in the case of LU and ILU */
} PC_ASM;
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "PCView_ASM"
35 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
36 {
37   PC_ASM         *jac = (PC_ASM*)pc->data;
38   PetscErrorCode ierr;
39   PetscMPIInt    rank;
40   PetscInt       i;
41   const char     *cstring = 0;
42   PetscTruth     iascii,isstring;
43   PetscViewer    sviewer;
44 
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
49   if (iascii) {
50     if (jac->n > 0) {
51       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",jac->n,jac->overlap);CHKERRQ(ierr);
52     } else {
53       ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",jac->overlap);CHKERRQ(ierr);
54     }
55     if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
56     else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
57     else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
58     else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
59     else                                      cstring = "Unknown ASM type";
60     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
61     ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
62     if (jac->same_local_solves) {
63       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
64       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
65       if (!rank && jac->ksp) {
66         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
67         ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
68         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
69       }
70       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
71     } else {
72       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
73       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D\n",rank,jac->n_local_true);CHKERRQ(ierr);
74       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
75       for (i=0; i<jac->n_local_true; i++) {
76         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
77         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
78         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
79         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
80         if (i != jac->n_local_true-1) {
81           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
82         }
83       }
84       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
85       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
86     }
87   } else if (isstring) {
88     ierr = PetscViewerStringSPrintf(viewer," blks=%D, overlap=%D, type=%D",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
89     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
90       if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
91     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
92   } else {
93     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
94   }
95   PetscFunctionReturn(0);
96 }
97 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds the subdomain decomposition, the work vectors and
   scatters, and one sequential KSP per local subdomain; then extracts the
   subdomain matrices and attaches them to the block solvers.

   On the first call (pc->setupcalled == 0) everything is created; on later
   calls only the submatrices are refreshed (rebuilt from scratch when the
   nonzero pattern changed).
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscInt       start,start_val,end_val,sz,bs;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;   /* NOTE(review): fetched below but not otherwise used here */
  char           *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  /* template vector with the parallel layout of the preconditioner matrix */
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      PetscInt inwork[2],outwork[2];
      /* PetscMaxSum_Op yields both the max (-> n_local) and the sum (-> n)
         of the per-process counts in one reduction */
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS **),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      /* the local ownership range must be aligned with the matrix block size */
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
      for (i=0; i<n_local_true; i++){
        /* split the local rows into contiguous block-aligned strides; the
           first (sz/bs) % n_local_true subdomains get one extra block */
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      /* we created these IS ourselves, so PCDestroy_ASM() must free them */
      osm->is_flg = PETSC_TRUE;
    }

    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP **),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter **),&osm->scat);CHKERRQ(ierr);
    /* x and y share a single allocation; y is the second half */
    ierr   = PetscMalloc(2*n_local*sizeof(Vec **),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    /* processes with fewer than n_local true subdomains get empty scatters so
       the collective scatter calls in PCApply_ASM() still match across ranks */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

   /*
       Create the local solvers.
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      /* block solvers read options prefixed with "<pc prefix>sub_" */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
222 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
223 {
224   PC_ASM         *osm = (PC_ASM*)pc->data;
225   PetscErrorCode ierr;
226   PetscInt       i;
227 
228   PetscFunctionBegin;
229   for (i=0; i<osm->n_local_true; i++) {
230     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
231   }
232   /*
233      If inplace flag is set, then destroy the matrix after the setup
234      on blocks is done.
235   */
236   if (osm->inplace && osm->n_local_true > 0) {
237     ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
238   }
239   PetscFunctionReturn(0);
240 }
241 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: y = sum_i R_i^T A_i^{-1} R_i x.

   Each rank scatters its pieces of x into the subdomain work vectors, solves
   the subdomain systems, and accumulates (ADD_VALUES) the results back into y.
   The scatters are split into Begin/End phases so communication overlaps the
   local solves; all n_local scatters are collective, so ranks with fewer true
   subdomains still participate with empty scatters.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all forward scatters (x -> subdomain work vectors) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates contributions, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* finish all reverse scatters (subdomain solutions summed into y) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
288 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner.  Mirrors PCApply_ASM() but uses KSPSolveTranspose() and
   swaps the roles of the RESTRICT/INTERPOLATE flags, since transposing the
   three-factor product reverses which scatter is the restriction.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscScalar    zero = 0.0;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
       Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

       Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all forward scatters (x -> subdomain work vectors) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  /* y accumulates contributions, so it must start at zero */
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* finish all reverse scatters (subdomain results summed into y) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
338 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Releases everything PCSetUp_ASM() created: scatters, work
   vectors, subdomain matrices (unless already freed by the in-place option),
   the block KSPs, the index sets (only when we created them ourselves), and
   finally the PC_ASM context itself.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    /* y[i] is a separate Vec even though the y array shares x's allocation */
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  /* with the in-place option the submatrices were already destroyed in
     PCSetUpOnBlocks_ASM(), so skip them here */
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* only free the index sets if PCSetUp_ASM() created them (is_flg);
     user-supplied IS remain owned by the caller */
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  /* freeing x also frees y (single allocation) */
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
371 
372 #undef __FUNCT__
373 #define __FUNCT__ "PCSetFromOptions_ASM"
374 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
375 {
376   PC_ASM         *osm = (PC_ASM*)pc->data;
377   PetscErrorCode ierr;
378   PetscInt       blocks,ovl,indx;
379   PetscTruth     flg,set,sym;
380   const char     *type[] = {"none","restrict","interpolate","basic"};
381 
382   PetscFunctionBegin;
383   /* set the type to symmetric if matrix is symmetric */
384   if (pc->pmat && !osm->type_set) {
385     ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
386     if (set && sym) {
387       osm->type = PC_ASM_BASIC;
388     }
389   }
390   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
391     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
392     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
393     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
394     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
395     ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
396     if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
397     ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
398     if (flg) {
399       ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
400     }
401   ierr = PetscOptionsTail();CHKERRQ(ierr);
402   PetscFunctionReturn(0);
403 }
404 
405 /*------------------------------------------------------------------------------------*/
406 
407 EXTERN_C_BEGIN
408 #undef __FUNCT__
409 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
410 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[])
411 {
412   PC_ASM *osm = (PC_ASM*)pc->data;
413 
414   PetscFunctionBegin;
415   if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");
416 
417   if (pc->setupcalled && n != osm->n_local_true) {
418     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
419   }
420   if (!pc->setupcalled){
421     osm->n_local_true = n;
422     osm->is           = is;
423   }
424   PetscFunctionReturn(0);
425 }
426 EXTERN_C_END
427 
428 EXTERN_C_BEGIN
429 #undef __FUNCT__
430 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
431 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
432 {
433   PC_ASM         *osm = (PC_ASM*)pc->data;
434   PetscErrorCode ierr;
435   PetscMPIInt    rank,size;
436   PetscInt       n;
437 
438   PetscFunctionBegin;
439 
440   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
441 they cannot be set globally yet.");
442 
443   /*
444      Split the subdomains equally amoung all processors
445   */
446   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
447   ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
448   n = N/size + ((N % size) > rank);
449   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
450   if (!pc->setupcalled){
451     osm->n_local_true = n;
452     osm->is           = 0;
453   }
454   PetscFunctionReturn(0);
455 }
456 EXTERN_C_END
457 
458 EXTERN_C_BEGIN
459 #undef __FUNCT__
460 #define __FUNCT__ "PCASMSetOverlap_ASM"
461 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
462 {
463   PC_ASM *osm;
464 
465   PetscFunctionBegin;
466   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
467 
468   osm               = (PC_ASM*)pc->data;
469   osm->overlap      = ovl;
470   PetscFunctionReturn(0);
471 }
472 EXTERN_C_END
473 
474 EXTERN_C_BEGIN
475 #undef __FUNCT__
476 #define __FUNCT__ "PCASMSetType_ASM"
477 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
478 {
479   PC_ASM *osm;
480 
481   PetscFunctionBegin;
482   osm           = (PC_ASM*)pc->data;
483   osm->type     = type;
484   osm->type_set = PETSC_TRUE;
485   PetscFunctionReturn(0);
486 }
487 EXTERN_C_END
488 
489 EXTERN_C_BEGIN
490 #undef __FUNCT__
491 #define __FUNCT__ "PCASMGetSubKSP_ASM"
492 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
493 {
494   PC_ASM         *jac = (PC_ASM*)pc->data;
495   PetscErrorCode ierr;
496 
497   PetscFunctionBegin;
498   if (jac->n_local_true < 0) {
499     SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
500   }
501 
502   if (n_local)     *n_local     = jac->n_local_true;
503   if (first_local) {
504     ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPIU_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
505     *first_local -= jac->n_local_true;
506   }
507   *ksp                         = jac->ksp;
508   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
509                                       not necessarily true though!  This flag is
510                                       used only for PCView_ASM() */
511   PetscFunctionReturn(0);
512 }
513 EXTERN_C_END
514 
515 EXTERN_C_BEGIN
516 #undef __FUNCT__
517 #define __FUNCT__ "PCASMSetUseInPlace_ASM"
518 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace_ASM(PC pc)
519 {
520   PC_ASM *dir;
521 
522   PetscFunctionBegin;
523   dir          = (PC_ASM*)pc->data;
524   dir->inplace = PETSC_TRUE;
525   PetscFunctionReturn(0);
526 }
527 EXTERN_C_END
528 
529 /*----------------------------------------------------------------------------*/
530 #undef __FUNCT__
531 #define __FUNCT__ "PCASMSetUseInPlace"
532 /*@
533    PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.
534 
535    Collective on PC
536 
537    Input Parameters:
538 .  pc - the preconditioner context
539 
540    Options Database Key:
541 .  -pc_asm_in_place - Activates in-place factorization
542 
543    Note:
544    PCASMSetUseInplace() can only be used with the KSP method KSPPREONLY, and
545    when the original matrix is not required during the Solve process.
546    This destroys the matrix, early thus, saving on memory usage.
547 
548    Level: intermediate
549 
550 .keywords: PC, set, factorization, direct, inplace, in-place, ASM
551 
552 .seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace ()
553 @*/
554 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace(PC pc)
555 {
556   PetscErrorCode ierr,(*f)(PC);
557 
558   PetscFunctionBegin;
559   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
560   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
561   if (f) {
562     ierr = (*f)(pc);CHKERRQ(ierr);
563   }
564   PetscFunctionReturn(0);
565 }
566 /*----------------------------------------------------------------------------*/
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "PCASMSetLocalSubdomains"
570 /*@C
571     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
572     only) for the additive Schwarz preconditioner.
573 
574     Collective on PC
575 
576     Input Parameters:
577 +   pc - the preconditioner context
578 .   n - the number of subdomains for this processor (default value = 1)
579 -   is - the index sets that define the subdomains for this processor
580          (or PETSC_NULL for PETSc to determine subdomains)
581 
582     Notes:
583     The IS numbering is in the parallel, global numbering of the vector.
584 
585     By default the ASM preconditioner uses 1 block per processor.
586 
587     These index sets cannot be destroyed until after completion of the
588     linear solves for which the ASM preconditioner is being used.
589 
590     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
591 
592     Level: advanced
593 
594 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
595 
596 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
597           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
598 @*/
599 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[])
600 {
601   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[]);
602 
603   PetscFunctionBegin;
604   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
605   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
606   if (f) {
607     ierr = (*f)(pc,n,is);CHKERRQ(ierr);
608   }
609   PetscFunctionReturn(0);
610 }
611 
612 #undef __FUNCT__
613 #define __FUNCT__ "PCASMSetTotalSubdomains"
614 /*@C
615     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
616     additive Schwarz preconditioner.  Either all or no processors in the
617     PC communicator must call this routine, with the same index sets.
618 
619     Collective on PC
620 
621     Input Parameters:
622 +   pc - the preconditioner context
623 .   n - the number of subdomains for all processors
624 -   is - the index sets that define the subdomains for all processor
625          (or PETSC_NULL for PETSc to determine subdomains)
626 
627     Options Database Key:
628     To set the total number of subdomain blocks rather than specify the
629     index sets, use the option
630 .    -pc_asm_blocks <blks> - Sets total blocks
631 
632     Notes:
633     Currently you cannot use this to set the actual subdomains with the argument is.
634 
635     By default the ASM preconditioner uses 1 block per processor.
636 
637     These index sets cannot be destroyed until after completion of the
638     linear solves for which the ASM preconditioner is being used.
639 
640     Use PCASMSetLocalSubdomains() to set local subdomains.
641 
642     Level: advanced
643 
644 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
645 
646 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
647           PCASMCreateSubdomains2D()
648 @*/
649 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS *is)
650 {
651   PetscErrorCode ierr,(*f)(PC,PetscInt,IS *);
652 
653   PetscFunctionBegin;
654   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
655   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
656   if (f) {
657     ierr = (*f)(pc,N,is);CHKERRQ(ierr);
658   }
659   PetscFunctionReturn(0);
660 }
661 
662 #undef __FUNCT__
663 #define __FUNCT__ "PCASMSetOverlap"
664 /*@
665     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
666     additive Schwarz preconditioner.  Either all or no processors in the
667     PC communicator must call this routine.
668 
669     Collective on PC
670 
671     Input Parameters:
672 +   pc  - the preconditioner context
673 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
674 
675     Options Database Key:
676 .   -pc_asm_overlap <ovl> - Sets overlap
677 
678     Notes:
679     By default the ASM preconditioner uses 1 block per processor.  To use
680     multiple blocks per perocessor, see PCASMSetTotalSubdomains() and
681     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
682 
683     The overlap defaults to 1, so if one desires that no additional
684     overlap be computed beyond what may have been set with a call to
685     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
686     must be set to be 0.  In particular, if one does not explicitly set
687     the subdomains an application code, then all overlap would be computed
688     internally by PETSc, and using an overlap of 0 would result in an ASM
689     variant that is equivalent to the block Jacobi preconditioner.
690 
691     Note that one can define initial index sets with any overlap via
692     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
693     PCASMSetOverlap() merely allows PETSc to extend that overlap further
694     if desired.
695 
696     Level: intermediate
697 
698 .keywords: PC, ASM, set, overlap
699 
700 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
701           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
702 @*/
703 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
704 {
705   PetscErrorCode ierr,(*f)(PC,PetscInt);
706 
707   PetscFunctionBegin;
708   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
709   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
710   if (f) {
711     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
712   }
713   PetscFunctionReturn(0);
714 }
715 
716 #undef __FUNCT__
717 #define __FUNCT__ "PCASMSetType"
718 /*@
719     PCASMSetType - Sets the type of restriction and interpolation used
720     for local problems in the additive Schwarz method.
721 
722     Collective on PC
723 
724     Input Parameters:
725 +   pc  - the preconditioner context
726 -   type - variant of ASM, one of
727 .vb
728       PC_ASM_BASIC       - full interpolation and restriction
729       PC_ASM_RESTRICT    - full restriction, local processor interpolation
730       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
731       PC_ASM_NONE        - local processor restriction and interpolation
732 .ve
733 
734     Options Database Key:
735 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
736 
737     Level: intermediate
738 
739 .keywords: PC, ASM, set, type
740 
741 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
742           PCASMCreateSubdomains2D()
743 @*/
744 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
745 {
746   PetscErrorCode ierr,(*f)(PC,PCASMType);
747 
748   PetscFunctionBegin;
749   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
750   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
751   if (f) {
752     ierr = (*f)(pc,type);CHKERRQ(ierr);
753   }
754   PetscFunctionReturn(0);
755 }
756 
757 #undef __FUNCT__
758 #define __FUNCT__ "PCASMGetSubKSP"
759 /*@C
760    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
761    this processor.
762 
763    Collective on PC iff first_local is requested
764 
765    Input Parameter:
766 .  pc - the preconditioner context
767 
768    Output Parameters:
769 +  n_local - the number of blocks on this processor or PETSC_NULL
770 .  first_local - the global number of the first block on this processor or PETSC_NULL,
771                  all processors must request or all must pass PETSC_NULL
772 -  ksp - the array of KSP contexts
773 
774    Note:
775    After PCASMGetSubKSP() the array of KSPes is not to be freed
776 
777    Currently for some matrix implementations only 1 block per processor
778    is supported.
779 
780    You must call KSPSetUp() before calling PCASMGetSubKSP().
781 
782    Level: advanced
783 
784 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
785 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
788 @*/
789 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
790 {
791   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
792 
793   PetscFunctionBegin;
794   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
795   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
796   if (f) {
797     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
798   } else {
799     SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
800   }
801 
802  PetscFunctionReturn(0);
803 }
804 
805 /* -------------------------------------------------------------------------------------*/
806 /*MC
807    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
808            its own KSP object.
809 
810    Options Database Keys:
811 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
812 .  -pc_asm_in_place - Activates in-place factorization
813 .  -pc_asm_blocks <blks> - Sets total blocks
814 .  -pc_asm_overlap <ovl> - Sets overlap
815 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
816 
817      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
818       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
819       -pc_asm_type basic to use the standard ASM.
820 
821    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
822      than one processor. Defaults to one block per processor.
823 
824      To set options on the solvers for each block append -sub_ to all the KSP, and PC
825         options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
826 
     To set the options on the solvers separately for each block call PCASMGetSubKSP()
828          and set the options directly on the resulting KSP object (you can access its PC
829          with KSPGetPC())
830 
831 
832    Level: beginner
833 
834    Concepts: additive Schwarz method
835 
836 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
837            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
838            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
839            PCASMSetUseInPlace()
840 M*/
841 
842 EXTERN_C_BEGIN
843 #undef __FUNCT__
844 #define __FUNCT__ "PCCreate_ASM"
845 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
846 {
847   PetscErrorCode ierr;
848   PC_ASM         *osm;
849 
850   PetscFunctionBegin;
851   ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
852   ierr = PetscLogObjectMemory(pc,sizeof(PC_ASM));CHKERRQ(ierr);
853   osm->n                 = PETSC_DECIDE;
854   osm->n_local           = 0;
855   osm->n_local_true      = PETSC_DECIDE;
856   osm->overlap           = 1;
857   osm->is_flg            = PETSC_FALSE;
858   osm->ksp              = 0;
859   osm->scat              = 0;
860   osm->is                = 0;
861   osm->mat               = 0;
862   osm->pmat              = 0;
863   osm->type              = PC_ASM_RESTRICT;
864   osm->same_local_solves = PETSC_TRUE;
865   osm->inplace           = PETSC_FALSE;
866   pc->data               = (void*)osm;
867 
868   pc->ops->apply             = PCApply_ASM;
869   pc->ops->applytranspose    = PCApplyTranspose_ASM;
870   pc->ops->setup             = PCSetUp_ASM;
871   pc->ops->destroy           = PCDestroy_ASM;
872   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
873   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
874   pc->ops->view              = PCView_ASM;
875   pc->ops->applyrichardson   = 0;
876 
877   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
878                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
879   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
880                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
881   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
882                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
883   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
884                     PCASMSetType_ASM);CHKERRQ(ierr);
885   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
886                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
887 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
888                     PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
889   PetscFunctionReturn(0);
890 }
891 EXTERN_C_END
892 
893 
894 #undef __FUNCT__
895 #define __FUNCT__ "PCASMCreateSubdomains2D"
896 /*@
897    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
898    preconditioner for a two-dimensional problem on a regular grid.
899 
900    Not Collective
901 
902    Input Parameters:
903 +  m, n - the number of mesh points in the x and y directions
904 .  M, N - the number of subdomains in the x and y directions
905 .  dof - degrees of freedom per node
906 -  overlap - overlap in mesh lines
907 
908    Output Parameters:
909 +  Nsub - the number of subdomains created
910 -  is - the array of index sets defining the subdomains
911 
912    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
914    preconditioners.  More general related routines are
915    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
916 
917    Level: advanced
918 
919 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
920 
921 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
922           PCASMSetOverlap()
923 @*/
924 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
925 {
926   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
927   PetscErrorCode ierr;
928   PetscInt       nidx,*idx,loc,ii,jj,count;
929 
930   PetscFunctionBegin;
931   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
932 
933   *Nsub = N*M;
934   ierr = PetscMalloc((*Nsub)*sizeof(IS **),is);CHKERRQ(ierr);
935   ystart = 0;
936   loc_outter = 0;
937   for (i=0; i<N; i++) {
938     height = n/N + ((n % N) > i); /* height of subdomain */
939     if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
940     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
941     yright = ystart + height + overlap; if (yright > n) yright = n;
942     xstart = 0;
943     for (j=0; j<M; j++) {
944       width = m/M + ((m % M) > j); /* width of subdomain */
945       if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
946       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
947       xright = xstart + width + overlap; if (xright > m) xright = m;
948       nidx   = (xright - xleft)*(yright - yleft);
949       ierr = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
950       loc    = 0;
951       for (ii=yleft; ii<yright; ii++) {
952         count = m*ii + xleft;
953         for (jj=xleft; jj<xright; jj++) {
954           idx[loc++] = count++;
955         }
956       }
957       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
958       ierr = PetscFree(idx);CHKERRQ(ierr);
959       xstart += width;
960     }
961     ystart += height;
962   }
963   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
964   PetscFunctionReturn(0);
965 }
966 
967 #undef __FUNCT__
968 #define __FUNCT__ "PCASMGetLocalSubdomains"
969 /*@C
970     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
971     only) for the additive Schwarz preconditioner.
972 
973     Collective on PC
974 
975     Input Parameter:
976 .   pc - the preconditioner context
977 
978     Output Parameters:
979 +   n - the number of subdomains for this processor (default value = 1)
980 -   is - the index sets that define the subdomains for this processor
981 
982 
983     Notes:
984     The IS numbering is in the parallel, global numbering of the vector.
985 
986     Level: advanced
987 
988 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
989 
990 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
991           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
992 @*/
993 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[])
994 {
995   PC_ASM *osm;
996 
997   PetscFunctionBegin;
998   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
999   PetscValidIntPointer(n,2);
1000   if (!pc->setupcalled) {
1001     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1002   }
1003 
1004   osm = (PC_ASM*)pc->data;
1005   if (n)  *n = osm->n_local_true;
1006   if (is) *is = osm->is;
1007   PetscFunctionReturn(0);
1008 }
1009 
1010 #undef __FUNCT__
1011 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1012 /*@C
1013     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1014     only) for the additive Schwarz preconditioner.
1015 
1016     Collective on PC
1017 
1018     Input Parameter:
1019 .   pc - the preconditioner context
1020 
1021     Output Parameters:
1022 +   n - the number of matrices for this processor (default value = 1)
1023 -   mat - the matrices
1024 
1025 
1026     Level: advanced
1027 
1028 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1029 
1030 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1031           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1032 @*/
1033 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1034 {
1035   PC_ASM *osm;
1036 
1037   PetscFunctionBegin;
1038   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1039   PetscValidPointer(n,2);
1040   if (!pc->setupcalled) {
1041     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1042   }
1043 
1044   osm = (PC_ASM*)pc->data;
1045   if (n)   *n   = osm->n_local_true;
1046   if (mat) *mat = osm->pmat;
1047   PetscFunctionReturn(0);
1048 }
1049 
1050