#define PETSCKSP_DLL

/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Each processor may have any number of subdomains, but in order to deal
  easily with the VecScatter() we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true
*/
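
/*
   A concrete illustration (an editorial note, not part of the original source):
   with three processes owning 2, 1, and 1 subdomains respectively, n = 4,
   n_local_true is 2, 1, and 1 on the respective processes, and n_local = 2
   everywhere; the extra "empty" subdomains on the last two processes exist only
   so that every process performs the same number of VecScatter() calls.
*/
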
#include "private/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  PetscInt   n,n_local,n_local_true;
  PetscTruth is_flg;              /* set to PETSC_TRUE if the index sets were created in PCSetUp_ASM() */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscTruth inplace;             /* indicates that the sub-matrices are deleted after
                                     PCSetUpOnBlocks() is done; similar to in-place
                                     factorization in the case of LU and ILU */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *jac = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (jac->n > 0) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",jac->n,jac->overlap);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",jac->overlap);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[jac->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
      if (!rank && jac->ksp) {
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D\n",rank,jac->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < jac->n_local_true) {
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D, overlap=%D, type=%D",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  PetscInt       start,start_val,end_val,sz,bs;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
      for (i=0; i<n_local_true; i++){
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      osm->is_flg = PETSC_TRUE;
    }

    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter),&osm->scat);CHKERRQ(ierr);
    ierr   = PetscMalloc(2*n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

    /*
       Create the local solvers.
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  /*
     If the inplace flag is set, then destroy the matrices after the setup
     on blocks is done.
  */
  if (osm->inplace && osm->n_local_true > 0) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
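
/*
   In operator terms (an editorial note, not part of the original source): with
   R_i the restriction onto overlapping subdomain i and A_i = R_i A R_i^T the
   subdomain matrix solved by osm->ksp[i], the loops above compute

       y = sum_i R_i^T A_i^{-1} R_i x

   When the PC_ASM_RESTRICT bit is not set, the forward (restriction) scatter
   uses only local values; when the PC_ASM_INTERPOLATE bit is not set, the
   reverse (interpolation) scatter does. These give the restricted ASM variants.
*/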

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of the three terms.
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  ierr = PetscFree(osm->scat);CHKERRQ(ierr);
  ierr = PetscFree(osm->x);CHKERRQ(ierr);
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static PetscErrorCode PCSetFromOptions_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscTruth     flg,set,sym;

  PetscFunctionBegin;
  /* use the symmetric variant PC_ASM_BASIC if the matrix is known to be symmetric */
  if (pc->pmat && !osm->type_set) {
    ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
    if (set && sym) {
      osm->type = PC_ASM_BASIC;
    }
  }
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
    ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
    ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization in-place","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
    ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&osm->type,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[])
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (n < 1) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks");

  if (pc->setupcalled && n != osm->n_local_true) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
  }
  if (!pc->setupcalled){
    osm->n_local_true = n;
    osm->is           = is;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;

  if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
  n = N/size + ((N % size) > rank);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled){
    if (!n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have at least one block: total processors %d total blocks %d",size,(int)N);
    osm->n_local_true = n;
    osm->is           = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");

  osm          = (PC_ASM*)pc->data;
  osm->overlap = ovl;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  osm           = (PC_ASM*)pc->data;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *jac = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (jac->n_local_true < 0) {
    SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local) *n_local = jac->n_local_true;
  if (first_local) {
    ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPIU_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
    *first_local -= jac->n_local_true;
  }
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE; /* assume that the local solves are now different;
                                           not necessarily true, though!  This flag is
                                           used only by PCView_ASM() */
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace_ASM(PC pc)
{
  PC_ASM *dir;

  PetscFunctionBegin;
  dir          = (PC_ASM*)pc->data;
  dir->inplace = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*----------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace"
/*@
   PCASMSetUseInPlace - Tells the additive Schwarz preconditioner to destroy its
   submatrices once PCSetUpOnBlocks() is done.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_asm_in_place - Activates in-place factorization

   Note:
   PCASMSetUseInPlace() can only be used with the KSP method KSPPREONLY, and
   only when the original matrix is not required during the solve process.
   Destroying the matrices early saves memory.

   Level: intermediate

.keywords: PC, set, factorization, direct, inplace, in-place, ASM

.seealso: PCFactorSetUseInPlace()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetUseInPlace(PC pc)
{
  PetscErrorCode ierr,(*f)(PC);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
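
/*
   Example (an illustrative sketch, not part of the original source), assuming
   ksp is a KSP whose operators have already been set:

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr);

   or, equivalently, pass -pc_asm_in_place on the command line.
*/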
/*----------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
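
/*
   Example (an illustrative sketch, not part of the original source): defining
   two subdomains on this process from index sets is0 and is1, hypothetical ISes
   built by the application in the global numbering:

      IS subdomains[2];
      PC pc;
      subdomains[0] = is0;
      subdomains[1] = is1;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetLocalSubdomains(pc,2,subdomains);CHKERRQ(ierr);

   The index sets must stay alive until the solves using this preconditioner
   are complete.
*/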

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for all processors
-   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS *is)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS *);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,N,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
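
/*
   Example (an illustrative sketch, not part of the original source): asking for
   eight blocks in total and letting PETSc split them among the processes; the
   index-set argument must currently be PETSC_NULL:

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetTotalSubdomains(pc,8,PETSC_NULL);CHKERRQ(ierr);

   This matches the command line option -pc_asm_blocks 8.
*/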

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,ovl);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
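
/*
   Example (an illustrative sketch, not part of the original source): extending
   the PETSc-generated subdomains by two mesh points; with an overlap of 0 and
   PETSc-generated subdomains, ASM reduces to block Jacobi:

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetOverlap(pc,2);CHKERRQ(ierr);
*/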

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr,(*f)(PC,PCASMType);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
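
/*
   Example (an illustrative sketch, not part of the original source): selecting
   the basic variant, so that both restriction and interpolation use the full
   overlapping subdomains:

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetType(pc,PC_ASM_BASIC);CHKERRQ(ierr);
*/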

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
  }
  PetscFunctionReturn(0);
}
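
/*
   Example (an illustrative sketch, not part of the original source): after
   KSPSetUp(), fetching the block solvers and configuring each one directly:

      PetscInt nlocal,first,i;
      KSP      *subksp;
      PC       subpc;
      ierr = PCASMGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) {
        ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
      }
*/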

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
.  -pc_asm_in_place - Activates in-place factorization
.  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
-  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
      -pc_asm_type basic to get the standard ASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, prepend -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

   Level: beginner

   Concepts: additive Schwarz method

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
           PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
           PCASMSetUseInPlace()
M*/
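
/*
   Example (an illustrative sketch, not part of the original source): a typical
   command line combining these options for a hypothetical application ./app,
   using ILU(1) on each block:

      mpirun -np 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 \
             -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
*/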

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(pc,sizeof(PC_ASM));CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->is_flg            = PETSC_FALSE;
  osm->ksp               = 0;
  osm->scat              = 0;
  osm->is                = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->inplace           = PETSC_FALSE;
  pc->data               = (void*)osm;

  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
                    PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
-  is - the array of index sets defining the subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_ERR_SUP,"More than one degree of freedom per node not supported");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outer++);CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
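
/*
   Example (an illustrative sketch, not part of the original source): on a single
   process, covering a 64 x 64 grid (one degree of freedom per node) with a
   4 x 4 array of subdomains that overlap by one mesh line:

      PetscInt Nsub;
      IS       *is;
      ierr = PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is);CHKERRQ(ierr);
      ierr = PCASMSetLocalSubdomains(pc,Nsub,is);CHKERRQ(ierr);
*/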

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubdomains"
/*@C
    PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[])
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n,2);
  if (!pc->setupcalled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  }

  osm = (PC_ASM*)pc->data;
  if (n)  *n  = osm->n_local_true;
  if (is) *is = osm->is;
  PetscFunctionReturn(0);
}
999 #undef __FUNCT__
1000 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1001 /*@C
1002     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1003     only) for the additive Schwarz preconditioner.
1004 
1005     Collective on PC
1006 
1007     Input Parameter:
1008 .   pc - the preconditioner context
1009 
1010     Output Parameters:
1011 +   n - the number of matrices for this processor (default value = 1)
1012 -   mat - the matrices
1013 
1014 
1015     Level: advanced
1016 
1017 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1018 
1019 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1020           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1021 @*/
1022 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1023 {
1024   PC_ASM *osm;
1025 
1026   PetscFunctionBegin;
1027   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1028   PetscValidPointer(n,2);
1029   if (!pc->setupcalled) {
1030     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1031   }
1032 
1033   osm = (PC_ASM*)pc->data;
1034   if (n)   *n   = osm->n_local_true;
1035   if (mat) *mat = osm->pmat;
1036   PetscFunctionReturn(0);
1037 }
1038 