xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 0700a8246d308f50502909ba325e6169d3ee27eb)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
15 
typedef struct {
  PetscInt   n, n_local, n_local_true; /* n: total subdomains over all processes; n_local: max over processes of n_local_true; n_local_true: subdomains actually owned here (see file header) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;
32 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints a summary of the additive Schwarz preconditioner.
   Supports ASCII and string viewers only; any other viewer type is an error.

   Collective: the synchronized prints, flushes, and singleton viewer
   get/restore calls below must be executed by every process, so do not
   guard them by rank.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;


  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* osm->n may still be PETSC_DECIDE (< 0) if PCSetUp() has not run yet */
    if (osm->n > 0) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",osm->n,osm->overlap);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",osm->overlap);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks use the same solver configuration: view only rank 0's first KSP */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers may differ per block: every process views each of its blocks.
         The loop runs to n_local (the max over processes) so the collective
         singleton get/restore calls stay matched on all ranks. */
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "PCASMPrintSubdomains"
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm  = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer;
105   PetscInt       i,j,nidx;
106   const PetscInt *idx;
107   PetscErrorCode ierr;
108 
109   PetscFunctionBegin;
110   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
111   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
112   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
113   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
114   for (i=0;i<osm->n_local_true;i++) {
115     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
116     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
117     for (j=0; j<nidx; j++) {
118       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
119     }
120     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
121     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
122     if (osm->is_local) {
123       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
124       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
125       for (j=0; j<nidx; j++) {
126         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
127       }
128       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
129       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
130     }
131   }
132   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
133   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
134   PetscFunctionReturn(0);
135 }
136 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds the additive Schwarz data structures: determines the
   subdomain partition, creates overlapping/non-overlapping index sets, work
   vectors and scatters, one sequential KSP per local block, and extracts the
   submatrices.  On repeated calls only the submatrices and solver operators
   are refreshed.

   Collective on PC.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscTruth     symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* For a matrix known to be symmetric, default to the symmetric (basic)
       variant unless the user explicitly chose a type */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains; the PetscMaxSum_Op reduction
         yields both the max (-> n_local) and the sum (-> n) of the local counts */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is){ /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* Remember the pre-overlap subdomains as the "local" (non-overlapping)
         sets before MatIncreaseOverlap() below modifies osm->is */
      ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* zero overlap: local set is the same object, just take a reference */
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetTruth(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts.
       NOTE(review): the PetscMalloc sizes below use sizeof(VecScatter *) /
       sizeof(Vec *) where sizeof(VecScatter) / sizeof(Vec) is meant; harmless
       since these PETSc handles are themselves pointer types -- confirm. */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    /* firstRow accumulates the local subdomain sizes; after the loop it must
       equal lastRow or the subdomain sizes do not tile this process's rows */
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        /* Build the map from the overlapping solve vector y[i] down to the
           non-overlapping y_local[i]: translate the global indices of
           is_local[i] into positions within is[i] */
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
        /* every local index must also be in the overlapping set */
        if (nout != m_local) SETERRQ(PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,&isll);CHKERRQ(ierr);
        ierr = PetscFree(idx);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isl);CHKERRQ(ierr);
      } else {
        /* no distinct local sets: y_local aliases y and prolongation aliases
           restriction, with an extra reference taken for each alias */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    if (firstRow != lastRow) SETERRQ2(PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
    /* Empty entries so collective scatter operations stay matched on processes
       that own fewer than n_local subdomains */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(vec);CHKERRQ(ierr);

    /* Create the local solvers */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      /* NOTE(review): subpc is unused after this call; presumably KSPGetPC()
         is invoked only to force creation of the inner PC -- confirm */
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
324 
325 #undef __FUNCT__
326 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
327 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
328 {
329   PC_ASM         *osm = (PC_ASM*)pc->data;
330   PetscErrorCode ierr;
331   PetscInt       i;
332 
333   PetscFunctionBegin;
334   for (i=0; i<osm->n_local_true; i++) {
335     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
336   }
337   PetscFunctionReturn(0);
338 }
339 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restrict x to
   each (overlapping) subdomain, solve with the block KSP, and add the results
   back into y.  Scatter begins and ends are split so communication for later
   blocks overlaps the local solves of earlier ones; the exact statement order
   below is essential for that pipelining and for collective correctness.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions (loop to n_local so collectives stay matched) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* results are accumulated with ADD_VALUES, so y must start at zero */
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* reduce the overlapping solution y[i] to the non-overlapping y_local[i] */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
389 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner.  Identical structure to PCApply_ASM(), but uses
   KSPSolveTranspose() and swaps which ASM type flag limits the forward
   versus reverse scatter, since the three factors are transposed.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions (loop to n_local so collectives stay matched) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* results are accumulated with ADD_VALUES, so y must start at zero */
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* reduce the overlapping solution y[i] to the non-overlapping y_local[i] */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
442 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
   PCDestroy_ASM - Releases everything PCSetUp_ASM() created: block solvers,
   submatrices, scatters, work vectors, and the subdomain index sets.
   The aliased objects created in PCSetUp_ASM() (prolongation aliasing
   restriction, y_local aliasing y) carry extra references, so destroying
   each array entry here is balanced.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* scatters and work vectors were all allocated together, so the presence of
     osm->restriction implies the other arrays exist too */
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  if (osm->is) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "PCSetFromOptions_ASM"
488 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
489 {
490   PC_ASM         *osm = (PC_ASM*)pc->data;
491   PetscErrorCode ierr;
492   PetscInt       blocks,ovl;
493   PetscTruth     symset,flg;
494   PCASMType      asmtype;
495 
496   PetscFunctionBegin;
497   /* set the type to symmetric if matrix is symmetric */
498   if (!osm->type_set && pc->pmat) {
499     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
500     if (symset && flg) { osm->type = PC_ASM_BASIC; }
501   }
502   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
503     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
504     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
505     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
506     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
507     flg  = PETSC_FALSE;
508     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
509     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
510   ierr = PetscOptionsTail();CHKERRQ(ierr);
511   PetscFunctionReturn(0);
512 }
513 
514 /*------------------------------------------------------------------------------------*/
515 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Implementation behind
   PCASMSetLocalSubdomains(): records this process's n subdomain index sets
   (overlapping and optionally non-overlapping).  Must be called before
   PCSetUp(); after setup only a redundant call (same n, no new index sets)
   is tolerated.
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
  }
  if (!pc->setupcalled) {
    /* Reference the incoming sets BEFORE destroying the old ones, so the call
       is safe even if a caller passes (some of) the currently stored sets */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS *),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is[i] = is[i]; }
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS *),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
    }
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
555 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
/*
   PCASMSetTotalSubdomains_ASM - Implementation behind
   PCASMSetTotalSubdomains(): splits N subdomains as evenly as possible across
   the communicator (the first N mod size ranks get one extra) and records
   only the resulting local count; the actual index sets are created later by
   PCSetUp_ASM().
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  /* floor(N/size) plus one extra block for the first (N mod size) ranks */
  n = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
589 
590 EXTERN_C_BEGIN
591 #undef __FUNCT__
592 #define __FUNCT__ "PCASMSetOverlap_ASM"
593 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
594 {
595   PC_ASM *osm = (PC_ASM*)pc->data;
596 
597   PetscFunctionBegin;
598   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
599   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
600   if (!pc->setupcalled) {
601     osm->overlap = ovl;
602   }
603   PetscFunctionReturn(0);
604 }
605 EXTERN_C_END
606 
607 EXTERN_C_BEGIN
608 #undef __FUNCT__
609 #define __FUNCT__ "PCASMSetType_ASM"
610 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
611 {
612   PC_ASM *osm = (PC_ASM*)pc->data;
613 
614   PetscFunctionBegin;
615   osm->type     = type;
616   osm->type_set = PETSC_TRUE;
617   PetscFunctionReturn(0);
618 }
619 EXTERN_C_END
620 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices_ASM"
/*
   PCASMSetSortIndices_ASM - Records whether PCSetUp_ASM() should sort the
   subdomain index sets after increasing the overlap.
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices_ASM(PC pc,PetscTruth doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
EXTERN_C_END
633 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
/*
   PCASMGetSubKSP_ASM - Returns this process's block solvers, its local block
   count, and the global index of its first block.

   Note: when first_local is requested this routine is collective
   (MPI_Scan over the PC's communicator); callers on all ranks must then
   request it together.
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* osm->n_local_true is only established once PCSetUp_ASM() has run */
  if (osm->n_local_true < 1) {
    SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local) {
    *n_local = osm->n_local_true;
  }
  if (first_local) {
    /* inclusive prefix sum minus own count = exclusive prefix sum,
       i.e. the global number of the first block on this process */
    ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
663 
664 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the type-specific implementation if the PC provides one;
     silently a no-op for PC types that do not support ASM subdomains */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n,is,is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
707 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* NOTE(review): f is called with 4 arguments, but PCASMSetTotalSubdomains_ASM()
     above takes only (PC,PetscInt,IS*); confirm what function is actually
     registered under "PCASMSetTotalSubdomains_C" -- a prototype mismatch here
     would be undefined behavior. */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,N,is,is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
759 
760 #undef __FUNCT__
761 #define __FUNCT__ "PCASMSetOverlap"
762 /*@
763     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
764     additive Schwarz preconditioner.  Either all or no processors in the
765     PC communicator must call this routine.
766 
767     Collective on PC
768 
769     Input Parameters:
770 +   pc  - the preconditioner context
771 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
772 
773     Options Database Key:
774 .   -pc_asm_overlap <ovl> - Sets overlap
775 
776     Notes:
777     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
779     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
780 
781     The overlap defaults to 1, so if one desires that no additional
782     overlap be computed beyond what may have been set with a call to
783     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
784     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
786     internally by PETSc, and using an overlap of 0 would result in an ASM
787     variant that is equivalent to the block Jacobi preconditioner.
788 
789     Note that one can define initial index sets with any overlap via
790     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
791     PCASMSetOverlap() merely allows PETSc to extend that overlap further
792     if desired.
793 
794     Level: intermediate
795 
796 .keywords: PC, ASM, set, overlap
797 
798 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
799           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
800 @*/
801 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
802 {
803   PetscErrorCode ierr,(*f)(PC,PetscInt);
804 
805   PetscFunctionBegin;
806   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
807   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
808   if (f) {
809     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
810   }
811   PetscFunctionReturn(0);
812 }
813 
814 #undef __FUNCT__
815 #define __FUNCT__ "PCASMSetType"
816 /*@
817     PCASMSetType - Sets the type of restriction and interpolation used
818     for local problems in the additive Schwarz method.
819 
820     Collective on PC
821 
822     Input Parameters:
823 +   pc  - the preconditioner context
824 -   type - variant of ASM, one of
825 .vb
826       PC_ASM_BASIC       - full interpolation and restriction
827       PC_ASM_RESTRICT    - full restriction, local processor interpolation
828       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
829       PC_ASM_NONE        - local processor restriction and interpolation
830 .ve
831 
832     Options Database Key:
833 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
834 
835     Level: intermediate
836 
837 .keywords: PC, ASM, set, type
838 
839 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
840           PCASMCreateSubdomains2D()
841 @*/
842 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
843 {
844   PetscErrorCode ierr,(*f)(PC,PCASMType);
845 
846   PetscFunctionBegin;
847   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
848   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
849   if (f) {
850     ierr = (*f)(pc,type);CHKERRQ(ierr);
851   }
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "PCASMSetSortIndices"
857 /*@
858     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
859 
860     Collective on PC
861 
862     Input Parameters:
863 +   pc  - the preconditioner context
864 -   doSort - sort the subdomain indices
865 
866     Level: intermediate
867 
868 .keywords: PC, ASM, set, type
869 
870 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
871           PCASMCreateSubdomains2D()
872 @*/
873 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices(PC pc,PetscTruth doSort)
874 {
875   PetscErrorCode ierr,(*f)(PC,PetscTruth);
876 
877   PetscFunctionBegin;
878   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
879   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetSortIndices_C",(void (**)(void))&f);CHKERRQ(ierr);
880   if (f) {
881     ierr = (*f)(pc,doSort);CHKERRQ(ierr);
882   }
883   PetscFunctionReturn(0);
884 }
885 
886 #undef __FUNCT__
887 #define __FUNCT__ "PCASMGetSubKSP"
888 /*@C
889    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
890    this processor.
891 
892    Collective on PC iff first_local is requested
893 
894    Input Parameter:
895 .  pc - the preconditioner context
896 
897    Output Parameters:
898 +  n_local - the number of blocks on this processor or PETSC_NULL
899 .  first_local - the global number of the first block on this processor or PETSC_NULL,
900                  all processors must request or all must pass PETSC_NULL
901 -  ksp - the array of KSP contexts
902 
903    Note:
904    After PCASMGetSubKSP() the array of KSPes is not to be freed
905 
906    Currently for some matrix implementations only 1 block per processor
907    is supported.
908 
909    You must call KSPSetUp() before calling PCASMGetSubKSP().
910 
911    Level: advanced
912 
913 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
914 
915 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
916           PCASMCreateSubdomains2D(),
917 @*/
918 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
919 {
920   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
921 
922   PetscFunctionBegin;
923   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
924   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
925   if (f) {
926     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
927   } else {
928     SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
929   }
930 
931  PetscFunctionReturn(0);
932 }
933 
934 /* -------------------------------------------------------------------------------------*/
935 /*MC
936    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
937            its own KSP object.
938 
939    Options Database Keys:
940 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
941 .  -pc_asm_blocks <blks> - Sets total blocks
942 .  -pc_asm_overlap <ovl> - Sets overlap
943 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
944 
945      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
946       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
947       -pc_asm_type basic to use the standard ASM.
948 
949    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
950      than one processor. Defaults to one block per processor.
951 
952      To set options on the solvers for each block append -sub_ to all the KSP, and PC
953         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
954 
955      To set the options on the solvers separate for each block call PCASMGetSubKSP()
956          and set the options directly on the resulting KSP object (you can access its PC
957          with KSPGetPC())
958 
959 
960    Level: beginner
961 
962    Concepts: additive Schwarz method
963 
964     References:
965     An additive variant of the Schwarz alternating method for the case of many subregions
966     M Dryja, OB Widlund - Courant Institute, New York University Technical report
967 
968     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
969     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
970 
971 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
972            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
973            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
974 
975 M*/
976 
977 EXTERN_C_BEGIN
978 #undef __FUNCT__
979 #define __FUNCT__ "PCCreate_ASM"
980 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
981 {
982   PetscErrorCode ierr;
983   PC_ASM         *osm;
984 
985   PetscFunctionBegin;
986   ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
987   osm->n                 = PETSC_DECIDE;
988   osm->n_local           = 0;
989   osm->n_local_true      = 0;
990   osm->overlap           = 1;
991   osm->ksp               = 0;
992   osm->restriction       = 0;
993   osm->localization      = 0;
994   osm->prolongation      = 0;
995   osm->x                 = 0;
996   osm->y                 = 0;
997   osm->y_local           = 0;
998   osm->is                = 0;
999   osm->is_local          = 0;
1000   osm->mat               = 0;
1001   osm->pmat              = 0;
1002   osm->type              = PC_ASM_RESTRICT;
1003   osm->same_local_solves = PETSC_TRUE;
1004   osm->sort_indices      = PETSC_TRUE;
1005 
1006   pc->data                   = (void*)osm;
1007   pc->ops->apply             = PCApply_ASM;
1008   pc->ops->applytranspose    = PCApplyTranspose_ASM;
1009   pc->ops->setup             = PCSetUp_ASM;
1010   pc->ops->destroy           = PCDestroy_ASM;
1011   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
1012   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
1013   pc->ops->view              = PCView_ASM;
1014   pc->ops->applyrichardson   = 0;
1015 
1016   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
1017                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1018   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
1019                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1020   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
1021                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
1022   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
1023                     PCASMSetType_ASM);CHKERRQ(ierr);
1024   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
1025                     PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1026   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
1027                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1028   PetscFunctionReturn(0);
1029 }
1030 EXTERN_C_END
1031 
1032 
1033 #undef __FUNCT__
1034 #define __FUNCT__ "PCASMCreateSubdomains"
1035 /*@C
1036    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1038 
1039    Collective
1040 
1041    Input Parameters:
1042 +  A - The global matrix operator
1043 -  n - the number of local blocks
1044 
1045    Output Parameters:
1046 .  outis - the array of index sets defining the subdomains
1047 
1048    Level: advanced
1049 
1050    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1051     from these if you use PCASMSetLocalSubdomains()
1052 
1053     In the Fortran version you must provide the array outis[] already allocated of length n.
1054 
1055 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1056 
1057 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1058 @*/
1059 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1060 {
1061   MatPartitioning           mpart;
1062   const char                *prefix;
1063   PetscErrorCode            (*f)(Mat,PetscTruth*,MatReuse,Mat*);
1064   PetscMPIInt               size;
1065   PetscInt                  i,j,rstart,rend,bs;
1066   PetscTruth                iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1067   Mat                       Ad = PETSC_NULL, adj;
1068   IS                        ispart,isnumb,*is;
1069   PetscErrorCode            ierr;
1070 
1071   PetscFunctionBegin;
1072   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1073   PetscValidPointer(outis,3);
1074   if (n < 1) {SETERRQ1(PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);}
1075 
1076   /* Get prefix, row distribution, and block size */
1077   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1078   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1079   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1080   if (rstart/bs*bs != rstart || rend/bs*bs != rend) {
1081     SETERRQ3(PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1082   }
1083   /* Get diagonal block from matrix if possible */
1084   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1085   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1086   if (f) {
1087     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1088   } else if (size == 1) {
1089     iscopy = PETSC_FALSE; Ad = A;
1090   } else {
1091     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1092   }
1093   if (Ad) {
1094     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1095     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1096   }
1097   if (Ad && n > 1) {
1098     PetscTruth match,done;
1099     /* Try to setup a good matrix partitioning if available */
1100     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1101     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1102     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1103     ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_CURRENT,&match);CHKERRQ(ierr);
1104     if (!match) {
1105       ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_SQUARE,&match);CHKERRQ(ierr);
1106     }
1107     if (!match) { /* assume a "good" partitioner is available */
1108       PetscInt na,*ia,*ja;
1109       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1110       if (done) {
1111 	/* Build adjacency matrix by hand. Unfortunately a call to
1112 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1113 	   remove the block-aij structure and we cannot expect
1114 	   MatPartitioning to split vertices as we need */
1115 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1116 	nnz = 0;
1117 	for (i=0; i<na; i++) { /* count number of nonzeros */
1118 	  len = ia[i+1] - ia[i];
1119 	  row = ja + ia[i];
1120 	  for (j=0; j<len; j++) {
1121 	    if (row[j] == i) { /* don't count diagonal */
1122 	      len--; break;
1123 	    }
1124 	  }
1125 	  nnz += len;
1126 	}
1127 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1128 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1129 	nnz    = 0;
1130 	iia[0] = 0;
1131 	for (i=0; i<na; i++) { /* fill adjacency */
1132 	  cnt = 0;
1133 	  len = ia[i+1] - ia[i];
1134 	  row = ja + ia[i];
1135 	  for (j=0; j<len; j++) {
1136 	    if (row[j] != i) { /* if not diagonal */
1137 	      jja[nnz+cnt++] = row[j];
1138 	    }
1139 	  }
1140 	  nnz += cnt;
1141 	  iia[i+1] = nnz;
1142 	}
1143 	/* Partitioning of the adjacency matrix */
1144 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1145 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1146 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1147 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1148 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1149 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1150 	foundpart = PETSC_TRUE;
1151       }
1152       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1153     }
1154     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1155   }
1156   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1157 
1158   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1159   *outis = is;
1160 
1161   if (!foundpart) {
1162 
1163     /* Partitioning by contiguous chunks of rows */
1164 
1165     PetscInt mbs   = (rend-rstart)/bs;
1166     PetscInt start = rstart;
1167     for (i=0; i<n; i++) {
1168       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1169       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1170       start += count;
1171     }
1172 
1173   } else {
1174 
1175     /* Partitioning by adjacency of diagonal block  */
1176 
1177     const PetscInt *numbering;
1178     PetscInt       *count,nidx,*indices,*newidx,start=0;
1179     /* Get node count in each partition */
1180     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1181     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1182     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1183       for (i=0; i<n; i++) count[i] *= bs;
1184     }
1185     /* Build indices from node numbering */
1186     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1187     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1188     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1189     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1190     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1191     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1192     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1193       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1194       for (i=0; i<nidx; i++)
1195 	for (j=0; j<bs; j++)
1196 	  newidx[i*bs+j] = indices[i]*bs + j;
1197       ierr = PetscFree(indices);CHKERRQ(ierr);
1198       nidx   *= bs;
1199       indices = newidx;
1200     }
1201     /* Shift to get global indices */
1202     for (i=0; i<nidx; i++) indices[i] += rstart;
1203 
1204     /* Build the index sets for each block */
1205     for (i=0; i<n; i++) {
1206       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],&is[i]);CHKERRQ(ierr);
1207       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1208       start += count[i];
1209     }
1210 
1211     ierr = PetscFree(count);
1212     ierr = PetscFree(indices);
1213     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1214     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1215 
1216   }
1217 
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "PCASMDestroySubdomains"
1223 /*@C
1224    PCASMDestroySubdomains - Destroys the index sets created with
1225    PCASMCreateSubdomains(). Should be called after setting subdomains
1226    with PCASMSetLocalSubdomains().
1227 
1228    Collective
1229 
1230    Input Parameters:
1231 +  n - the number of index sets
1232 .  is - the array of index sets
1233 -  is_local - the array of local index sets, can be PETSC_NULL
1234 
1235    Level: advanced
1236 
1237 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1238 
1239 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1240 @*/
1241 PetscErrorCode PETSCKSP_DLLEXPORT PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1242 {
1243   PetscInt       i;
1244   PetscErrorCode ierr;
1245   PetscFunctionBegin;
1246   if (n <= 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1247   PetscValidPointer(is,2);
1248   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1249   ierr = PetscFree(is);CHKERRQ(ierr);
1250   if (is_local) {
1251     PetscValidPointer(is_local,3);
1252     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1253     ierr = PetscFree(is_local);CHKERRQ(ierr);
1254   }
1255   PetscFunctionReturn(0);
1256 }
1257 
1258 #undef __FUNCT__
1259 #define __FUNCT__ "PCASMCreateSubdomains2D"
1260 /*@
1261    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1262    preconditioner for a two-dimensional problem on a regular grid.
1263 
1264    Not Collective
1265 
1266    Input Parameters:
1267 +  m, n - the number of mesh points in the x and y directions
1268 .  M, N - the number of subdomains in the x and y directions
1269 .  dof - degrees of freedom per node
1270 -  overlap - overlap in mesh lines
1271 
1272    Output Parameters:
1273 +  Nsub - the number of subdomains created
1274 -  is - the array of index sets defining the subdomains
1275 
1276    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1278    preconditioners.  More general related routines are
1279    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1280 
1281    Level: advanced
1282 
1283 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1284 
1285 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1286           PCASMSetOverlap()
1287 @*/
1288 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
1289 {
1290   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
1291   PetscErrorCode ierr;
1292   PetscInt       nidx,*idx,loc,ii,jj,count;
1293 
1294   PetscFunctionBegin;
1295   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
1296 
1297   *Nsub = N*M;
1298   ierr = PetscMalloc((*Nsub)*sizeof(IS *),is);CHKERRQ(ierr);
1299   ystart = 0;
1300   loc_outter = 0;
1301   for (i=0; i<N; i++) {
1302     height = n/N + ((n % N) > i); /* height of subdomain */
1303     if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1304     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1305     yright = ystart + height + overlap; if (yright > n) yright = n;
1306     xstart = 0;
1307     for (j=0; j<M; j++) {
1308       width = m/M + ((m % M) > j); /* width of subdomain */
1309       if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1310       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1311       xright = xstart + width + overlap; if (xright > m) xright = m;
1312       nidx   = (xright - xleft)*(yright - yleft);
1313       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1314       loc    = 0;
1315       for (ii=yleft; ii<yright; ii++) {
1316         count = m*ii + xleft;
1317         for (jj=xleft; jj<xright; jj++) {
1318           idx[loc++] = count++;
1319         }
1320       }
1321       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
1322       ierr = PetscFree(idx);CHKERRQ(ierr);
1323       xstart += width;
1324     }
1325     ystart += height;
1326   }
1327   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1328   PetscFunctionReturn(0);
1329 }
1330 
1331 #undef __FUNCT__
1332 #define __FUNCT__ "PCASMGetLocalSubdomains"
1333 /*@C
1334     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1335     only) for the additive Schwarz preconditioner.
1336 
1337     Collective on PC
1338 
1339     Input Parameter:
1340 .   pc - the preconditioner context
1341 
1342     Output Parameters:
1343 +   n - the number of subdomains for this processor (default value = 1)
1344 .   is - the index sets that define the subdomains for this processor
1345 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1346 
1347 
1348     Notes:
1349     The IS numbering is in the parallel, global numbering of the vector.
1350 
1351     Level: advanced
1352 
1353 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1354 
1355 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1356           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1357 @*/
1358 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1359 {
1360   PC_ASM         *osm;
1361   PetscErrorCode ierr;
1362   PetscTruth     match;
1363 
1364   PetscFunctionBegin;
1365   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1366   PetscValidIntPointer(n,2);
1367   if (is) PetscValidPointer(is,3);
1368   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1369   if (!match) {
1370     if (n)  *n  = 0;
1371     if (is) *is = PETSC_NULL;
1372   } else {
1373     osm = (PC_ASM*)pc->data;
1374     if (n)  *n  = osm->n_local_true;
1375     if (is) *is = osm->is;
1376     if (is_local) *is_local = osm->is_local;
1377   }
1378   PetscFunctionReturn(0);
1379 }
1380 
1381 #undef __FUNCT__
1382 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1383 /*@C
1384     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1385     only) for the additive Schwarz preconditioner.
1386 
1387     Collective on PC
1388 
1389     Input Parameter:
1390 .   pc - the preconditioner context
1391 
1392     Output Parameters:
1393 +   n - the number of matrices for this processor (default value = 1)
1394 -   mat - the matrices
1395 
1396 
1397     Level: advanced
1398 
1399 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1400 
1401 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1402           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1403 @*/
1404 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1405 {
1406   PC_ASM         *osm;
1407   PetscErrorCode ierr;
1408   PetscTruth     match;
1409 
1410   PetscFunctionBegin;
1411   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1412   PetscValidIntPointer(n,2);
1413   if (mat) PetscValidPointer(mat,3);
1414   if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1415   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1416   if (!match) {
1417     if (n)   *n   = 0;
1418     if (mat) *mat = PETSC_NULL;
1419   } else {
1420     osm = (PC_ASM*)pc->data;
1421     if (n)   *n   = osm->n_local_true;
1422     if (mat) *mat = osm->pmat;
1423   }
1424   PetscFunctionReturn(0);
1425 }
1426