xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 28adb3f739167aae2a3fdf9bf501dbd0150de1de)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
15 
typedef struct {
  PetscInt   n, n_local, n_local_true; /* total subdomains; max per process; actual count on this process */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block (n_local_true entries) */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors: overlapping rhs, overlapping solution, non-overlapping solution */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth sort_indices;        /* flag to sort subdomain indices during setup */
} PC_ASM;
32 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
  PCView_ASM - Prints the ASM preconditioner state: total block count,
  overlap, restriction/interpolation type, and the per-block KSP solvers.
  Supports ASCII and string viewers only; any other viewer type is an error.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;


  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (osm->n > 0) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",osm->n,osm->overlap);CHKERRQ(ierr);
    } else {
      /* osm->n is not determined until PCSetUp_ASM() has run */
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",osm->overlap);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* all blocks share the same solver settings: print a single KSP from rank 0.
         Get/RestoreSingleton are called on every rank even though only rank 0 prints */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* per-block output: the loop runs to n_local (the max over all processes)
         so every rank participates in the same number of Get/RestoreSingleton
         calls even when it owns fewer than n_local true blocks */
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* compact single-line summary for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "PCASMPrintSubdomains"
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm  = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer;
105   PetscInt       i,j,nidx;
106   const PetscInt *idx;
107   PetscErrorCode ierr;
108 
109   PetscFunctionBegin;
110   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
111   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
112   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
113   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
114   for (i=0;i<osm->n_local_true;i++) {
115     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
116     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
117     for (j=0; j<nidx; j++) {
118       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
119     }
120     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
121     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
122     if (osm->is_local) {
123       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
124       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
125       for (j=0; j<nidx; j++) {
126         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
127       }
128       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
129       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
130     }
131   }
132   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
133   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
134   PetscFunctionReturn(0);
135 }
136 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
  PCSetUp_ASM - Builds all data needed to apply the ASM preconditioner:
    - decides the number of subdomains (per-process and global),
    - creates and overlap-extends the subdomain index sets,
    - creates the work vectors and restriction/localization/prolongation
      scatters for every block (plus zero-length dummies so collective
      scatter calls stay synchronized across processes),
    - extracts the submatrices and creates one sequential KSP per block.

  On repeated calls (pc->setupcalled) only the submatrices are refreshed;
  they are rebuilt from scratch when the nonzero pattern changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscTruth     symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* if the user never chose a type and the matrix is known symmetric,
       default to the symmetric (basic) variant */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains: PetscMaxSum_Op takes the max
         of the first entry (-> n_local) and the sum of the second (-> n) */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is){ /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetTruth(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts.
       NOTE(review): sizeof(VecScatter *) etc. below should logically be
       sizeof(VecScatter); harmless since these typedefs are pointer types */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    /* firstRow is advanced by the local (non-overlapping) size of each block;
       m_local is assigned inside every iteration before the increment runs,
       and after the loop firstRow must equal lastRow (checked below) */
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      /* x[i]/y[i] are sized by the overlapping subdomain */
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        IS              isShift;
        const PetscInt *idx_local;
        PetscInt       *idx, j;

        /* build the positions of the non-overlapping indices relative to the
           start of this block's owned rows; assumes is_local[i] is contained
           in is[i], i.e. m_local <= m (the allocation uses m) -- TODO confirm */
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m*sizeof(PetscInt), &idx);CHKERRQ(ierr);
        for(j = 0; j < m_local; ++j) {
          idx[j] = idx_local[j] - firstRow;
        }
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,&isShift);CHKERRQ(ierr);
        ierr = PetscFree(idx);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        /* localization drops the overlap: overlapping solution -> owned part */
        ierr = VecScatterCreate(osm->y[i],isShift,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isShift);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isl);CHKERRQ(ierr);
      } else {
        /* no separate local sets: y_local aliases y and prolongation aliases
           restriction, with reference counts bumped for the shared objects */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* the non-overlapping blocks must exactly tile this process's owned rows */
    if (firstRow != lastRow) SETERRQ2(PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
    /* processes with fewer than n_local true blocks get zero-length vectors
       and scatters so collective scatter operations stay synchronized */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(vec);CHKERRQ(ierr);

    /* Create the local solvers: sequential, default type PREONLY, with
       options prefix "<pc prefix>sub_" */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    /* freshly created submatrices inherit the parent matrix's options prefix */
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
310 
311 #undef __FUNCT__
312 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
313 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
314 {
315   PC_ASM         *osm = (PC_ASM*)pc->data;
316   PetscErrorCode ierr;
317   PetscInt       i;
318 
319   PetscFunctionBegin;
320   for (i=0; i<osm->n_local_true; i++) {
321     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
322   }
323   PetscFunctionReturn(0);
324 }
325 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
  PCApply_ASM - Applies the preconditioner: restrict x to each overlapping
  subdomain, solve with the block KSP, and add the (possibly localized)
  solutions back into y.  Scatter Begin/End pairs are deliberately split so
  communication overlaps the local solves -- do not reorder them.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions, including the zero-length dummy blocks, so the
     collective communication is posted before any solve begins */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: copy the owned part of y[i] into y_local[i] */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* complete all additions into the global result */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
375 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
  PCApplyTranspose_ASM - Applies the transpose of the preconditioner.
  Structurally identical to PCApply_ASM() except the block solves use
  KSPSolveTranspose() and the restrict/interpolate limiting is swapped.
  Scatter Begin/End pairs are split to overlap communication with the
  solves -- do not reorder them.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions, including the zero-length dummy blocks */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: copy the owned part of y[i] into y_local[i] */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* complete all additions into the global result */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
428 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
/*
  PCDestroy_ASM - Frees everything created in PCSetUp_ASM(): the block
  solvers, extracted submatrices, scatters, work vectors, and subdomain
  index sets, then the PC_ASM context itself.  Each group is guarded by a
  check that it was allocated, so this is safe even if setup never ran.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    /* only n_local_true solvers were created in PCSetUp_ASM() */
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    /* scatters and work vectors exist for all n_local slots, including the
       zero-length dummy blocks */
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  if (osm->is) {
    /* destroys both is[] and is_local[] (is_local may be PETSC_NULL) */
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
471 
472 #undef __FUNCT__
473 #define __FUNCT__ "PCSetFromOptions_ASM"
474 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
475 {
476   PC_ASM         *osm = (PC_ASM*)pc->data;
477   PetscErrorCode ierr;
478   PetscInt       blocks,ovl;
479   PetscTruth     symset,flg;
480   PCASMType      asmtype;
481 
482   PetscFunctionBegin;
483   /* set the type to symmetric if matrix is symmetric */
484   if (!osm->type_set && pc->pmat) {
485     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
486     if (symset && flg) { osm->type = PC_ASM_BASIC; }
487   }
488   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
489     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
490     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
491     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
492     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
493     flg  = PETSC_FALSE;
494     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
495     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
496   ierr = PetscOptionsTail();CHKERRQ(ierr);
497   PetscFunctionReturn(0);
498 }
499 
500 /*------------------------------------------------------------------------------------*/
501 
502 EXTERN_C_BEGIN
503 #undef __FUNCT__
504 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
505 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
506 {
507   PC_ASM         *osm = (PC_ASM*)pc->data;
508   PetscErrorCode ierr;
509   PetscInt       i;
510 
511   PetscFunctionBegin;
512   if (n < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
513   if (pc->setupcalled && (n != osm->n_local_true || is)) {
514     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetup().");
515   }
516   if (!pc->setupcalled) {
517     if (is) {
518       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
519     }
520     if (is_local) {
521       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
522     }
523     if (osm->is) {
524       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
525     }
526     osm->n_local_true = n;
527     osm->is           = 0;
528     osm->is_local     = 0;
529     if (is) {
530       ierr = PetscMalloc(n*sizeof(IS *),&osm->is);CHKERRQ(ierr);
531       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
532     }
533     if (is_local) {
534       ierr = PetscMalloc(n*sizeof(IS *),&osm->is_local);CHKERRQ(ierr);
535       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
536     }
537   }
538   PetscFunctionReturn(0);
539 }
540 EXTERN_C_END
541 
542 EXTERN_C_BEGIN
543 #undef __FUNCT__
544 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
545 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
546 {
547   PC_ASM         *osm = (PC_ASM*)pc->data;
548   PetscErrorCode ierr;
549   PetscMPIInt    rank,size;
550   PetscInt       n;
551 
552   PetscFunctionBegin;
553   if (N < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
554   if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
555 
556   /*
557      Split the subdomains equally amoung all processors
558   */
559   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
560   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
561   n = N/size + ((N % size) > rank);
562   if (!n) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
563   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetup().");
564   if (!pc->setupcalled) {
565     if (osm->is) {
566       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
567     }
568     osm->n_local_true = n;
569     osm->is           = 0;
570     osm->is_local     = 0;
571   }
572   PetscFunctionReturn(0);
573 }
574 EXTERN_C_END
575 
576 EXTERN_C_BEGIN
577 #undef __FUNCT__
578 #define __FUNCT__ "PCASMSetOverlap_ASM"
579 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
580 {
581   PC_ASM *osm = (PC_ASM*)pc->data;
582 
583   PetscFunctionBegin;
584   if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
585   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetup().");
586   if (!pc->setupcalled) {
587     osm->overlap = ovl;
588   }
589   PetscFunctionReturn(0);
590 }
591 EXTERN_C_END
592 
593 EXTERN_C_BEGIN
594 #undef __FUNCT__
595 #define __FUNCT__ "PCASMSetType_ASM"
596 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
597 {
598   PC_ASM *osm = (PC_ASM*)pc->data;
599 
600   PetscFunctionBegin;
601   osm->type     = type;
602   osm->type_set = PETSC_TRUE;
603   PetscFunctionReturn(0);
604 }
605 EXTERN_C_END
606 
607 EXTERN_C_BEGIN
608 #undef __FUNCT__
609 #define __FUNCT__ "PCASMSetSortIndices_ASM"
610 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices_ASM(PC pc,PetscTruth doSort)
611 {
612   PC_ASM *osm = (PC_ASM*)pc->data;
613 
614   PetscFunctionBegin;
615   osm->sort_indices = doSort;
616   PetscFunctionReturn(0);
617 }
618 EXTERN_C_END
619 
620 EXTERN_C_BEGIN
621 #undef __FUNCT__
622 #define __FUNCT__ "PCASMGetSubKSP_ASM"
623 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
624 {
625   PC_ASM         *osm = (PC_ASM*)pc->data;
626   PetscErrorCode ierr;
627 
628   PetscFunctionBegin;
629   if (osm->n_local_true < 1) {
630     SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
631   }
632 
633   if (n_local) {
634     *n_local = osm->n_local_true;
635   }
636   if (first_local) {
637     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
638     *first_local -= osm->n_local_true;
639   }
640   if (ksp) {
641     /* Assume that local solves are now different; not necessarily
642        true though!  This flag is used only for PCView_ASM() */
643     *ksp                   = osm->ksp;
644     osm->same_local_solves = PETSC_FALSE;
645   }
646   PetscFunctionReturn(0);
647 }
648 EXTERN_C_END
649 
650 
651 #undef __FUNCT__
652 #define __FUNCT__ "PCASMSetLocalSubdomains"
653 /*@C
654     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
655     only) for the additive Schwarz preconditioner.
656 
657     Collective on PC
658 
659     Input Parameters:
660 +   pc - the preconditioner context
661 .   n - the number of subdomains for this processor (default value = 1)
662 .   is - the index sets that define the subdomains for this processor
663          (or PETSC_NULL for PETSc to determine subdomains)
664 -   is_local - the index sets that define the local part of the subdomains for this processor
665          (or PETSC_NULL to use the default of 1 subdomain per process)
666 
667     Notes:
668     The IS numbering is in the parallel, global numbering of the vector.
669 
670     By default the ASM preconditioner uses 1 block per processor.
671 
672     These index sets cannot be destroyed until after completion of the
673     linear solves for which the ASM preconditioner is being used.
674 
675     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
676 
677     Level: advanced
678 
679 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
680 
681 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
682           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
683 @*/
684 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
685 {
686   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);
687 
688   PetscFunctionBegin;
689   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
690   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
691   if (f) {
692     ierr = (*f)(pc,n,is,is_local);CHKERRQ(ierr);
693   }
694   PetscFunctionReturn(0);
695 }
696 
697 #undef __FUNCT__
698 #define __FUNCT__ "PCASMSetTotalSubdomains"
699 /*@C
700     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
701     additive Schwarz preconditioner.  Either all or no processors in the
702     PC communicator must call this routine, with the same index sets.
703 
704     Collective on PC
705 
706     Input Parameters:
707 +   pc - the preconditioner context
708 .   n - the number of subdomains for all processors
709 .   is - the index sets that define the subdomains for all processor
710          (or PETSC_NULL for PETSc to determine subdomains)
711 -   is_local - the index sets that define the local part of the subdomains for this processor
712          (or PETSC_NULL to use the default of 1 subdomain per process)
713 
714     Options Database Key:
715     To set the total number of subdomain blocks rather than specify the
716     index sets, use the option
717 .    -pc_asm_blocks <blks> - Sets total blocks
718 
719     Notes:
720     Currently you cannot use this to set the actual subdomains with the argument is.
721 
722     By default the ASM preconditioner uses 1 block per processor.
723 
724     These index sets cannot be destroyed until after completion of the
725     linear solves for which the ASM preconditioner is being used.
726 
727     Use PCASMSetLocalSubdomains() to set local subdomains.
728 
729     Level: advanced
730 
731 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
732 
733 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
734           PCASMCreateSubdomains2D()
735 @*/
736 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
737 {
738   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);
739 
740   PetscFunctionBegin;
741   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
742   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
743   if (f) {
744     ierr = (*f)(pc,N,is,is_local);CHKERRQ(ierr);
745   }
746   PetscFunctionReturn(0);
747 }
748 
749 #undef __FUNCT__
750 #define __FUNCT__ "PCASMSetOverlap"
751 /*@
752     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
753     additive Schwarz preconditioner.  Either all or no processors in the
754     PC communicator must call this routine.
755 
756     Collective on PC
757 
758     Input Parameters:
759 +   pc  - the preconditioner context
760 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
761 
762     Options Database Key:
763 .   -pc_asm_overlap <ovl> - Sets overlap
764 
765     Notes:
766     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
768     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
769 
770     The overlap defaults to 1, so if one desires that no additional
771     overlap be computed beyond what may have been set with a call to
772     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
773     must be set to be 0.  In particular, if one does not explicitly set
774     the subdomains an application code, then all overlap would be computed
775     internally by PETSc, and using an overlap of 0 would result in an ASM
776     variant that is equivalent to the block Jacobi preconditioner.
777 
778     Note that one can define initial index sets with any overlap via
779     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
780     PCASMSetOverlap() merely allows PETSc to extend that overlap further
781     if desired.
782 
783     Level: intermediate
784 
785 .keywords: PC, ASM, set, overlap
786 
787 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
788           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
789 @*/
790 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
791 {
792   PetscErrorCode ierr,(*f)(PC,PetscInt);
793 
794   PetscFunctionBegin;
795   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
796   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
797   if (f) {
798     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
799   }
800   PetscFunctionReturn(0);
801 }
802 
803 #undef __FUNCT__
804 #define __FUNCT__ "PCASMSetType"
805 /*@
806     PCASMSetType - Sets the type of restriction and interpolation used
807     for local problems in the additive Schwarz method.
808 
809     Collective on PC
810 
811     Input Parameters:
812 +   pc  - the preconditioner context
813 -   type - variant of ASM, one of
814 .vb
815       PC_ASM_BASIC       - full interpolation and restriction
816       PC_ASM_RESTRICT    - full restriction, local processor interpolation
817       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
818       PC_ASM_NONE        - local processor restriction and interpolation
819 .ve
820 
821     Options Database Key:
822 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
823 
824     Level: intermediate
825 
826 .keywords: PC, ASM, set, type
827 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
829           PCASMCreateSubdomains2D()
830 @*/
831 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
832 {
833   PetscErrorCode ierr,(*f)(PC,PCASMType);
834 
835   PetscFunctionBegin;
836   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
837   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
838   if (f) {
839     ierr = (*f)(pc,type);CHKERRQ(ierr);
840   }
841   PetscFunctionReturn(0);
842 }
843 
844 #undef __FUNCT__
845 #define __FUNCT__ "PCASMSetSortIndices"
846 /*@
847     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
848 
849     Collective on PC
850 
851     Input Parameters:
852 +   pc  - the preconditioner context
853 -   doSort - sort the subdomain indices
854 
855     Level: intermediate
856 
857 .keywords: PC, ASM, set, type
858 
859 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
860           PCASMCreateSubdomains2D()
861 @*/
862 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices(PC pc,PetscTruth doSort)
863 {
864   PetscErrorCode ierr,(*f)(PC,PetscTruth);
865 
866   PetscFunctionBegin;
867   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
868   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetSortIndices_C",(void (**)(void))&f);CHKERRQ(ierr);
869   if (f) {
870     ierr = (*f)(pc,doSort);CHKERRQ(ierr);
871   }
872   PetscFunctionReturn(0);
873 }
874 
875 #undef __FUNCT__
876 #define __FUNCT__ "PCASMGetSubKSP"
877 /*@C
878    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
879    this processor.
880 
881    Collective on PC iff first_local is requested
882 
883    Input Parameter:
884 .  pc - the preconditioner context
885 
886    Output Parameters:
887 +  n_local - the number of blocks on this processor or PETSC_NULL
888 .  first_local - the global number of the first block on this processor or PETSC_NULL,
889                  all processors must request or all must pass PETSC_NULL
890 -  ksp - the array of KSP contexts
891 
892    Note:
893    After PCASMGetSubKSP() the array of KSPes is not to be freed
894 
895    Currently for some matrix implementations only 1 block per processor
896    is supported.
897 
898    You must call KSPSetUp() before calling PCASMGetSubKSP().
899 
900    Level: advanced
901 
902 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
903 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
906 @*/
907 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
908 {
909   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
910 
911   PetscFunctionBegin;
912   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
913   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
914   if (f) {
915     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
916   } else {
917     SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
918   }
919 
920  PetscFunctionReturn(0);
921 }
922 
923 /* -------------------------------------------------------------------------------------*/
924 /*MC
925    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
926            its own KSP object.
927 
928    Options Database Keys:
929 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
930 .  -pc_asm_blocks <blks> - Sets total blocks
931 .  -pc_asm_overlap <ovl> - Sets overlap
932 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
933 
934      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
935       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
936       -pc_asm_type basic to use the standard ASM.
937 
938    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
939      than one processor. Defaults to one block per processor.
940 
941      To set options on the solvers for each block append -sub_ to all the KSP, and PC
942         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
943 
944      To set the options on the solvers separate for each block call PCASMGetSubKSP()
945          and set the options directly on the resulting KSP object (you can access its PC
946          with KSPGetPC())
947 
948 
949    Level: beginner
950 
951    Concepts: additive Schwarz method
952 
953     References:
954     An additive variant of the Schwarz alternating method for the case of many subregions
955     M Dryja, OB Widlund - Courant Institute, New York University Technical report
956 
957     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
958     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
959 
960 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
961            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
962            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
963 
964 M*/
965 
966 EXTERN_C_BEGIN
967 #undef __FUNCT__
968 #define __FUNCT__ "PCCreate_ASM"
969 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
970 {
971   PetscErrorCode ierr;
972   PC_ASM         *osm;
973 
974   PetscFunctionBegin;
975   ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
976   osm->n                 = PETSC_DECIDE;
977   osm->n_local           = 0;
978   osm->n_local_true      = 0;
979   osm->overlap           = 1;
980   osm->ksp               = 0;
981   osm->restriction       = 0;
982   osm->localization      = 0;
983   osm->prolongation      = 0;
984   osm->x                 = 0;
985   osm->y                 = 0;
986   osm->y_local           = 0;
987   osm->is                = 0;
988   osm->is_local          = 0;
989   osm->mat               = 0;
990   osm->pmat              = 0;
991   osm->type              = PC_ASM_RESTRICT;
992   osm->same_local_solves = PETSC_TRUE;
993   osm->sort_indices      = PETSC_TRUE;
994 
995   pc->data                   = (void*)osm;
996   pc->ops->apply             = PCApply_ASM;
997   pc->ops->applytranspose    = PCApplyTranspose_ASM;
998   pc->ops->setup             = PCSetUp_ASM;
999   pc->ops->destroy           = PCDestroy_ASM;
1000   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
1001   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
1002   pc->ops->view              = PCView_ASM;
1003   pc->ops->applyrichardson   = 0;
1004 
1005   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
1006                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1007   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
1008                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1009   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
1010                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
1011   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
1012                     PCASMSetType_ASM);CHKERRQ(ierr);
1013   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
1014                     PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1015   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
1016                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1017   PetscFunctionReturn(0);
1018 }
1019 EXTERN_C_END
1020 
1021 
1022 #undef __FUNCT__
1023 #define __FUNCT__ "PCASMCreateSubdomains"
1024 /*@C
1025    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1027 
1028    Collective
1029 
1030    Input Parameters:
1031 +  A - The global matrix operator
1032 -  n - the number of local blocks
1033 
1034    Output Parameters:
1035 .  outis - the array of index sets defining the subdomains
1036 
1037    Level: advanced
1038 
1039    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1040     from these if you use PCASMSetLocalSubdomains()
1041 
1042     In the Fortran version you must provide the array outis[] already allocated of length n.
1043 
1044 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1045 
1046 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1047 @*/
1048 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1049 {
1050   MatPartitioning           mpart;
1051   const char                *prefix;
1052   PetscErrorCode            (*f)(Mat,PetscTruth*,MatReuse,Mat*);
1053   PetscMPIInt               size;
1054   PetscInt                  i,j,rstart,rend,bs;
1055   PetscTruth                iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1056   Mat                       Ad = PETSC_NULL, adj;
1057   IS                        ispart,isnumb,*is;
1058   PetscErrorCode            ierr;
1059 
1060   PetscFunctionBegin;
1061   PetscValidHeaderSpecific(A,MAT_COOKIE,1);
1062   PetscValidPointer(outis,3);
1063   if (n < 1) {SETERRQ1(PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);}
1064 
1065   /* Get prefix, row distribution, and block size */
1066   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1067   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1068   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1069   if (rstart/bs*bs != rstart || rend/bs*bs != rend) {
1070     SETERRQ3(PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1071   }
1072   /* Get diagonal block from matrix if possible */
1073   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1074   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1075   if (f) {
1076     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1077   } else if (size == 1) {
1078     iscopy = PETSC_FALSE; Ad = A;
1079   } else {
1080     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1081   }
1082   if (Ad) {
1083     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1084     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1085   }
1086   if (Ad && n > 1) {
1087     PetscTruth match,done;
1088     /* Try to setup a good matrix partitioning if available */
1089     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1090     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1091     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1092     ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_CURRENT,&match);CHKERRQ(ierr);
1093     if (!match) {
1094       ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_SQUARE,&match);CHKERRQ(ierr);
1095     }
1096     if (!match) { /* assume a "good" partitioner is available */
1097       PetscInt na,*ia,*ja;
1098       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1099       if (done) {
1100 	/* Build adjacency matrix by hand. Unfortunately a call to
1101 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1102 	   remove the block-aij structure and we cannot expect
1103 	   MatPartitioning to split vertices as we need */
1104 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1105 	nnz = 0;
1106 	for (i=0; i<na; i++) { /* count number of nonzeros */
1107 	  len = ia[i+1] - ia[i];
1108 	  row = ja + ia[i];
1109 	  for (j=0; j<len; j++) {
1110 	    if (row[j] == i) { /* don't count diagonal */
1111 	      len--; break;
1112 	    }
1113 	  }
1114 	  nnz += len;
1115 	}
1116 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1117 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1118 	nnz    = 0;
1119 	iia[0] = 0;
1120 	for (i=0; i<na; i++) { /* fill adjacency */
1121 	  cnt = 0;
1122 	  len = ia[i+1] - ia[i];
1123 	  row = ja + ia[i];
1124 	  for (j=0; j<len; j++) {
1125 	    if (row[j] != i) { /* if not diagonal */
1126 	      jja[nnz+cnt++] = row[j];
1127 	    }
1128 	  }
1129 	  nnz += cnt;
1130 	  iia[i+1] = nnz;
1131 	}
1132 	/* Partitioning of the adjacency matrix */
1133 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1134 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1135 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1136 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1137 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1138 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1139 	foundpart = PETSC_TRUE;
1140       }
1141       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1142     }
1143     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1144   }
1145   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1146 
1147   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1148   *outis = is;
1149 
1150   if (!foundpart) {
1151 
1152     /* Partitioning by contiguous chunks of rows */
1153 
1154     PetscInt mbs   = (rend-rstart)/bs;
1155     PetscInt start = rstart;
1156     for (i=0; i<n; i++) {
1157       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1158       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1159       start += count;
1160     }
1161 
1162   } else {
1163 
1164     /* Partitioning by adjacency of diagonal block  */
1165 
1166     const PetscInt *numbering;
1167     PetscInt       *count,nidx,*indices,*newidx,start=0;
1168     /* Get node count in each partition */
1169     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1170     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1171     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1172       for (i=0; i<n; i++) count[i] *= bs;
1173     }
1174     /* Build indices from node numbering */
1175     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1176     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1177     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1178     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1179     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1180     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1181     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1182       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1183       for (i=0; i<nidx; i++)
1184 	for (j=0; j<bs; j++)
1185 	  newidx[i*bs+j] = indices[i]*bs + j;
1186       ierr = PetscFree(indices);CHKERRQ(ierr);
1187       nidx   *= bs;
1188       indices = newidx;
1189     }
1190     /* Shift to get global indices */
1191     for (i=0; i<nidx; i++) indices[i] += rstart;
1192 
1193     /* Build the index sets for each block */
1194     for (i=0; i<n; i++) {
1195       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],&is[i]);CHKERRQ(ierr);
1196       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1197       start += count[i];
1198     }
1199 
1200     ierr = PetscFree(count);
1201     ierr = PetscFree(indices);
1202     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1203     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1204 
1205   }
1206 
1207   PetscFunctionReturn(0);
1208 }
1209 
1210 #undef __FUNCT__
1211 #define __FUNCT__ "PCASMDestroySubdomains"
1212 /*@C
1213    PCASMDestroySubdomains - Destroys the index sets created with
1214    PCASMCreateSubdomains(). Should be called after setting subdomains
1215    with PCASMSetLocalSubdomains().
1216 
1217    Collective
1218 
1219    Input Parameters:
1220 +  n - the number of index sets
1221 .  is - the array of index sets
1222 -  is_local - the array of local index sets, can be PETSC_NULL
1223 
1224    Level: advanced
1225 
1226 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1227 
1228 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1229 @*/
1230 PetscErrorCode PETSCKSP_DLLEXPORT PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1231 {
1232   PetscInt       i;
1233   PetscErrorCode ierr;
1234   PetscFunctionBegin;
1235   if (n <= 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1236   PetscValidPointer(is,2);
1237   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1238   ierr = PetscFree(is);CHKERRQ(ierr);
1239   if (is_local) {
1240     PetscValidPointer(is_local,3);
1241     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1242     ierr = PetscFree(is_local);CHKERRQ(ierr);
1243   }
1244   PetscFunctionReturn(0);
1245 }
1246 
1247 #undef __FUNCT__
1248 #define __FUNCT__ "PCASMCreateSubdomains2D"
1249 /*@
1250    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1251    preconditioner for a two-dimensional problem on a regular grid.
1252 
1253    Not Collective
1254 
1255    Input Parameters:
1256 +  m, n - the number of mesh points in the x and y directions
1257 .  M, N - the number of subdomains in the x and y directions
1258 .  dof - degrees of freedom per node
1259 -  overlap - overlap in mesh lines
1260 
1261    Output Parameters:
1262 +  Nsub - the number of subdomains created
1263 -  is - the array of index sets defining the subdomains
1264 
1265    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1267    preconditioners.  More general related routines are
1268    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1269 
1270    Level: advanced
1271 
1272 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1273 
1274 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1275           PCASMSetOverlap()
1276 @*/
1277 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
1278 {
1279   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
1280   PetscErrorCode ierr;
1281   PetscInt       nidx,*idx,loc,ii,jj,count;
1282 
1283   PetscFunctionBegin;
1284   if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");
1285 
1286   *Nsub = N*M;
1287   ierr = PetscMalloc((*Nsub)*sizeof(IS *),is);CHKERRQ(ierr);
1288   ystart = 0;
1289   loc_outter = 0;
1290   for (i=0; i<N; i++) {
1291     height = n/N + ((n % N) > i); /* height of subdomain */
1292     if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1293     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1294     yright = ystart + height + overlap; if (yright > n) yright = n;
1295     xstart = 0;
1296     for (j=0; j<M; j++) {
1297       width = m/M + ((m % M) > j); /* width of subdomain */
1298       if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1299       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1300       xright = xstart + width + overlap; if (xright > m) xright = m;
1301       nidx   = (xright - xleft)*(yright - yleft);
1302       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1303       loc    = 0;
1304       for (ii=yleft; ii<yright; ii++) {
1305         count = m*ii + xleft;
1306         for (jj=xleft; jj<xright; jj++) {
1307           idx[loc++] = count++;
1308         }
1309       }
1310       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
1311       ierr = PetscFree(idx);CHKERRQ(ierr);
1312       xstart += width;
1313     }
1314     ystart += height;
1315   }
1316   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1317   PetscFunctionReturn(0);
1318 }
1319 
1320 #undef __FUNCT__
1321 #define __FUNCT__ "PCASMGetLocalSubdomains"
1322 /*@C
1323     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1324     only) for the additive Schwarz preconditioner.
1325 
1326     Collective on PC
1327 
1328     Input Parameter:
1329 .   pc - the preconditioner context
1330 
1331     Output Parameters:
1332 +   n - the number of subdomains for this processor (default value = 1)
1333 .   is - the index sets that define the subdomains for this processor
1334 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1335 
1336 
1337     Notes:
1338     The IS numbering is in the parallel, global numbering of the vector.
1339 
1340     Level: advanced
1341 
1342 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1343 
1344 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1345           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1346 @*/
1347 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1348 {
1349   PC_ASM         *osm;
1350   PetscErrorCode ierr;
1351   PetscTruth     match;
1352 
1353   PetscFunctionBegin;
1354   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1355   PetscValidIntPointer(n,2);
1356   if (is) PetscValidPointer(is,3);
1357   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1358   if (!match) {
1359     if (n)  *n  = 0;
1360     if (is) *is = PETSC_NULL;
1361   } else {
1362     osm = (PC_ASM*)pc->data;
1363     if (n)  *n  = osm->n_local_true;
1364     if (is) *is = osm->is;
1365     if (is_local) *is_local = osm->is_local;
1366   }
1367   PetscFunctionReturn(0);
1368 }
1369 
1370 #undef __FUNCT__
1371 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1372 /*@C
1373     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1374     only) for the additive Schwarz preconditioner.
1375 
1376     Collective on PC
1377 
1378     Input Parameter:
1379 .   pc - the preconditioner context
1380 
1381     Output Parameters:
1382 +   n - the number of matrices for this processor (default value = 1)
1383 -   mat - the matrices
1384 
1385 
1386     Level: advanced
1387 
1388 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1389 
1390 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1391           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1392 @*/
1393 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1394 {
1395   PC_ASM         *osm;
1396   PetscErrorCode ierr;
1397   PetscTruth     match;
1398 
1399   PetscFunctionBegin;
1400   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1401   PetscValidIntPointer(n,2);
1402   if (mat) PetscValidPointer(mat,3);
1403   if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1404   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1405   if (!match) {
1406     if (n)   *n   = 0;
1407     if (mat) *mat = PETSC_NULL;
1408   } else {
1409     osm = (PC_ASM*)pc->data;
1410     if (n)   *n   = osm->n_local_true;
1411     if (mat) *mat = osm->pmat;
1412   }
1413   PetscFunctionReturn(0);
1414 }
1415