xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 84df9cb40eca90ea9b18a456fab7a4ecc7f6c1a4)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
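
       For example (illustration of the counting scheme): with 2 processors,
       where processor 0 supplies 3 subdomains and processor 1 supplies 1, we
       have n = 4, n_local_true = 3 and 1 respectively, and n_local = 3 on
       both processors; the extra "phantom" subdomains on processor 1 are
       given zero-length work vectors in PCSetUp_ASM().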
12 */
13 #include <private/pcimpl.h>     /*I "petscpc.h" I*/
14 
15 typedef struct {
16   PetscInt   n, n_local, n_local_true;
17   PetscInt   overlap;             /* overlap requested by user */
18   KSP        *ksp;                /* linear solvers for each block */
19   VecScatter *restriction;        /* mapping from global to subregion */
20   VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
21   VecScatter *prolongation;       /* mapping from subregion to global */
22   Vec        *x,*y,*y_local;      /* work vectors */
23   IS         *is;                 /* index set that defines each overlapping subdomain */
24   IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
25   Mat        *mat,*pmat;          /* mat is not currently used */
26   PCASMType  type;                /* use reduced interpolation, restriction or both */
27   PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
28   PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
29   PetscBool  sort_indices;        /* flag to sort subdomain indices */
30 } PC_ASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_ASM"
34 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
35 {
36   PC_ASM         *osm = (PC_ASM*)pc->data;
37   PetscErrorCode ierr;
38   PetscMPIInt    rank;
39   PetscInt       i,bsz;
40   PetscBool      iascii,isstring;
41   PetscViewer    sviewer;
42 
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
47   if (iascii) {
48     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
49     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
50     if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof blocks,"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
51     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
52     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
53     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
54     if (osm->same_local_solves) {
55       if (osm->ksp) {
56         ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
57         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
58         if (!rank) {
59           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
60           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
61           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
62         }
63         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
64       }
65     } else {
66       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
67       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
68       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
69       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
70       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
71       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
72       for (i=0; i<osm->n_local; i++) {
73         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
74         if (i < osm->n_local_true) {
75           ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
76           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
77           ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
78           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
79         }
80         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
81       }
82       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
83       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
84       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
85     }
86   } else if (isstring) {
87     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
88     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
89     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
90     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
91   } else {
92     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
93   }
94   PetscFunctionReturn(0);
95 }
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "PCASMPrintSubdomains"
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm  = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer;
105   PetscInt       i,j,nidx;
106   const PetscInt *idx;
107   PetscErrorCode ierr;
108 
109   PetscFunctionBegin;
110   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
111   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
112   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
113   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
114   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
115   for (i=0;i<osm->n_local_true;i++) {
116     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
117     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
118     for (j=0; j<nidx; j++) {
119       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
120     }
121     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
122     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
123     if (osm->is_local) {
124       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
125       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
126       for (j=0; j<nidx; j++) {
127         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
128       }
129       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
130       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
131     }
132   }
133   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
134   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
135   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
136   PetscFunctionReturn(0);
137 }
138 
139 #undef __FUNCT__
140 #define __FUNCT__ "PCSetUp_ASM"
141 static PetscErrorCode PCSetUp_ASM(PC pc)
142 {
143   PC_ASM         *osm  = (PC_ASM*)pc->data;
144   PetscErrorCode ierr;
145   PetscBool      symset,flg;
146   PetscInt       i,m,m_local,firstRow,lastRow;
147   PetscMPIInt    size;
148   MatReuse       scall = MAT_REUSE_MATRIX;
149   IS             isl;
150   KSP            ksp;
151   PC             subpc;
152   const char     *prefix,*pprefix;
153   Vec            vec;
154 
155   PetscFunctionBegin;
156   if (!pc->setupcalled) {
157 
158     if (!osm->type_set) {
159       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
160       if (symset && flg) { osm->type = PC_ASM_BASIC; }
161     }
162 
163     if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
164       /* no subdomains given, use one per processor */
165       osm->n_local = osm->n_local_true = 1;
166       ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
167       osm->n = size;
168     } else if (osm->n == PETSC_DECIDE) {
169       /* determine global number of subdomains */
170       PetscInt inwork[2],outwork[2];
171       inwork[0] = inwork[1] = osm->n_local_true;
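      /* PetscMaxSum_Op reduces the pair in one call: entry 0 becomes the maximum over all processors, entry 1 the sum */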
172       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
173       osm->n_local = outwork[0];
174       osm->n       = outwork[1];
175     }
176 
177     if (!osm->is){ /* create the index sets */
178       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
179     }
180     if (osm->n_local_true > 1 && !osm->is_local) {
181       ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
182       for (i=0; i<osm->n_local_true; i++) {
183         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
184           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
185           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
186         } else {
187           ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
188           osm->is_local[i] = osm->is[i];
189         }
190       }
191     }
192     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
193     flg  = PETSC_FALSE;
194     ierr = PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
195     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
196 
197     if (osm->overlap > 0) {
198       /* Extend the "overlapping" regions by a number of steps */
199       ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
200     }
201     if (osm->sort_indices) {
202       for (i=0; i<osm->n_local_true; i++) {
203         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
204         if (osm->is_local) {
205           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
206         }
207       }
208     }
209 
210     /* Create the local work vectors and scatter contexts */
211     ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
212     ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->restriction);CHKERRQ(ierr);
213     if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->localization);CHKERRQ(ierr);}
214     ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->prolongation);CHKERRQ(ierr);
215     ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
216     ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y);CHKERRQ(ierr);
217     ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y_local);CHKERRQ(ierr);
218     ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
219     for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
220       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
221       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
222       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
223       ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
224       ierr = ISDestroy(&isl);CHKERRQ(ierr);
225       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
226       if (osm->is_local) {
227         ISLocalToGlobalMapping ltog;
228         IS                     isll;
229         const PetscInt         *idx_local;
230         PetscInt               *idx,nout;
231 
232         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
233         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
234         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
235         ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
236         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
237         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
238         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
239         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
240         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
241         ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
242         ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
243         ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
244         ierr = ISDestroy(&isll);CHKERRQ(ierr);
245 
246         ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
247         ierr = ISDestroy(&isl);CHKERRQ(ierr);
248       } else {
249         ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
250         osm->y_local[i] = osm->y[i];
251         ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
252         osm->prolongation[i] = osm->restriction[i];
253         ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
254       }
255     }
256     if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Specified ASM subdomain sizes were invalid: %D != %D",firstRow,lastRow);
257     for (i=osm->n_local_true; i<osm->n_local; i++) {
258       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
259       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
260       ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
261       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
262       ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
263       if (osm->is_local) {
264         ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
265         ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
266       } else {
267         osm->prolongation[i] = osm->restriction[i];
268         ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
269       }
270       ierr = ISDestroy(&isl);CHKERRQ(ierr);
271     }
272     ierr = VecDestroy(&vec);CHKERRQ(ierr);
273 
274     if (!osm->ksp) {
275       /* Create the local solvers */
276       ierr = PetscMalloc(osm->n_local_true*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
277       for (i=0; i<osm->n_local_true; i++) {
278         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
279         ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
280         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
281         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
282         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
283         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
284         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
285         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
286         osm->ksp[i] = ksp;
287       }
288     }
289     scall = MAT_INITIAL_MATRIX;
290   } else {
291     /*
292        Destroy the blocks from the previous iteration
293     */
294     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
295       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
296       scall = MAT_INITIAL_MATRIX;
297     }
298   }
299 
300   /*
301      Extract out the submatrices
302   */
303   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
304   if (scall == MAT_INITIAL_MATRIX) {
305     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
306     for (i=0; i<osm->n_local_true; i++) {
307       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
308       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
309     }
310   }
311 
312   /* Return control to the user so that the submatrices can be modified (e.g., to apply
313      different boundary conditions for the submatrices than for the global problem) */
314   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
315 
316   /*
317      Loop over subdomains putting them into local ksp
318   */
319   for (i=0; i<osm->n_local_true; i++) {
320     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
321     if (!pc->setupcalled) {
322       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
323     }
324   }
325 
326   PetscFunctionReturn(0);
327 }
328 
329 #undef __FUNCT__
330 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
331 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
332 {
333   PC_ASM         *osm = (PC_ASM*)pc->data;
334   PetscErrorCode ierr;
335   PetscInt       i;
336 
337   PetscFunctionBegin;
338   for (i=0; i<osm->n_local_true; i++) {
339     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
340   }
341   PetscFunctionReturn(0);
342 }
343 
344 #undef __FUNCT__
345 #define __FUNCT__ "PCApply_ASM"
346 static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
347 {
348   PC_ASM         *osm = (PC_ASM*)pc->data;
349   PetscErrorCode ierr;
350   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
351   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
352 
353   PetscFunctionBegin;
354   /*
355      Support for limiting the restriction or interpolation to only local
356      subdomain values (leaving the other values 0).
357   */
358   if (!(osm->type & PC_ASM_RESTRICT)) {
359     forward = SCATTER_FORWARD_LOCAL;
360     /* have to zero the work RHS since scatter may leave some slots empty */
361     for (i=0; i<n_local_true; i++) {
362       ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
363     }
364   }
365   if (!(osm->type & PC_ASM_INTERPOLATE)) {
366     reverse = SCATTER_REVERSE_LOCAL;
367   }
368 
369   for (i=0; i<n_local; i++) {
370     ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
371   }
372   ierr = VecZeroEntries(y);CHKERRQ(ierr);
373   /* do the local solves */
374   for (i=0; i<n_local_true; i++) {
375     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
376     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
377     if (osm->localization) {
378       ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
379       ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
380     }
381     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
382   }
383   /* handle the rest of the scatters that do not have local solves */
384   for (i=n_local_true; i<n_local; i++) {
385     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
386     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
387   }
388   for (i=0; i<n_local; i++) {
389     ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
390   }
391   PetscFunctionReturn(0);
392 }
393 
394 #undef __FUNCT__
395 #define __FUNCT__ "PCApplyTranspose_ASM"
396 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
397 {
398   PC_ASM         *osm = (PC_ASM*)pc->data;
399   PetscErrorCode ierr;
400   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
401   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
402 
403   PetscFunctionBegin;
404   /*
405      Support for limiting the restriction or interpolation to only local
406      subdomain values (leaving the other values 0).
407 
408      Note: these are reversed from the PCApply_ASM() because we are applying the
409      transpose of the three terms
410   */
411   if (!(osm->type & PC_ASM_INTERPOLATE)) {
412     forward = SCATTER_FORWARD_LOCAL;
413     /* have to zero the work RHS since scatter may leave some slots empty */
414     for (i=0; i<n_local_true; i++) {
415       ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
416     }
417   }
418   if (!(osm->type & PC_ASM_RESTRICT)) {
419     reverse = SCATTER_REVERSE_LOCAL;
420   }
421 
422   for (i=0; i<n_local; i++) {
423     ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
424   }
425   ierr = VecZeroEntries(y);CHKERRQ(ierr);
426   /* do the local solves */
427   for (i=0; i<n_local_true; i++) {
428     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
429     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
430     if (osm->localization) {
431       ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
432       ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
433     }
434     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
435   }
436   /* handle the rest of the scatters that do not have local solves */
437   for (i=n_local_true; i<n_local; i++) {
438     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
439     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
440   }
441   for (i=0; i<n_local; i++) {
442     ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
443   }
444   PetscFunctionReturn(0);
445 }
446 
447 #undef __FUNCT__
448 #define __FUNCT__ "PCReset_ASM"
449 static PetscErrorCode PCReset_ASM(PC pc)
450 {
451   PC_ASM         *osm = (PC_ASM*)pc->data;
452   PetscErrorCode ierr;
453   PetscInt       i;
454 
455   PetscFunctionBegin;
456   if (osm->ksp) {
457     for (i=0; i<osm->n_local_true; i++) {
458       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
459     }
460   }
461   if (osm->pmat) {
462     if (osm->n_local_true > 0) {
463       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
464     }
465   }
466   if (osm->restriction) {
467     for (i=0; i<osm->n_local; i++) {
468       ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
469       if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
470       ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
471       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
472       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
473       ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
474     }
475     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
476     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
477     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
478     ierr = PetscFree(osm->x);CHKERRQ(ierr);
479     ierr = PetscFree(osm->y);CHKERRQ(ierr);
480     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
481   }
482   if (osm->is) {
483     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
484     osm->is       = 0;
485     osm->is_local = 0;
486   }
487   PetscFunctionReturn(0);
488 }
489 
490 #undef __FUNCT__
491 #define __FUNCT__ "PCDestroy_ASM"
492 static PetscErrorCode PCDestroy_ASM(PC pc)
493 {
494   PC_ASM         *osm = (PC_ASM*)pc->data;
495   PetscErrorCode ierr;
496   PetscInt       i;
497 
498   PetscFunctionBegin;
499   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
500   if (osm->ksp) {
501     for (i=0; i<osm->n_local_true; i++) {
502       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
503     }
504     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
505   }
506   ierr = PetscFree(pc->data);CHKERRQ(ierr);
507   PetscFunctionReturn(0);
508 }
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "PCSetFromOptions_ASM"
512 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
513 {
514   PC_ASM         *osm = (PC_ASM*)pc->data;
515   PetscErrorCode ierr;
516   PetscInt       blocks,ovl;
517   PetscBool      symset,flg;
518   PCASMType      asmtype;
519 
520   PetscFunctionBegin;
521   /* set the type to symmetric if matrix is symmetric */
522   if (!osm->type_set && pc->pmat) {
523     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
524     if (symset && flg) { osm->type = PC_ASM_BASIC; }
525   }
526   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
527     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
528     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
529     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
530     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
531     flg  = PETSC_FALSE;
532     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
533     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
534   ierr = PetscOptionsTail();CHKERRQ(ierr);
535   PetscFunctionReturn(0);
536 }
537 
538 /*------------------------------------------------------------------------------------*/
539 
540 EXTERN_C_BEGIN
541 #undef __FUNCT__
542 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
543 PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
544 {
545   PC_ASM         *osm = (PC_ASM*)pc->data;
546   PetscErrorCode ierr;
547   PetscInt       i;
548 
549   PetscFunctionBegin;
550   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
551   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
552 
553   if (!pc->setupcalled) {
554     if (is) {
555       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
556     }
557     if (is_local) {
558       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
559     }
560     if (osm->is) {
561       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
562     }
563     osm->n_local_true = n;
564     osm->is           = 0;
565     osm->is_local     = 0;
566     if (is) {
567       ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
568       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
569       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
570       osm->overlap = -1;
571     }
572     if (is_local) {
573       ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
574       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
575     }
576   }
577   PetscFunctionReturn(0);
578 }
579 EXTERN_C_END
580 
581 EXTERN_C_BEGIN
582 #undef __FUNCT__
583 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
584 PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
585 {
586   PC_ASM         *osm = (PC_ASM*)pc->data;
587   PetscErrorCode ierr;
588   PetscMPIInt    rank,size;
589   PetscInt       n;
590 
591   PetscFunctionBegin;
592   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
593   if (is || is_local) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");
594 
595   /*
596      Split the subdomains equally among all processors
597   */
598   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
599   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
600   n = N/size + ((N % size) > rank);
601   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
602   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
603   if (!pc->setupcalled) {
604     if (osm->is) {
605       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
606     }
607     osm->n_local_true = n;
608     osm->is           = 0;
609     osm->is_local     = 0;
610   }
611   PetscFunctionReturn(0);
612 }
613 EXTERN_C_END
614 
615 EXTERN_C_BEGIN
616 #undef __FUNCT__
617 #define __FUNCT__ "PCASMSetOverlap_ASM"
618 PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
619 {
620   PC_ASM *osm = (PC_ASM*)pc->data;
621 
622   PetscFunctionBegin;
623   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
624   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
625   if (!pc->setupcalled) {
626     osm->overlap = ovl;
627   }
628   PetscFunctionReturn(0);
629 }
630 EXTERN_C_END
631 
632 EXTERN_C_BEGIN
633 #undef __FUNCT__
634 #define __FUNCT__ "PCASMSetType_ASM"
635 PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
636 {
637   PC_ASM *osm = (PC_ASM*)pc->data;
638 
639   PetscFunctionBegin;
640   osm->type     = type;
641   osm->type_set = PETSC_TRUE;
642   PetscFunctionReturn(0);
643 }
644 EXTERN_C_END
645 
646 EXTERN_C_BEGIN
647 #undef __FUNCT__
648 #define __FUNCT__ "PCASMSetSortIndices_ASM"
649 PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
650 {
651   PC_ASM *osm = (PC_ASM*)pc->data;
652 
653   PetscFunctionBegin;
654   osm->sort_indices = doSort;
655   PetscFunctionReturn(0);
656 }
657 EXTERN_C_END
658 
659 EXTERN_C_BEGIN
660 #undef __FUNCT__
661 #define __FUNCT__ "PCASMGetSubKSP_ASM"
662 PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
663 {
664   PC_ASM         *osm = (PC_ASM*)pc->data;
665   PetscErrorCode ierr;
666 
667   PetscFunctionBegin;
668   if (osm->n_local_true < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
669 
670   if (n_local) {
671     *n_local = osm->n_local_true;
672   }
673   if (first_local) {
674     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
675     *first_local -= osm->n_local_true;
676   }
677   if (ksp) {
678     /* Assume that local solves are now different; not necessarily
679        true though!  This flag is used only for PCView_ASM() */
680     *ksp                   = osm->ksp;
681     osm->same_local_solves = PETSC_FALSE;
682   }
683   PetscFunctionReturn(0);
684 }
685 EXTERN_C_END
686 
687 
688 #undef __FUNCT__
689 #define __FUNCT__ "PCASMSetLocalSubdomains"
690 /*@C
691     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
692 
693     Collective on PC
694 
695     Input Parameters:
696 +   pc - the preconditioner context
697 .   n - the number of subdomains for this processor (default value = 1)
698 .   is - the index sets that define the subdomains for this processor
699          (or PETSC_NULL for PETSc to determine subdomains)
700 -   is_local - the index sets that define the local part of the subdomains for this processor
701          (or PETSC_NULL to use the default of 1 subdomain per process)
702 
703     Notes:
704     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
705 
706     By default the ASM preconditioner uses 1 block per processor.
707 
708     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
709 
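    Example (a minimal sketch, assuming the caller has already built two
    overlapping index sets is0 and is1 for this processor; error checking
    is elided):
.vb
      IS subdomains[2];
      subdomains[0] = is0; subdomains[1] = is1;
      PCASMSetLocalSubdomains(pc,2,subdomains,PETSC_NULL);
.ve
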
710     Level: advanced
711 
712 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
713 
714 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
715           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
716 @*/
717 PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
718 {
719   PetscErrorCode ierr;
720 
721   PetscFunctionBegin;
722   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
723   ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
724   PetscFunctionReturn(0);
725 }
726 
727 #undef __FUNCT__
728 #define __FUNCT__ "PCASMSetTotalSubdomains"
729 /*@C
730     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
731     additive Schwarz preconditioner.  Either all or no processors in the
732     PC communicator must call this routine, with the same index sets.
733 
734     Collective on PC
735 
736     Input Parameters:
737 +   pc - the preconditioner context
738 .   N - the total number of subdomains for all processors
739 .   is - the index sets that define the subdomains for all processors
740          (or PETSC_NULL for PETSc to determine subdomains)
741 -   is_local - the index sets that define the local part of the subdomains for this processor
742          (or PETSC_NULL to use the default of 1 subdomain per process)
743 
744     Options Database Key:
745     To set the total number of subdomain blocks rather than specify the
746     index sets, use the option
747 .    -pc_asm_blocks <blks> - Sets total blocks
748 
749     Notes:
750     Currently you cannot use this routine to set the actual subdomains; the is argument must be PETSC_NULL.
751 
752     By default the ASM preconditioner uses 1 block per processor.
753 
754     These index sets cannot be destroyed until after completion of the
755     linear solves for which the ASM preconditioner is being used.
756 
757     Use PCASMSetLocalSubdomains() to set local subdomains.
758 
759     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
760 
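    Example (a minimal sketch; only the total block count can currently be
    set, so both index set arguments are PETSC_NULL):
.vb
      PCASMSetTotalSubdomains(pc,8,PETSC_NULL,PETSC_NULL);
.ve
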
761     Level: advanced
762 
763 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
764 
765 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
766           PCASMCreateSubdomains2D()
767 @*/
768 PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
769 {
770   PetscErrorCode ierr;
771 
772   PetscFunctionBegin;
773   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
774   ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
775   PetscFunctionReturn(0);
776 }
777 
778 #undef __FUNCT__
779 #define __FUNCT__ "PCASMSetOverlap"
780 /*@
781     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
782     additive Schwarz preconditioner.  Either all or no processors in the
783     PC communicator must call this routine.
784 
785     Logically Collective on PC
786 
787     Input Parameters:
788 +   pc  - the preconditioner context
789 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
790 
791     Options Database Key:
792 .   -pc_asm_overlap <ovl> - Sets overlap
793 
794     Notes:
795     By default the ASM preconditioner uses 1 block per processor.  To use
796     multiple blocks per processor, see PCASMSetTotalSubdomains() and
797     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
798 
799     The overlap defaults to 1, so if one desires that no additional
800     overlap be computed beyond what may have been set with a call to
801     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
802     must be set to 0.  In particular, if one does not explicitly set
803     the subdomains in an application code, then all overlap is computed
804     internally by PETSc, and using an overlap of 0 results in an ASM
805     variant that is equivalent to the block Jacobi preconditioner.
806 
807     Note that one can define initial index sets with any overlap via
808     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
809     PCASMSetOverlap() merely allows PETSc to extend that overlap further
810     if desired.
811 
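    Example (a minimal sketch; equivalent to the option -pc_asm_overlap 2):
.vb
      PCASMSetOverlap(pc,2);
.ve
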
812     Level: intermediate
813 
814 .keywords: PC, ASM, set, overlap
815 
816 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
817           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
818 @*/
819 PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
820 {
821   PetscErrorCode ierr;
822 
823   PetscFunctionBegin;
824   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
825   PetscValidLogicalCollectiveInt(pc,ovl,2);
826   ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "PCASMSetType"
832 /*@
833     PCASMSetType - Sets the type of restriction and interpolation used
834     for local problems in the additive Schwarz method.
835 
836     Logically Collective on PC
837 
838     Input Parameters:
839 +   pc  - the preconditioner context
840 -   type - variant of ASM, one of
841 .vb
842       PC_ASM_BASIC       - full interpolation and restriction
843       PC_ASM_RESTRICT    - full restriction, local processor interpolation
844       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
845       PC_ASM_NONE        - local processor restriction and interpolation
846 .ve
847 
848     Options Database Key:
849 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
850 
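    Example (a minimal sketch; selects standard ASM instead of the default
    restricted variant):
.vb
      PCASMSetType(pc,PC_ASM_BASIC);
.ve
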
851     Level: intermediate
852 
853 .keywords: PC, ASM, set, type
854 
855 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
856           PCASMCreateSubdomains2D()
857 @*/
858 PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
859 {
860   PetscErrorCode ierr;
861 
862   PetscFunctionBegin;
863   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
864   PetscValidLogicalCollectiveEnum(pc,type,2);
865   ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
866   PetscFunctionReturn(0);
867 }
868 
869 #undef __FUNCT__
870 #define __FUNCT__ "PCASMSetSortIndices"
871 /*@
872     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
873 
874     Logically Collective on PC
875 
876     Input Parameters:
877 +   pc  - the preconditioner context
878 -   doSort - sort the subdomain indices
879 
880     Level: intermediate
881 
882 .keywords: PC, ASM, set, type
883 
884 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
885           PCASMCreateSubdomains2D()
886 @*/
887 PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool  doSort)
888 {
889   PetscErrorCode ierr;
890 
891   PetscFunctionBegin;
892   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
893   PetscValidLogicalCollectiveBool(pc,doSort,2);
894   ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
895   PetscFunctionReturn(0);
896 }
897 
898 #undef __FUNCT__
899 #define __FUNCT__ "PCASMGetSubKSP"
900 /*@C
901    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
902    this processor.
903 
904    Collective on PC iff first_local is requested
905 
906    Input Parameter:
907 .  pc - the preconditioner context
908 
909    Output Parameters:
910 +  n_local - the number of blocks on this processor or PETSC_NULL
911 .  first_local - the global number of the first block on this processor or PETSC_NULL,
912                  all processors must request or all must pass PETSC_NULL
913 -  ksp - the array of KSP contexts
914 
915    Note:
916    After PCASMGetSubKSP() the array of KSP contexts is not to be freed by the caller
917 
918    Currently for some matrix implementations only 1 block per processor
919    is supported.
920 
921    You must call KSPSetUp() before calling PCASMGetSubKSP().
922 
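   Example (a minimal sketch that sets ILU(1) on every local block; assumes
   ksp is the outer KSP whose PC pc has type PCASM, and error checking is
   elided):
.vb
      PetscInt i,nlocal;
      KSP      *subksp;
      PC       subpc;
      KSPSetUp(ksp);
      PCASMGetSubKSP(pc,&nlocal,PETSC_NULL,&subksp);
      for (i=0; i<nlocal; i++) {
        KSPGetPC(subksp[i],&subpc);
        PCSetType(subpc,PCILU);
        PCFactorSetLevels(subpc,1);
      }
.ve
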
923    Level: advanced
924 
925 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
926 
927 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
928           PCASMCreateSubdomains2D()
929 @*/
930 PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
931 {
932   PetscErrorCode ierr;
933 
934   PetscFunctionBegin;
935   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
936   ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 /* -------------------------------------------------------------------------------------*/
941 /*MC
942    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
943            its own KSP object.
944 
945    Options Database Keys:
946 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
947 .  -pc_asm_blocks <blks> - Sets total blocks
948 .  -pc_asm_overlap <ovl> - Sets overlap
949 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
950 
951      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors, you
952       will get different convergence rates due to the default option -pc_asm_type restrict. Use
953       -pc_asm_type basic to get the standard ASM.
954 
955    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
956      than one processor. Defaults to one block per processor.
957 
958      To set options on the solvers for each block, append -sub_ to all the KSP and PC
959         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
960 
961      To set the options on the solvers separately for each block, call PCASMGetSubKSP()
962          and set the options directly on the resulting KSP object (you can access its PC
963          with KSPGetPC())
964 
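     Example (a sketch of a typical command line; ex2 stands in for any PETSc
     linear solver example program):
.vb
      mpiexec -n 4 ./ex2 -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_pc_type ilu
.ve
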
965 
966    Level: beginner
967 
968    Concepts: additive Schwarz method
969 
970     References:
971     An additive variant of the Schwarz alternating method for the case of many subregions
972     M Dryja, OB Widlund - Courant Institute, New York University Technical report
973 
974     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
975     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
976 
977 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
978            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
979            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
980 
981 M*/
982 
983 EXTERN_C_BEGIN
984 #undef __FUNCT__
985 #define __FUNCT__ "PCCreate_ASM"
986 PetscErrorCode  PCCreate_ASM(PC pc)
987 {
988   PetscErrorCode ierr;
989   PC_ASM         *osm;
990 
991   PetscFunctionBegin;
992   ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
993   osm->n                 = PETSC_DECIDE;
994   osm->n_local           = 0;
995   osm->n_local_true      = 0;
996   osm->overlap           = 1;
997   osm->ksp               = 0;
998   osm->restriction       = 0;
999   osm->localization      = 0;
1000   osm->prolongation      = 0;
1001   osm->x                 = 0;
1002   osm->y                 = 0;
1003   osm->y_local           = 0;
1004   osm->is                = 0;
1005   osm->is_local          = 0;
1006   osm->mat               = 0;
1007   osm->pmat              = 0;
1008   osm->type              = PC_ASM_RESTRICT;
1009   osm->same_local_solves = PETSC_TRUE;
1010   osm->sort_indices      = PETSC_TRUE;
1011 
1012   pc->data                   = (void*)osm;
1013   pc->ops->apply             = PCApply_ASM;
1014   pc->ops->applytranspose    = PCApplyTranspose_ASM;
1015   pc->ops->setup             = PCSetUp_ASM;
1016   pc->ops->reset             = PCReset_ASM;
1017   pc->ops->destroy           = PCDestroy_ASM;
1018   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
1019   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
1020   pc->ops->view              = PCView_ASM;
1021   pc->ops->applyrichardson   = 0;
1022 
1023   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
1024                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1025   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
1026                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1027   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
1028                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
1029   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
1030                     PCASMSetType_ASM);CHKERRQ(ierr);
1031   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
1032                     PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1033   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
1034                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1035   PetscFunctionReturn(0);
1036 }
1037 EXTERN_C_END
1038 
1039 
1040 #undef __FUNCT__
1041 #define __FUNCT__ "PCASMCreateSubdomains"
1042 /*@C
1043    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1044    preconditioner for any problem on a general grid.
1045 
1046    Collective
1047 
1048    Input Parameters:
1049 +  A - The global matrix operator
1050 -  n - the number of local blocks
1051 
1052    Output Parameters:
1053 .  outis - the array of index sets defining the subdomains
1054 
1055    Level: advanced
1056 
1057    Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1058     from these if you use PCASMSetLocalSubdomains()
1059 
1060     In the Fortran version you must provide the array outis[] already allocated of length n.
1061 
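   Example (a minimal sketch: create 4 nonoverlapping blocks on this process,
   pass them to the PC, then drop the caller's references):
.vb
      IS *is;
      PCASMCreateSubdomains(A,4,&is);
      PCASMSetLocalSubdomains(pc,4,is,PETSC_NULL);
      PCASMDestroySubdomains(4,is,PETSC_NULL);
.ve
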
1062 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1063 
1064 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1065 @*/
1066 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1067 {
1068   MatPartitioning           mpart;
1069   const char                *prefix;
1070   PetscErrorCode            (*f)(Mat,Mat*);
1071   PetscMPIInt               size;
1072   PetscInt                  i,j,rstart,rend,bs;
1073   PetscBool                 isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1074   Mat                       Ad = PETSC_NULL, adj;
1075   IS                        ispart,isnumb,*is;
1076   PetscErrorCode            ierr;
1077 
1078   PetscFunctionBegin;
1079   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1080   PetscValidPointer(outis,3);
1081   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1082 
1083   /* Get prefix, row distribution, and block size */
1084   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1085   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1086   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1087   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1088 
1089   /* Get diagonal block from matrix if possible */
1090   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1091   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1092   if (f) {
1093     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1094   } else if (size == 1) {
1095     Ad = A;
1096   }
1097   if (Ad) {
1098     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1099     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1100   }
1101   if (Ad && n > 1) {
1102     PetscBool  match,done;
1103     /* Try to setup a good matrix partitioning if available */
1104     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1105     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1106     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1107     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1108     if (!match) {
1109       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1110     }
1111     if (!match) { /* assume a "good" partitioner is available */
1112       PetscInt na,*ia,*ja;
1113       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1114       if (done) {
1115 	/* Build adjacency matrix by hand. Unfortunately a call to
1116 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1117 	   remove the block-aij structure and we cannot expect
1118 	   MatPartitioning to split vertices as we need */
1119 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1120 	nnz = 0;
1121 	for (i=0; i<na; i++) { /* count number of nonzeros */
1122 	  len = ia[i+1] - ia[i];
1123 	  row = ja + ia[i];
1124 	  for (j=0; j<len; j++) {
1125 	    if (row[j] == i) { /* don't count diagonal */
1126 	      len--; break;
1127 	    }
1128 	  }
1129 	  nnz += len;
1130 	}
1131 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1132 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1133 	nnz    = 0;
1134 	iia[0] = 0;
1135 	for (i=0; i<na; i++) { /* fill adjacency */
1136 	  cnt = 0;
1137 	  len = ia[i+1] - ia[i];
1138 	  row = ja + ia[i];
1139 	  for (j=0; j<len; j++) {
1140 	    if (row[j] != i) { /* if not diagonal */
1141 	      jja[nnz+cnt++] = row[j];
1142 	    }
1143 	  }
1144 	  nnz += cnt;
1145 	  iia[i+1] = nnz;
1146 	}
1147 	/* Partitioning of the adjacency matrix */
1148 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1149 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1150 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1151 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1152 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1153 	ierr = MatDestroy(&adj);CHKERRQ(ierr);
1154 	foundpart = PETSC_TRUE;
1155       }
1156       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1157     }
1158     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1159   }
1160 
1161   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1162   *outis = is;
1163 
1164   if (!foundpart) {
1165 
1166     /* Partitioning by contiguous chunks of rows */
1167 
1168     PetscInt mbs   = (rend-rstart)/bs;
1169     PetscInt start = rstart;
1170     for (i=0; i<n; i++) {
1171       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1172       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1173       start += count;
1174     }
1175 
1176   } else {
1177 
1178     /* Partitioning by adjacency of diagonal block  */
1179 
1180     const PetscInt *numbering;
1181     PetscInt       *count,nidx,*indices,*newidx,start=0;
1182     /* Get node count in each partition */
1183     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1184     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1185     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1186       for (i=0; i<n; i++) count[i] *= bs;
1187     }
1188     /* Build indices from node numbering */
1189     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1190     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1191     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1192     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1193     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1194     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1195     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1196       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1197       for (i=0; i<nidx; i++)
1198         for (j=0; j<bs; j++)
1199           newidx[i*bs+j] = indices[i]*bs + j;
1200       ierr = PetscFree(indices);CHKERRQ(ierr);
1201       nidx   *= bs;
1202       indices = newidx;
1203     }
1204     /* Shift to get global indices */
1205     for (i=0; i<nidx; i++) indices[i] += rstart;
1206 
1207     /* Build the index sets for each block */
1208     for (i=0; i<n; i++) {
1209       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1210       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1211       start += count[i];
1212     }
1213 
1214     ierr = PetscFree(count);CHKERRQ(ierr);
1215     ierr = PetscFree(indices);CHKERRQ(ierr);
1216     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1217     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1218 
1219   }
1220 
1221   PetscFunctionReturn(0);
1222 }
1223 
1224 #undef __FUNCT__
1225 #define __FUNCT__ "PCASMDestroySubdomains"
1226 /*@C
1227    PCASMDestroySubdomains - Destroys the index sets created with
1228    PCASMCreateSubdomains(). Should be called after setting subdomains
1229    with PCASMSetLocalSubdomains().
1230 
1231    Collective
1232 
1233    Input Parameters:
1234 +  n - the number of index sets
1235 .  is - the array of index sets
1236 -  is_local - the array of local index sets, can be PETSC_NULL
1237 
1238    Level: advanced
1239 
1240 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1241 
1242 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1243 @*/
1244 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1245 {
1246   PetscInt       i;
1247   PetscErrorCode ierr;
1248   PetscFunctionBegin;
1249   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1250   PetscValidPointer(is,2);
1251   for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1252   ierr = PetscFree(is);CHKERRQ(ierr);
1253   if (is_local) {
1254     PetscValidPointer(is_local,3);
1255     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1256     ierr = PetscFree(is_local);CHKERRQ(ierr);
1257   }
1258   PetscFunctionReturn(0);
1259 }
1260 
1261 #undef __FUNCT__
1262 #define __FUNCT__ "PCASMCreateSubdomains2D"
1263 /*@
1264    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1265    preconditioner for a two-dimensional problem on a regular grid.
1266 
1267    Not Collective
1268 
1269    Input Parameters:
1270 +  m, n - the number of mesh points in the x and y directions
1271 .  M, N - the number of subdomains in the x and y directions
1272 .  dof - degrees of freedom per node
1273 -  overlap - overlap in mesh lines
1274 
1275    Output Parameters:
1276 +  Nsub - the number of subdomains created
1277 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1278 -  is_local - array of index sets defining non-overlapping subdomains
1279 
1280    Note:
1281    Presently PCASMCreateSubdomains2D() is valid only for sequential
1282    preconditioners.  More general related routines are
1283    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1284 
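   Example (a minimal sketch for a sequential run: a 12x12 grid with one
   degree of freedom per node, split into a 2x2 array of subdomains with 1
   mesh line of overlap):
.vb
      PetscInt Nsub;
      IS       *is,*is_local;
      PCASMCreateSubdomains2D(12,12,2,2,1,1,&Nsub,&is,&is_local);
      PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
.ve
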
1285    Level: advanced
1286 
1287 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1288 
1289 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1290           PCASMSetOverlap()
1291 @*/
1292 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1293 {
1294   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1295   PetscErrorCode ierr;
1296   PetscInt       nidx,*idx,loc,ii,jj,count;
1297 
1298   PetscFunctionBegin;
1299   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"dof must be 1");
1300 
1301   *Nsub = N*M;
1302   ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
1303   ierr = PetscMalloc((*Nsub)*sizeof(IS),is_local);CHKERRQ(ierr);
1304   ystart = 0;
1305   loc_outer = 0;
1306   for (i=0; i<N; i++) {
1307     height = n/N + ((n % N) > i); /* height of subdomain */
1308     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1309     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1310     yright = ystart + height + overlap; if (yright > n) yright = n;
1311     xstart = 0;
1312     for (j=0; j<M; j++) {
1313       width = m/M + ((m % M) > j); /* width of subdomain */
1314       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1315       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1316       xright = xstart + width + overlap; if (xright > m) xright = m;
1317       nidx   = (xright - xleft)*(yright - yleft);
1318       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1319       loc    = 0;
1320       for (ii=yleft; ii<yright; ii++) {
1321         count = m*ii + xleft;
1322         for (jj=xleft; jj<xright; jj++) {
1323           idx[loc++] = count++;
1324         }
1325       }
1326       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1327       if (overlap == 0) {
1328         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1329         (*is_local)[loc_outer] = (*is)[loc_outer];
1330       } else {
1331         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1332           for (jj=xstart; jj<xstart+width; jj++) {
1333             idx[loc++] = m*ii + jj;
1334           }
1335         }
1336         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1337       }
1338       ierr = PetscFree(idx);CHKERRQ(ierr);
1339       xstart += width;
1340       loc_outer++;
1341     }
1342     ystart += height;
1343   }
1344   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1345   PetscFunctionReturn(0);
1346 }
1347 
1348 #undef __FUNCT__
1349 #define __FUNCT__ "PCASMGetLocalSubdomains"
1350 /*@C
1351     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1352     only) for the additive Schwarz preconditioner.
1353 
1354     Not Collective
1355 
1356     Input Parameter:
1357 .   pc - the preconditioner context
1358 
1359     Output Parameters:
1360 +   n - the number of subdomains for this processor (default value = 1)
1361 .   is - the index sets that define the subdomains for this processor
1362 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1363 
1364 
1365     Notes:
1366     The IS numbering is in the parallel, global numbering of the vector.
1367 
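    Example (a minimal sketch that views each subdomain of this process;
    assumes PCSetUp() has already been called so the subdomains exist):
.vb
      PetscInt i,n;
      IS       *is;
      PCASMGetLocalSubdomains(pc,&n,&is,PETSC_NULL);
      for (i=0; i<n; i++) {
        ISView(is[i],PETSC_VIEWER_STDOUT_SELF);
      }
.ve
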
1368     Level: advanced
1369 
1370 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1371 
1372 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1373           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1374 @*/
1375 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1376 {
1377   PC_ASM         *osm;
1378   PetscErrorCode ierr;
1379   PetscBool      match;
1380 
1381   PetscFunctionBegin;
1382   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1383   PetscValidIntPointer(n,2);
1384   if (is) PetscValidPointer(is,3);
1385   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1386   if (!match) {
1387     if (n)  *n  = 0;
1388     if (is) *is = PETSC_NULL;
1389   } else {
1390     osm = (PC_ASM*)pc->data;
1391     if (n)  *n  = osm->n_local_true;
1392     if (is) *is = osm->is;
1393     if (is_local) *is_local = osm->is_local;
1394   }
1395   PetscFunctionReturn(0);
1396 }
1397 
1398 #undef __FUNCT__
1399 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1400 /*@C
1401     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1402     only) for the additive Schwarz preconditioner.
1403 
1404     Not Collective
1405 
1406     Input Parameter:
1407 .   pc - the preconditioner context
1408 
1409     Output Parameters:
1410 +   n - the number of matrices for this processor (default value = 1)
1411 -   mat - the matrices
1412 
1413 
1414     Level: advanced
1415 
1416     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1417 
1418            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1419 
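    Example (a minimal sketch that marks every local submatrix as symmetric
    once the preconditioner has been set up):
.vb
      PetscInt i,n;
      Mat      *mats;
      KSPSetUp(ksp);
      PCASMGetLocalSubmatrices(pc,&n,&mats);
      for (i=0; i<n; i++) {
        MatSetOption(mats[i],MAT_SYMMETRIC,PETSC_TRUE);
      }
.ve
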
1420 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1421 
1422 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1423           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1424 @*/
1425 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1426 {
1427   PC_ASM         *osm;
1428   PetscErrorCode ierr;
1429   PetscBool      match;
1430 
1431   PetscFunctionBegin;
1432   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1433   PetscValidIntPointer(n,2);
1434   if (mat) PetscValidPointer(mat,3);
1435   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1436   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1437   if (!match) {
1438     if (n)   *n   = 0;
1439     if (mat) *mat = PETSC_NULL;
1440   } else {
1441     osm = (PC_ASM*)pc->data;
1442     if (n)   *n   = osm->n_local_true;
1443     if (mat) *mat = osm->pmat;
1444   }
1445   PetscFunctionReturn(0);
1446 }
1447