xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision d155d4104c12796cc6e93a7e2b3cd74e97fcda36)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <private/pcimpl.h>     /*I "petscpc.h" I*/
14 
/*
   Private context for the additive Schwarz (PCASM) preconditioner.
   See the comment at the top of this file for the meaning of n, n_local,
   and n_local_true.
*/
typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user; -1 means user supplied already-overlapping subdomains */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted block submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Views the additive Schwarz preconditioner.

   For ASCII viewers: prints the block count and overlap, then either a single
   KSP (when all local solves are known to be the same) or one KSP per local
   block using synchronized output.  For string viewers: a one-line summary.
   Any other viewer type raises PETSC_ERR_SUP.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;


  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Fallback texts: overlap < 0 means the user supplied already-overlapping
       subdomains; n <= 0 means the total block count is not yet determined */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof blocks,"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks use the same solver, so view only the first KSP, on rank 0 */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers may differ per block: every process prints its own blocks.
         The loop runs to n_local (the max over all processes) so the
         Get/RestoreSingleton calls match up on every rank. */
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "PCASMPrintSubdomains"
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm  = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer;
105   PetscInt       i,j,nidx;
106   const PetscInt *idx;
107   PetscErrorCode ierr;
108 
109   PetscFunctionBegin;
110   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
111   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
112   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
113   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
114   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
115   for (i=0;i<osm->n_local_true;i++) {
116     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
117     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
118     for (j=0; j<nidx; j++) {
119       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
120     }
121     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
122     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
123     if (osm->is_local) {
124       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
125       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
126       for (j=0; j<nidx; j++) {
127         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
128       }
129       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
130       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
131     }
132   }
133   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
134   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
135   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
136   PetscFunctionReturn(0);
137 }
138 
139 #undef __FUNCT__
140 #define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Sets up the additive Schwarz preconditioner.

   First-time setup (!pc->setupcalled):
     - decides the global/local subdomain counts,
     - creates the subdomain index sets if the user did not supply them,
     - grows the subdomains by the requested overlap,
     - builds the restriction/localization/prolongation scatters and the
       sequential work vectors for every block,
     - creates one sequential KSP (default KSPPREONLY) per true local block.
   Every call re-extracts the block submatrices from pc->pmat; they are
   rebuilt from scratch when the nonzero pattern changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* Default to the basic variant for matrices known to be symmetric,
       unless the user chose a type explicitly */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains */
      PetscInt inwork[2],outwork[2];
      /* PetscMaxSum_Op yields the max over ranks in slot 0 (-> n_local, see
         the comment at the top of this file) and the sum in slot 1 (-> n) */
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is){ /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* With several blocks per process, keep a copy of the pre-overlap
         index sets as the non-overlapping local parts */
      ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* Zero overlap: is[i] will stay as-is, so just share it */
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y_local);CHKERRQ(ierr);
    /* firstRow accumulates the local (non-overlapping) block sizes; m_local
       is assigned in the loop body before the increment expression uses it */
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* Translate the non-overlapping indices (global numbering) into the
           local numbering of the overlapping subdomain osm->is[i] */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        /* IS_GTOLM_DROP discards indices not found in osm->is[i]; losing any
           means is_local[i] was not contained in is[i] */
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* No separate non-overlapping part: share y and the restriction scatter */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* The accumulated local block sizes must exactly cover this process's rows */
    if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
    /* Empty blocks padding n_local_true up to n_local, so every process takes
       part in the same number of scatters (see the comment atop this file) */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    /* Create the local solvers */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      /* Block solvers are configured with the "<pc prefix>sub_" options prefix */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
327 
328 #undef __FUNCT__
329 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
330 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
331 {
332   PC_ASM         *osm = (PC_ASM*)pc->data;
333   PetscErrorCode ierr;
334   PetscInt       i;
335 
336   PetscFunctionBegin;
337   for (i=0; i<osm->n_local_true; i++) {
338     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
339   }
340   PetscFunctionReturn(0);
341 }
342 
343 #undef __FUNCT__
344 #define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restricts the
   global vector x into each subdomain work vector, solves every block with
   its KSP, and adds the local solutions back into the global result y.

   All restrictions are begun before any solve so that communication can
   overlap the local work; the matching VecScatterEnd() calls are issued
   just before each piece of data is needed.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* Map the overlapping solution onto the non-overlapping subdomain */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
392 
393 #undef __FUNCT__
394 #define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner; same scatter pipeline as PCApply_ASM() but using
   KSPSolveTranspose() and with the restrict/interpolate roles swapped.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* Begin all restrictions up front so communication overlaps the solves */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* Map the overlapping solution onto the non-overlapping subdomain */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
445 
446 #undef __FUNCT__
447 #define __FUNCT__ "PCReset_ASM"
448 static PetscErrorCode PCReset_ASM(PC pc)
449 {
450   PC_ASM         *osm = (PC_ASM*)pc->data;
451   PetscErrorCode ierr;
452   PetscInt       i;
453 
454   PetscFunctionBegin;
455   if (osm->ksp) {
456     for (i=0; i<osm->n_local_true; i++) {
457       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
458     }
459   }
460   if (osm->pmat) {
461     if (osm->n_local_true > 0) {
462       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
463     }
464   }
465   if (osm->restriction) {
466     for (i=0; i<osm->n_local; i++) {
467       ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
468       if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
469       ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
470       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
471       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
472       ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
473     }
474     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
475     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
476     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
477     ierr = PetscFree(osm->x);CHKERRQ(ierr);
478     ierr = PetscFree(osm->y);CHKERRQ(ierr);
479     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
480   }
481   if (osm->is) {
482     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
483     osm->is       = 0;
484     osm->is_local = 0;
485   }
486   PetscFunctionReturn(0);
487 }
488 
489 #undef __FUNCT__
490 #define __FUNCT__ "PCDestroy_ASM"
491 static PetscErrorCode PCDestroy_ASM(PC pc)
492 {
493   PC_ASM         *osm = (PC_ASM*)pc->data;
494   PetscErrorCode ierr;
495   PetscInt       i;
496 
497   PetscFunctionBegin;
498   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
499   if (osm->ksp) {
500     for (i=0; i<osm->n_local_true; i++) {
501       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
502     }
503     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
504   }
505   ierr = PetscFree(pc->data);CHKERRQ(ierr);
506   PetscFunctionReturn(0);
507 }
508 
509 #undef __FUNCT__
510 #define __FUNCT__ "PCSetFromOptions_ASM"
511 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
512 {
513   PC_ASM         *osm = (PC_ASM*)pc->data;
514   PetscErrorCode ierr;
515   PetscInt       blocks,ovl;
516   PetscBool      symset,flg;
517   PCASMType      asmtype;
518 
519   PetscFunctionBegin;
520   /* set the type to symmetric if matrix is symmetric */
521   if (!osm->type_set && pc->pmat) {
522     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
523     if (symset && flg) { osm->type = PC_ASM_BASIC; }
524   }
525   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
526     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
527     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
528     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
529     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
530     flg  = PETSC_FALSE;
531     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
532     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
533   ierr = PetscOptionsTail();CHKERRQ(ierr);
534   PetscFunctionReturn(0);
535 }
536 
537 /*------------------------------------------------------------------------------------*/
538 
539 EXTERN_C_BEGIN
540 #undef __FUNCT__
541 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Records the user-supplied subdomain index
   sets (and optionally their non-overlapping local parts) for this process.
   Has no effect (beyond validation) after PCSetUp() has run.
*/
PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Take references to the incoming index sets BEFORE destroying the old
       ones, so this remains safe if the caller passes the sets already
       stored in this PC */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is[i] = is[i]; }
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
    }
  }
  PetscFunctionReturn(0);
}
578 EXTERN_C_END
579 
580 EXTERN_C_BEGIN
581 #undef __FUNCT__
582 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
583 PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
584 {
585   PC_ASM         *osm = (PC_ASM*)pc->data;
586   PetscErrorCode ierr;
587   PetscMPIInt    rank,size;
588   PetscInt       n;
589 
590   PetscFunctionBegin;
591   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
592   if (is || is_local) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
593 
594   /*
595      Split the subdomains equally among all processors
596   */
597   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
598   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
599   n = N/size + ((N % size) > rank);
600   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
601   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
602   if (!pc->setupcalled) {
603     if (osm->is) {
604       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
605     }
606     osm->n_local_true = n;
607     osm->is           = 0;
608     osm->is_local     = 0;
609   }
610   PetscFunctionReturn(0);
611 }
612 EXTERN_C_END
613 
614 EXTERN_C_BEGIN
615 #undef __FUNCT__
616 #define __FUNCT__ "PCASMSetOverlap_ASM"
617 PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
618 {
619   PC_ASM *osm = (PC_ASM*)pc->data;
620 
621   PetscFunctionBegin;
622   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
623   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
624   if (!pc->setupcalled) {
625     osm->overlap = ovl;
626   }
627   PetscFunctionReturn(0);
628 }
629 EXTERN_C_END
630 
631 EXTERN_C_BEGIN
632 #undef __FUNCT__
633 #define __FUNCT__ "PCASMSetType_ASM"
634 PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
635 {
636   PC_ASM *osm = (PC_ASM*)pc->data;
637 
638   PetscFunctionBegin;
639   osm->type     = type;
640   osm->type_set = PETSC_TRUE;
641   PetscFunctionReturn(0);
642 }
643 EXTERN_C_END
644 
645 EXTERN_C_BEGIN
646 #undef __FUNCT__
647 #define __FUNCT__ "PCASMSetSortIndices_ASM"
648 PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
649 {
650   PC_ASM *osm = (PC_ASM*)pc->data;
651 
652   PetscFunctionBegin;
653   osm->sort_indices = doSort;
654   PetscFunctionReturn(0);
655 }
656 EXTERN_C_END
657 
658 EXTERN_C_BEGIN
659 #undef __FUNCT__
660 #define __FUNCT__ "PCASMGetSubKSP_ASM"
661 PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
662 {
663   PC_ASM         *osm = (PC_ASM*)pc->data;
664   PetscErrorCode ierr;
665 
666   PetscFunctionBegin;
667   if (osm->n_local_true < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
668 
669   if (n_local) {
670     *n_local = osm->n_local_true;
671   }
672   if (first_local) {
673     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
674     *first_local -= osm->n_local_true;
675   }
676   if (ksp) {
677     /* Assume that local solves are now different; not necessarily
678        true though!  This flag is used only for PCView_ASM() */
679     *ksp                   = osm->ksp;
680     osm->same_local_solves = PETSC_FALSE;
681   }
682   PetscFunctionReturn(0);
683 }
684 EXTERN_C_END
685 
686 
687 #undef __FUNCT__
688 #define __FUNCT__ "PCASMSetLocalSubdomains"
689 /*@C
690     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
691 
692     Collective on PC
693 
694     Input Parameters:
695 +   pc - the preconditioner context
696 .   n - the number of subdomains for this processor (default value = 1)
697 .   is - the index set that defines the subdomains for this processor
698          (or PETSC_NULL for PETSc to determine subdomains)
699 -   is_local - the index sets that define the local part of the subdomains for this processor
700          (or PETSC_NULL to use the default of 1 subdomain per process)
701 
702     Notes:
703     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
704 
705     By default the ASM preconditioner uses 1 block per processor.
706 
707     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
708 
709     Level: advanced
710 
711 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
712 
713 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
714           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
715 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the implementation composed on this PC; silently a no-op
     if the PC is not of type PCASM (PetscTryMethod semantics). */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
725 
726 #undef __FUNCT__
727 #define __FUNCT__ "PCASMSetTotalSubdomains"
728 /*@C
729     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
730     additive Schwarz preconditioner.  Either all or no processors in the
731     PC communicator must call this routine, with the same index sets.
732 
733     Collective on PC
734 
735     Input Parameters:
736 +   pc - the preconditioner context
737 .   n - the number of subdomains for all processors
738 .   is - the index sets that define the subdomains for all processor
739          (or PETSC_NULL for PETSc to determine subdomains)
740 -   is_local - the index sets that define the local part of the subdomains for this processor
741          (or PETSC_NULL to use the default of 1 subdomain per process)
742 
743     Options Database Key:
744     To set the total number of subdomain blocks rather than specify the
745     index sets, use the option
746 .    -pc_asm_blocks <blks> - Sets total blocks
747 
748     Notes:
749     Currently you cannot use this to set the actual subdomains with the argument is.
750 
751     By default the ASM preconditioner uses 1 block per processor.
752 
753     These index sets cannot be destroyed until after completion of the
754     linear solves for which the ASM preconditioner is being used.
755 
756     Use PCASMSetLocalSubdomains() to set local subdomains.
757 
758     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
759 
760     Level: advanced
761 
762 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
763 
764 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
765           PCASMCreateSubdomains2D()
766 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the implementation composed on this PC; silently a no-op
     if the PC is not of type PCASM (PetscTryMethod semantics). */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
776 
777 #undef __FUNCT__
778 #define __FUNCT__ "PCASMSetOverlap"
779 /*@
780     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
781     additive Schwarz preconditioner.  Either all or no processors in the
782     PC communicator must call this routine.
783 
784     Logically Collective on PC
785 
786     Input Parameters:
787 +   pc  - the preconditioner context
788 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
789 
790     Options Database Key:
791 .   -pc_asm_overlap <ovl> - Sets overlap
792 
793     Notes:
794     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
796     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
797 
798     The overlap defaults to 1, so if one desires that no additional
799     overlap be computed beyond what may have been set with a call to
800     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
801     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
803     internally by PETSc, and using an overlap of 0 would result in an ASM
804     variant that is equivalent to the block Jacobi preconditioner.
805 
806     Note that one can define initial index sets with any overlap via
807     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
808     PCASMSetOverlap() merely allows PETSc to extend that overlap further
809     if desired.
810 
811     Level: intermediate
812 
813 .keywords: PC, ASM, set, overlap
814 
815 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
816           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
817 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* ovl must be identical on every process of the PC's communicator. */
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatch to the composed implementation; no-op if pc is not PCASM. */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
828 
829 #undef __FUNCT__
830 #define __FUNCT__ "PCASMSetType"
831 /*@
832     PCASMSetType - Sets the type of restriction and interpolation used
833     for local problems in the additive Schwarz method.
834 
835     Logically Collective on PC
836 
837     Input Parameters:
838 +   pc  - the preconditioner context
839 -   type - variant of ASM, one of
840 .vb
841       PC_ASM_BASIC       - full interpolation and restriction
842       PC_ASM_RESTRICT    - full restriction, local processor interpolation
843       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
844       PC_ASM_NONE        - local processor restriction and interpolation
845 .ve
846 
847     Options Database Key:
848 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
849 
850     Level: intermediate
851 
852 .keywords: PC, ASM, set, type
853 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
855           PCASMCreateSubdomains2D()
856 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* type must be identical on every process of the PC's communicator. */
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatch to the composed implementation; no-op if pc is not PCASM. */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
867 
868 #undef __FUNCT__
869 #define __FUNCT__ "PCASMSetSortIndices"
870 /*@
871     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
872 
873     Logically Collective on PC
874 
875     Input Parameters:
876 +   pc  - the preconditioner context
877 -   doSort - sort the subdomain indices
878 
879     Level: intermediate
880 
881 .keywords: PC, ASM, set, type
882 
883 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
884           PCASMCreateSubdomains2D()
885 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* doSort must be identical on every process of the PC's communicator. */
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatch to the composed implementation; no-op if pc is not PCASM. */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
896 
897 #undef __FUNCT__
898 #define __FUNCT__ "PCASMGetSubKSP"
899 /*@C
900    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
901    this processor.
902 
903    Collective on PC iff first_local is requested
904 
905    Input Parameter:
906 .  pc - the preconditioner context
907 
908    Output Parameters:
909 +  n_local - the number of blocks on this processor or PETSC_NULL
910 .  first_local - the global number of the first block on this processor or PETSC_NULL,
911                  all processors must request or all must pass PETSC_NULL
912 -  ksp - the array of KSP contexts
913 
914    Note:
915    After PCASMGetSubKSP() the array of KSPes is not to be freed
916 
917    Currently for some matrix implementations only 1 block per processor
918    is supported.
919 
920    You must call KSPSetUp() before calling PCASMGetSubKSP().
921 
922    Level: advanced
923 
924 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
925 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
927           PCASMCreateSubdomains2D(),
928 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if the PC is
     not of type PCASM, since there is no meaningful result to return. */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
938 
939 /* -------------------------------------------------------------------------------------*/
940 /*MC
941    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
942            its own KSP object.
943 
944    Options Database Keys:
945 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
946 .  -pc_asm_blocks <blks> - Sets total blocks
947 .  -pc_asm_overlap <ovl> - Sets overlap
948 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
949 
950      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
951       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
952       -pc_asm_type basic to use the standard ASM.
953 
954    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
955      than one processor. Defaults to one block per processor.
956 
957      To set options on the solvers for each block append -sub_ to all the KSP, and PC
958         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
959 
960      To set the options on the solvers separate for each block call PCASMGetSubKSP()
961          and set the options directly on the resulting KSP object (you can access its PC
962          with KSPGetPC())
963 
964 
965    Level: beginner
966 
967    Concepts: additive Schwarz method
968 
969     References:
970     An additive variant of the Schwarz alternating method for the case of many subregions
971     M Dryja, OB Widlund - Courant Institute, New York University Technical report
972 
973     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
974     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
975 
976 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
977            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
978            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
979 
980 M*/
981 
982 EXTERN_C_BEGIN
983 #undef __FUNCT__
984 #define __FUNCT__ "PCCreate_ASM"
/* Constructor for the PCASM type: allocates the private context with
   defaults and installs the virtual methods and composed functions. */
PetscErrorCode  PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
  /* Defaults: subdomain count decided at setup, overlap of 1, restricted
     ASM variant, subdomain indices sorted before use. */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  /* Install the PC virtual methods implemented by this file. */
  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->reset             = PCReset_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  /* Compose the type-specific functions so the generic PCASMXxx()
     wrappers can dispatch to them via PetscTryMethod/PetscUseMethod. */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
                    PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1036 EXTERN_C_END
1037 
1038 
1039 #undef __FUNCT__
1040 #define __FUNCT__ "PCASMCreateSubdomains"
1041 /*@C
1042    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1044 
1045    Collective
1046 
1047    Input Parameters:
1048 +  A - The global matrix operator
1049 -  n - the number of local blocks
1050 
1051    Output Parameters:
1052 .  outis - the array of index sets defining the subdomains
1053 
1054    Level: advanced
1055 
1056    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1057     from these if you use PCASMSetLocalSubdomains()
1058 
1059     In the Fortran version you must provide the array outis[] already allocated of length n.
1060 
1061 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1062 
1063 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1064 @*/
1065 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1066 {
1067   MatPartitioning           mpart;
1068   const char                *prefix;
1069   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1070   PetscMPIInt               size;
1071   PetscInt                  i,j,rstart,rend,bs;
1072   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1073   Mat                       Ad = PETSC_NULL, adj;
1074   IS                        ispart,isnumb,*is;
1075   PetscErrorCode            ierr;
1076 
1077   PetscFunctionBegin;
1078   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1079   PetscValidPointer(outis,3);
1080   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1081 
1082   /* Get prefix, row distribution, and block size */
1083   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1084   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1085   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1086   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1087 
1088   /* Get diagonal block from matrix if possible */
1089   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1090   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1091   if (f) {
1092     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1093   } else if (size == 1) {
1094     iscopy = PETSC_FALSE; Ad = A;
1095   } else {
1096     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1097   }
1098   if (Ad) {
1099     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1100     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1101   }
1102   if (Ad && n > 1) {
1103     PetscBool  match,done;
1104     /* Try to setup a good matrix partitioning if available */
1105     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1106     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1107     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1108     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1109     if (!match) {
1110       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1111     }
1112     if (!match) { /* assume a "good" partitioner is available */
1113       PetscInt na,*ia,*ja;
1114       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1115       if (done) {
1116 	/* Build adjacency matrix by hand. Unfortunately a call to
1117 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1118 	   remove the block-aij structure and we cannot expect
1119 	   MatPartitioning to split vertices as we need */
1120 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1121 	nnz = 0;
1122 	for (i=0; i<na; i++) { /* count number of nonzeros */
1123 	  len = ia[i+1] - ia[i];
1124 	  row = ja + ia[i];
1125 	  for (j=0; j<len; j++) {
1126 	    if (row[j] == i) { /* don't count diagonal */
1127 	      len--; break;
1128 	    }
1129 	  }
1130 	  nnz += len;
1131 	}
1132 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1133 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1134 	nnz    = 0;
1135 	iia[0] = 0;
1136 	for (i=0; i<na; i++) { /* fill adjacency */
1137 	  cnt = 0;
1138 	  len = ia[i+1] - ia[i];
1139 	  row = ja + ia[i];
1140 	  for (j=0; j<len; j++) {
1141 	    if (row[j] != i) { /* if not diagonal */
1142 	      jja[nnz+cnt++] = row[j];
1143 	    }
1144 	  }
1145 	  nnz += cnt;
1146 	  iia[i+1] = nnz;
1147 	}
1148 	/* Partitioning of the adjacency matrix */
1149 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1150 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1151 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1152 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1153 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1154 	ierr = MatDestroy(&adj);CHKERRQ(ierr);
1155 	foundpart = PETSC_TRUE;
1156       }
1157       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1158     }
1159     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1160   }
1161   if (iscopy) {ierr = MatDestroy(&Ad);CHKERRQ(ierr);}
1162 
1163   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1164   *outis = is;
1165 
1166   if (!foundpart) {
1167 
1168     /* Partitioning by contiguous chunks of rows */
1169 
1170     PetscInt mbs   = (rend-rstart)/bs;
1171     PetscInt start = rstart;
1172     for (i=0; i<n; i++) {
1173       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1174       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1175       start += count;
1176     }
1177 
1178   } else {
1179 
1180     /* Partitioning by adjacency of diagonal block  */
1181 
1182     const PetscInt *numbering;
1183     PetscInt       *count,nidx,*indices,*newidx,start=0;
1184     /* Get node count in each partition */
1185     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1186     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1187     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1188       for (i=0; i<n; i++) count[i] *= bs;
1189     }
1190     /* Build indices from node numbering */
1191     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1192     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1193     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1194     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1195     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1196     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1197     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1198       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1199       for (i=0; i<nidx; i++)
1200 	for (j=0; j<bs; j++)
1201 	  newidx[i*bs+j] = indices[i]*bs + j;
1202       ierr = PetscFree(indices);CHKERRQ(ierr);
1203       nidx   *= bs;
1204       indices = newidx;
1205     }
1206     /* Shift to get global indices */
1207     for (i=0; i<nidx; i++) indices[i] += rstart;
1208 
1209     /* Build the index sets for each block */
1210     for (i=0; i<n; i++) {
1211       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1212       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1213       start += count[i];
1214     }
1215 
1216     ierr = PetscFree(count);
1217     ierr = PetscFree(indices);
1218     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1219     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1220 
1221   }
1222 
1223   PetscFunctionReturn(0);
1224 }
1225 
1226 #undef __FUNCT__
1227 #define __FUNCT__ "PCASMDestroySubdomains"
1228 /*@C
1229    PCASMDestroySubdomains - Destroys the index sets created with
1230    PCASMCreateSubdomains(). Should be called after setting subdomains
1231    with PCASMSetLocalSubdomains().
1232 
1233    Collective
1234 
1235    Input Parameters:
1236 +  n - the number of index sets
1237 .  is - the array of index sets
1238 -  is_local - the array of local index sets, can be PETSC_NULL
1239 
1240    Level: advanced
1241 
1242 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1243 
1244 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1245 @*/
1246 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1247 {
1248   PetscInt       i;
1249   PetscErrorCode ierr;
1250   PetscFunctionBegin;
1251   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1252   PetscValidPointer(is,2);
1253   for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1254   ierr = PetscFree(is);CHKERRQ(ierr);
1255   if (is_local) {
1256     PetscValidPointer(is_local,3);
1257     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1258     ierr = PetscFree(is_local);CHKERRQ(ierr);
1259   }
1260   PetscFunctionReturn(0);
1261 }
1262 
1263 #undef __FUNCT__
1264 #define __FUNCT__ "PCASMCreateSubdomains2D"
1265 /*@
1266    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1267    preconditioner for a two-dimensional problem on a regular grid.
1268 
1269    Not Collective
1270 
1271    Input Parameters:
1272 +  m, n - the number of mesh points in the x and y directions
1273 .  M, N - the number of subdomains in the x and y directions
1274 .  dof - degrees of freedom per node
1275 -  overlap - overlap in mesh lines
1276 
1277    Output Parameters:
1278 +  Nsub - the number of subdomains created
1279 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1280 -  is_local - array of index sets defining non-overlapping subdomains
1281 
1282    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1284    preconditioners.  More general related routines are
1285    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1286 
1287    Level: advanced
1288 
1289 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1290 
1291 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1292           PCASMSetOverlap()
1293 @*/
1294 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1295 {
1296   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1297   PetscErrorCode ierr;
1298   PetscInt       nidx,*idx,loc,ii,jj,count;
1299 
1300   PetscFunctionBegin;
1301   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1302 
1303   *Nsub = N*M;
1304   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1305   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1306   ystart = 0;
1307   loc_outer = 0;
1308   for (i=0; i<N; i++) {
1309     height = n/N + ((n % N) > i); /* height of subdomain */
1310     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1311     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1312     yright = ystart + height + overlap; if (yright > n) yright = n;
1313     xstart = 0;
1314     for (j=0; j<M; j++) {
1315       width = m/M + ((m % M) > j); /* width of subdomain */
1316       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1317       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1318       xright = xstart + width + overlap; if (xright > m) xright = m;
1319       nidx   = (xright - xleft)*(yright - yleft);
1320       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1321       loc    = 0;
1322       for (ii=yleft; ii<yright; ii++) {
1323         count = m*ii + xleft;
1324         for (jj=xleft; jj<xright; jj++) {
1325           idx[loc++] = count++;
1326         }
1327       }
1328       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1329       if (overlap == 0) {
1330         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1331         (*is_local)[loc_outer] = (*is)[loc_outer];
1332       } else {
1333         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1334           for (jj=xstart; jj<xstart+width; jj++) {
1335             idx[loc++] = m*ii + jj;
1336           }
1337         }
1338         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1339       }
1340       ierr = PetscFree(idx);CHKERRQ(ierr);
1341       xstart += width;
1342       loc_outer++;
1343     }
1344     ystart += height;
1345   }
1346   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1347   PetscFunctionReturn(0);
1348 }
1349 
1350 #undef __FUNCT__
1351 #define __FUNCT__ "PCASMGetLocalSubdomains"
1352 /*@C
1353     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1354     only) for the additive Schwarz preconditioner.
1355 
1356     Not Collective
1357 
1358     Input Parameter:
1359 .   pc - the preconditioner context
1360 
1361     Output Parameters:
1362 +   n - the number of subdomains for this processor (default value = 1)
1363 .   is - the index sets that define the subdomains for this processor
1364 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1365 
1366 
1367     Notes:
1368     The IS numbering is in the parallel, global numbering of the vector.
1369 
1370     Level: advanced
1371 
1372 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1373 
1374 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1375           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1376 @*/
1377 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1378 {
1379   PC_ASM         *osm;
1380   PetscErrorCode ierr;
1381   PetscBool      match;
1382 
1383   PetscFunctionBegin;
1384   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1385   PetscValidIntPointer(n,2);
1386   if (is) PetscValidPointer(is,3);
1387   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1388   if (!match) {
1389     if (n)  *n  = 0;
1390     if (is) *is = PETSC_NULL;
1391   } else {
1392     osm = (PC_ASM*)pc->data;
1393     if (n)  *n  = osm->n_local_true;
1394     if (is) *is = osm->is;
1395     if (is_local) *is_local = osm->is_local;
1396   }
1397   PetscFunctionReturn(0);
1398 }
1399 
1400 #undef __FUNCT__
1401 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1402 /*@C
1403     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1404     only) for the additive Schwarz preconditioner.
1405 
1406     Not Collective
1407 
1408     Input Parameter:
1409 .   pc - the preconditioner context
1410 
1411     Output Parameters:
1412 +   n - the number of matrices for this processor (default value = 1)
1413 -   mat - the matrices
1414 
1415 
1416     Level: advanced
1417 
1418 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1419 
1420 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1421           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1422 @*/
1423 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1424 {
1425   PC_ASM         *osm;
1426   PetscErrorCode ierr;
1427   PetscBool      match;
1428 
1429   PetscFunctionBegin;
1430   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1431   PetscValidIntPointer(n,2);
1432   if (mat) PetscValidPointer(mat,3);
1433   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1434   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1435   if (!match) {
1436     if (n)   *n   = 0;
1437     if (mat) *mat = PETSC_NULL;
1438   } else {
1439     osm = (PC_ASM*)pc->data;
1440     if (n)   *n   = osm->n_local_true;
1441     if (mat) *mat = osm->pmat;
1442   }
1443   PetscFunctionReturn(0);
1444 }
1445