xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision ad4df1005a67cf9575a5c4294f11fd96229b2bb4)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
15 
typedef struct {
  PetscInt   n, n_local, n_local_true; /* n: total subdomains on all processes; n_local: max over processes of n_local_true (padding so collective scatters match); n_local_true: actual subdomains on this process */
  PetscInt   overlap;             /* overlap requested by user; -1 means the user supplied already-overlapping subdomains, so PCSetUp_ASM() must not grow them */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors: x/y sized to the overlapping subdomain, y_local to the non-overlapping one (aliased to y when is_local is NULL) */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the submatrices extracted in PCSetUp_ASM() */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same (used only by PCView_ASM) */
  PetscTruth sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;
32 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints the state of the additive Schwarz preconditioner.

   Supports ASCII and string viewers; any other viewer type raises PETSC_ERR_SUP.
   NOTE(review): the Get/RestoreSingleton calls appear to be collective over the
   viewer's communicator, so every rank must reach them even though only rank 0
   prints -- confirm before reordering anything in this routine.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* fall-back text is shown when overlap (< 0) or block count (<= 0) is not set */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof blocks,"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* all blocks share one solver configuration: print only rank 0's first KSP */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* solvers may differ per block: every rank prints each of its true blocks;
         the loop runs to n_local so the collective singleton calls match on all ranks */
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
95 
96 #undef __FUNCT__
97 #define __FUNCT__ "PCASMPrintSubdomains"
98 static PetscErrorCode PCASMPrintSubdomains(PC pc)
99 {
100   PC_ASM         *osm  = (PC_ASM*)pc->data;
101   const char     *prefix;
102   char           fname[PETSC_MAX_PATH_LEN+1];
103   PetscViewer    viewer;
104   PetscInt       i,j,nidx;
105   const PetscInt *idx;
106   PetscErrorCode ierr;
107 
108   PetscFunctionBegin;
109   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
110   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
111   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
112   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
113   for (i=0;i<osm->n_local_true;i++) {
114     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
115     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
116     for (j=0; j<nidx; j++) {
117       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
118     }
119     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
120     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
121     if (osm->is_local) {
122       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
123       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
124       for (j=0; j<nidx; j++) {
125         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
126       }
127       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
128       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
129     }
130   }
131   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
132   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
133   PetscFunctionReturn(0);
134 }
135 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds everything the ASM preconditioner needs: the subdomain
   index sets (grown by the requested overlap), scatter contexts and work
   vectors, the extracted submatrices, and one KSP solver per true local block.

   On the first call all structures are created; on subsequent calls only the
   submatrices are re-extracted (from scratch when the nonzero pattern changed,
   otherwise reusing the existing ones via MAT_REUSE_MATRIX).
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscTruth     symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* default to the basic variant when the matrix is known symmetric and the
       user did not force a type explicitly */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains and the per-process maximum.
         NOTE(review): presumably PetscMaxSum_Op leaves the max in outwork[0]
         and the sum in outwork[1] -- confirm against its definition */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is){ /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* multiple blocks per process but no non-overlapping sets supplied: derive them */
      ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* zero overlap: the two sets coincide, so just share the reference */
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetTruth(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts.
       NOTE(review): the mallocs size elements as sizeof(VecScatter *) etc.;
       since these PETSc handles are themselves pointers the byte count is the
       same, but sizeof(VecScatter) would match the array element type. */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    /* firstRow advances by the local (non-overlapping) size of each block so
       that after the loop it must equal lastRow (checked below) */
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        /* translate the non-overlapping indices into positions within the
           overlapping subdomain to build the localization scatter */
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,&isll);CHKERRQ(ierr);
        ierr = PetscFree(idx);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isl);CHKERRQ(ierr);
      } else {
        /* no separate local sets: y_local aliases y and prolongation aliases
           restriction, with extra references so teardown can destroy both */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
    /* pad with empty vectors/scatters up to n_local so the collective scatter
       calls in PCApply_ASM() match across all processes (see file header) */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(vec);CHKERRQ(ierr);

    /* Create the local solvers */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
325 
326 #undef __FUNCT__
327 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
328 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
329 {
330   PC_ASM         *osm = (PC_ASM*)pc->data;
331   PetscErrorCode ierr;
332   PetscInt       i;
333 
334   PetscFunctionBegin;
335   for (i=0; i<osm->n_local_true; i++) {
336     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restricts x onto
   each overlapping subdomain, solves with the corresponding block KSP, and
   adds the (possibly localized) results back into y.

   Scatters are pipelined: every restriction is begun before any solve, and all
   prolongations are completed in a final pass.  All loops run to n_local (not
   n_local_true) so the collective scatter calls match across processes.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* y accumulates contributions with ADD_VALUES, so it must start at zero */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: copy only the non-overlapping part into y_local */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
390 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner.  Mirrors PCApply_ASM() but uses KSPSolveTranspose() for the
   block solves and swaps which of the RESTRICT/INTERPOLATE flags limits the
   forward and reverse scatters (see note below).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* y accumulates contributions with ADD_VALUES, so it must start at zero */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: copy only the non-overlapping part into y_local */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "PCDestroy_ASM"
446 static PetscErrorCode PCDestroy_ASM(PC pc)
447 {
448   PC_ASM         *osm = (PC_ASM*)pc->data;
449   PetscErrorCode ierr;
450   PetscInt       i;
451 
452   PetscFunctionBegin;
453   if (osm->ksp) {
454     for (i=0; i<osm->n_local_true; i++) {
455       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
456     }
457     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
458   }
459   if (osm->pmat) {
460     if (osm->n_local_true > 0) {
461       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
462     }
463   }
464   if (osm->restriction) {
465     for (i=0; i<osm->n_local; i++) {
466       ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
467       if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
468       ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
469       ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
470       ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
471       ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
472     }
473     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
474     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
475     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
476     ierr = PetscFree(osm->x);CHKERRQ(ierr);
477     ierr = PetscFree(osm->y);CHKERRQ(ierr);
478     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
479   }
480   if (osm->is) {
481     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
482   }
483   ierr = PetscFree(osm);CHKERRQ(ierr);
484   PetscFunctionReturn(0);
485 }
486 
487 #undef __FUNCT__
488 #define __FUNCT__ "PCSetFromOptions_ASM"
489 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
490 {
491   PC_ASM         *osm = (PC_ASM*)pc->data;
492   PetscErrorCode ierr;
493   PetscInt       blocks,ovl;
494   PetscTruth     symset,flg;
495   PCASMType      asmtype;
496 
497   PetscFunctionBegin;
498   /* set the type to symmetric if matrix is symmetric */
499   if (!osm->type_set && pc->pmat) {
500     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
501     if (symset && flg) { osm->type = PC_ASM_BASIC; }
502   }
503   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
504     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
505     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
506     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
507     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
508     flg  = PETSC_FALSE;
509     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
510     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
511   ierr = PetscOptionsTail();CHKERRQ(ierr);
512   PetscFunctionReturn(0);
513 }
514 
515 /*------------------------------------------------------------------------------------*/
516 
517 EXTERN_C_BEGIN
518 #undef __FUNCT__
519 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
520 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
521 {
522   PC_ASM         *osm = (PC_ASM*)pc->data;
523   PetscErrorCode ierr;
524   PetscInt       i;
525 
526   PetscFunctionBegin;
527   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
528   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
529 
530   if (!pc->setupcalled) {
531     if (is) {
532       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
533     }
534     if (is_local) {
535       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
536     }
537     if (osm->is) {
538       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
539     }
540     osm->n_local_true = n;
541     osm->is           = 0;
542     osm->is_local     = 0;
543     if (is) {
544       ierr = PetscMalloc(n*sizeof(IS *),&osm->is);CHKERRQ(ierr);
545       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
546       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
547       osm->overlap = -1;
548     }
549     if (is_local) {
550       ierr = PetscMalloc(n*sizeof(IS *),&osm->is_local);CHKERRQ(ierr);
551       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
552     }
553   }
554   PetscFunctionReturn(0);
555 }
556 EXTERN_C_END
557 
558 EXTERN_C_BEGIN
559 #undef __FUNCT__
560 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
561 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
562 {
563   PC_ASM         *osm = (PC_ASM*)pc->data;
564   PetscErrorCode ierr;
565   PetscMPIInt    rank,size;
566   PetscInt       n;
567 
568   PetscFunctionBegin;
569   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
570   if (is || is_local) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
571 
572   /*
573      Split the subdomains equally among all processors
574   */
575   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
576   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
577   n = N/size + ((N % size) > rank);
578   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
579   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
580   if (!pc->setupcalled) {
581     if (osm->is) {
582       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
583     }
584     osm->n_local_true = n;
585     osm->is           = 0;
586     osm->is_local     = 0;
587   }
588   PetscFunctionReturn(0);
589 }
590 EXTERN_C_END
591 
592 EXTERN_C_BEGIN
593 #undef __FUNCT__
594 #define __FUNCT__ "PCASMSetOverlap_ASM"
595 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
596 {
597   PC_ASM *osm = (PC_ASM*)pc->data;
598 
599   PetscFunctionBegin;
600   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
601   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
602   if (!pc->setupcalled) {
603     osm->overlap = ovl;
604   }
605   PetscFunctionReturn(0);
606 }
607 EXTERN_C_END
608 
609 EXTERN_C_BEGIN
610 #undef __FUNCT__
611 #define __FUNCT__ "PCASMSetType_ASM"
612 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
613 {
614   PC_ASM *osm = (PC_ASM*)pc->data;
615 
616   PetscFunctionBegin;
617   osm->type     = type;
618   osm->type_set = PETSC_TRUE;
619   PetscFunctionReturn(0);
620 }
621 EXTERN_C_END
622 
623 EXTERN_C_BEGIN
624 #undef __FUNCT__
625 #define __FUNCT__ "PCASMSetSortIndices_ASM"
626 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices_ASM(PC pc,PetscTruth doSort)
627 {
628   PC_ASM *osm = (PC_ASM*)pc->data;
629 
630   PetscFunctionBegin;
631   osm->sort_indices = doSort;
632   PetscFunctionReturn(0);
633 }
634 EXTERN_C_END
635 
636 EXTERN_C_BEGIN
637 #undef __FUNCT__
638 #define __FUNCT__ "PCASMGetSubKSP_ASM"
639 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
640 {
641   PC_ASM         *osm = (PC_ASM*)pc->data;
642   PetscErrorCode ierr;
643 
644   PetscFunctionBegin;
645   if (osm->n_local_true < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
646 
647   if (n_local) {
648     *n_local = osm->n_local_true;
649   }
650   if (first_local) {
651     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
652     *first_local -= osm->n_local_true;
653   }
654   if (ksp) {
655     /* Assume that local solves are now different; not necessarily
656        true though!  This flag is used only for PCView_ASM() */
657     *ksp                   = osm->ksp;
658     osm->same_local_solves = PETSC_FALSE;
659   }
660   PetscFunctionReturn(0);
661 }
662 EXTERN_C_END
663 
664 
665 #undef __FUNCT__
666 #define __FUNCT__ "PCASMSetLocalSubdomains"
667 /*@C
668     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
669     only) for the additive Schwarz preconditioner.
670 
671     Collective on PC
672 
673     Input Parameters:
674 +   pc - the preconditioner context
675 .   n - the number of subdomains for this processor (default value = 1)
676 .   is - the index sets that define the subdomains for this processor
677          (or PETSC_NULL for PETSc to determine subdomains)
678 -   is_local - the index sets that define the local part of the subdomains for this processor
679          (or PETSC_NULL to use the default of 1 subdomain per process)
680 
681     Notes:
682     The IS numbering is in the parallel, global numbering of the vector.
683 
684     By default the ASM preconditioner uses 1 block per processor.
685 
686     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
687 
688     Level: advanced
689 
690 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
691 
692 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
693           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
694 @*/
695 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
696 {
697   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);
698 
699   PetscFunctionBegin;
700   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
701   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
702   if (f) {
703     ierr = (*f)(pc,n,is,is_local);CHKERRQ(ierr);
704   }
705   PetscFunctionReturn(0);
706 }
707 
708 #undef __FUNCT__
709 #define __FUNCT__ "PCASMSetTotalSubdomains"
710 /*@C
711     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
712     additive Schwarz preconditioner.  Either all or no processors in the
713     PC communicator must call this routine, with the same index sets.
714 
715     Collective on PC
716 
717     Input Parameters:
718 +   pc - the preconditioner context
719 .   n - the number of subdomains for all processors
720 .   is - the index sets that define the subdomains for all processor
721          (or PETSC_NULL for PETSc to determine subdomains)
722 -   is_local - the index sets that define the local part of the subdomains for this processor
723          (or PETSC_NULL to use the default of 1 subdomain per process)
724 
725     Options Database Key:
726     To set the total number of subdomain blocks rather than specify the
727     index sets, use the option
728 .    -pc_asm_blocks <blks> - Sets total blocks
729 
730     Notes:
731     Currently you cannot use this to set the actual subdomains with the argument is.
732 
733     By default the ASM preconditioner uses 1 block per processor.
734 
735     These index sets cannot be destroyed until after completion of the
736     linear solves for which the ASM preconditioner is being used.
737 
738     Use PCASMSetLocalSubdomains() to set local subdomains.
739 
740     Level: advanced
741 
742 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
743 
744 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
745           PCASMCreateSubdomains2D()
746 @*/
747 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
748 {
749   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);
750 
751   PetscFunctionBegin;
752   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
753   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
754   if (f) {
755     ierr = (*f)(pc,N,is,is_local);CHKERRQ(ierr);
756   }
757   PetscFunctionReturn(0);
758 }
759 
760 #undef __FUNCT__
761 #define __FUNCT__ "PCASMSetOverlap"
762 /*@
763     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
764     additive Schwarz preconditioner.  Either all or no processors in the
765     PC communicator must call this routine.
766 
767     Logically Collective on PC
768 
769     Input Parameters:
770 +   pc  - the preconditioner context
771 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
772 
773     Options Database Key:
774 .   -pc_asm_overlap <ovl> - Sets overlap
775 
776     Notes:
777     By default the ASM preconditioner uses 1 block per processor.  To use
778     multiple blocks per processor, see PCASMSetTotalSubdomains() and
779     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
780 
781     The overlap defaults to 1, so if one desires that no additional
782     overlap be computed beyond what may have been set with a call to
783     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
784     must be set to be 0.  In particular, if one does not explicitly set
785     the subdomains in an application code, then all overlap would be computed
786     internally by PETSc, and using an overlap of 0 would result in an ASM
787     variant that is equivalent to the block Jacobi preconditioner.
788 
789     Note that one can define initial index sets with any overlap via
790     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
791     PCASMSetOverlap() merely allows PETSc to extend that overlap further
792     if desired.
793 
794     Level: intermediate
795 
796 .keywords: PC, ASM, set, overlap
797 
798 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
799           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
800 @*/
801 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
802 {
803   PetscErrorCode ierr,(*f)(PC,PetscInt);
804 
805   PetscFunctionBegin;
806   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
807   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
808   if (f) {
809     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
810   }
811   PetscFunctionReturn(0);
812 }
813 
814 #undef __FUNCT__
815 #define __FUNCT__ "PCASMSetType"
816 /*@
817     PCASMSetType - Sets the type of restriction and interpolation used
818     for local problems in the additive Schwarz method.
819 
820     Logically Collective on PC
821 
822     Input Parameters:
823 +   pc  - the preconditioner context
824 -   type - variant of ASM, one of
825 .vb
826       PC_ASM_BASIC       - full interpolation and restriction
827       PC_ASM_RESTRICT    - full restriction, local processor interpolation
828       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
829       PC_ASM_NONE        - local processor restriction and interpolation
830 .ve
831 
832     Options Database Key:
833 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
834 
835     Level: intermediate
836 
837 .keywords: PC, ASM, set, type
838 
839 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
840           PCASMCreateSubdomains2D()
841 @*/
842 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
843 {
844   PetscErrorCode ierr,(*f)(PC,PCASMType);
845 
846   PetscFunctionBegin;
847   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
848   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
849   if (f) {
850     ierr = (*f)(pc,type);CHKERRQ(ierr);
851   }
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "PCASMSetSortIndices"
857 /*@
858     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
859 
860     Logically Collective on PC
861 
862     Input Parameters:
863 +   pc  - the preconditioner context
864 -   doSort - sort the subdomain indices
865 
866     Level: intermediate
867 
868 .keywords: PC, ASM, set, type
869 
870 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
871           PCASMCreateSubdomains2D()
872 @*/
873 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices(PC pc,PetscTruth doSort)
874 {
875   PetscErrorCode ierr,(*f)(PC,PetscTruth);
876 
877   PetscFunctionBegin;
878   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
879   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetSortIndices_C",(void (**)(void))&f);CHKERRQ(ierr);
880   if (f) {
881     ierr = (*f)(pc,doSort);CHKERRQ(ierr);
882   }
883   PetscFunctionReturn(0);
884 }
885 
886 #undef __FUNCT__
887 #define __FUNCT__ "PCASMGetSubKSP"
888 /*@C
889    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
890    this processor.
891 
892    Collective on PC iff first_local is requested
893 
894    Input Parameter:
895 .  pc - the preconditioner context
896 
897    Output Parameters:
898 +  n_local - the number of blocks on this processor or PETSC_NULL
899 .  first_local - the global number of the first block on this processor or PETSC_NULL,
900                  all processors must request or all must pass PETSC_NULL
901 -  ksp - the array of KSP contexts
902 
903    Note:
904    After PCASMGetSubKSP() the array of KSPes is not to be freed
905 
906    Currently for some matrix implementations only 1 block per processor
907    is supported.
908 
909    You must call KSPSetUp() before calling PCASMGetSubKSP().
910 
911    Level: advanced
912 
913 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
914 
915 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
916           PCASMCreateSubdomains2D(),
917 @*/
918 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
919 {
920   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
921 
922   PetscFunctionBegin;
923   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
924   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
925   if (f) {
926     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
927   } else SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
928   PetscFunctionReturn(0);
929 }
930 
931 /* -------------------------------------------------------------------------------------*/
932 /*MC
933    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
934            its own KSP object.
935 
936    Options Database Keys:
937 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
938 .  -pc_asm_blocks <blks> - Sets total blocks
939 .  -pc_asm_overlap <ovl> - Sets overlap
940 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
941 
942      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
943       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
944       -pc_asm_type basic to use the standard ASM.
945 
946    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
947      than one processor. Defaults to one block per processor.
948 
949      To set options on the solvers for each block append -sub_ to all the KSP, and PC
950         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
951 
952      To set the options on the solvers separate for each block call PCASMGetSubKSP()
953          and set the options directly on the resulting KSP object (you can access its PC
954          with KSPGetPC())
955 
956 
957    Level: beginner
958 
959    Concepts: additive Schwarz method
960 
961     References:
962     An additive variant of the Schwarz alternating method for the case of many subregions
963     M Dryja, OB Widlund - Courant Institute, New York University Technical report
964 
965     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
966     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
967 
968 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
969            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
970            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
971 
972 M*/
973 
974 EXTERN_C_BEGIN
975 #undef __FUNCT__
976 #define __FUNCT__ "PCCreate_ASM"
/* Constructor for the PCASM type: allocates the PC_ASM data, fills the PC
   vtable, and composes the string-queried implementation functions that the
   public PCASMSet*/PCASMGet* wrappers dispatch to. */
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  /* allocate the ASM-private data structure and attach it to the PC */
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;   /* total subdomain count chosen at setup time */
  osm->n_local           = 0;
  osm->n_local_true      = 0;
  osm->overlap           = 1;              /* default: extend each subdomain by one layer */
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;  /* default variant; see the PCASM manual page */
  osm->same_local_solves = PETSC_TRUE;       /* assumed identical until PCASMGetSubKSP() is called */
  osm->sort_indices      = PETSC_TRUE;

  /* install the PC operations table */
  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  /* compose the functions that the type-independent wrappers query by name */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
                    PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1027 EXTERN_C_END
1028 
1029 
1030 #undef __FUNCT__
1031 #define __FUNCT__ "PCASMCreateSubdomains"
1032 /*@C
1033    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1034    preconditioner for any problem on a general grid.
1035 
1036    Collective
1037 
1038    Input Parameters:
1039 +  A - The global matrix operator
1040 -  n - the number of local blocks
1041 
1042    Output Parameters:
1043 .  outis - the array of index sets defining the subdomains
1044 
1045    Level: advanced
1046 
1047    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1048     from these if you use PCASMSetLocalSubdomains()
1049 
1050     In the Fortran version you must provide the array outis[] already allocated of length n.
1051 
1052 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1053 
1054 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1055 @*/
1056 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1057 {
1058   MatPartitioning           mpart;
1059   const char                *prefix;
1060   PetscErrorCode            (*f)(Mat,PetscTruth*,MatReuse,Mat*);
1061   PetscMPIInt               size;
1062   PetscInt                  i,j,rstart,rend,bs;
1063   PetscTruth                iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1064   Mat                       Ad = PETSC_NULL, adj;
1065   IS                        ispart,isnumb,*is;
1066   PetscErrorCode            ierr;
1067 
1068   PetscFunctionBegin;
1069   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1070   PetscValidPointer(outis,3);
1071   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1072 
1073   /* Get prefix, row distribution, and block size */
1074   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1075   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1076   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1077   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1078 
1079   /* Get diagonal block from matrix if possible */
1080   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1081   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1082   if (f) {
1083     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1084   } else if (size == 1) {
1085     iscopy = PETSC_FALSE; Ad = A;
1086   } else {
1087     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1088   }
1089   if (Ad) {
1090     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1091     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1092   }
1093   if (Ad && n > 1) {
1094     PetscTruth match,done;
1095     /* Try to setup a good matrix partitioning if available */
1096     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1097     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1098     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1099     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1100     if (!match) {
1101       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1102     }
1103     if (!match) { /* assume a "good" partitioner is available */
1104       PetscInt na,*ia,*ja;
1105       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1106       if (done) {
1107 	/* Build adjacency matrix by hand. Unfortunately a call to
1108 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1109 	   remove the block-aij structure and we cannot expect
1110 	   MatPartitioning to split vertices as we need */
1111 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1112 	nnz = 0;
1113 	for (i=0; i<na; i++) { /* count number of nonzeros */
1114 	  len = ia[i+1] - ia[i];
1115 	  row = ja + ia[i];
1116 	  for (j=0; j<len; j++) {
1117 	    if (row[j] == i) { /* don't count diagonal */
1118 	      len--; break;
1119 	    }
1120 	  }
1121 	  nnz += len;
1122 	}
1123 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1124 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1125 	nnz    = 0;
1126 	iia[0] = 0;
1127 	for (i=0; i<na; i++) { /* fill adjacency */
1128 	  cnt = 0;
1129 	  len = ia[i+1] - ia[i];
1130 	  row = ja + ia[i];
1131 	  for (j=0; j<len; j++) {
1132 	    if (row[j] != i) { /* if not diagonal */
1133 	      jja[nnz+cnt++] = row[j];
1134 	    }
1135 	  }
1136 	  nnz += cnt;
1137 	  iia[i+1] = nnz;
1138 	}
1139 	/* Partitioning of the adjacency matrix */
1140 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1141 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1142 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1143 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1144 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1145 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1146 	foundpart = PETSC_TRUE;
1147       }
1148       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1149     }
1150     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1151   }
1152   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1153 
1154   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1155   *outis = is;
1156 
1157   if (!foundpart) {
1158 
1159     /* Partitioning by contiguous chunks of rows */
1160 
1161     PetscInt mbs   = (rend-rstart)/bs;
1162     PetscInt start = rstart;
1163     for (i=0; i<n; i++) {
1164       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1165       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1166       start += count;
1167     }
1168 
1169   } else {
1170 
1171     /* Partitioning by adjacency of diagonal block  */
1172 
1173     const PetscInt *numbering;
1174     PetscInt       *count,nidx,*indices,*newidx,start=0;
1175     /* Get node count in each partition */
1176     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1177     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1178     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1179       for (i=0; i<n; i++) count[i] *= bs;
1180     }
1181     /* Build indices from node numbering */
1182     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1183     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1184     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1185     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1186     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1187     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1188     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1189       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1190       for (i=0; i<nidx; i++)
1191 	for (j=0; j<bs; j++)
1192 	  newidx[i*bs+j] = indices[i]*bs + j;
1193       ierr = PetscFree(indices);CHKERRQ(ierr);
1194       nidx   *= bs;
1195       indices = newidx;
1196     }
1197     /* Shift to get global indices */
1198     for (i=0; i<nidx; i++) indices[i] += rstart;
1199 
1200     /* Build the index sets for each block */
1201     for (i=0; i<n; i++) {
1202       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],&is[i]);CHKERRQ(ierr);
1203       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1204       start += count[i];
1205     }
1206 
1207     ierr = PetscFree(count);
1208     ierr = PetscFree(indices);
1209     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1210     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1211 
1212   }
1213 
1214   PetscFunctionReturn(0);
1215 }
1216 
1217 #undef __FUNCT__
1218 #define __FUNCT__ "PCASMDestroySubdomains"
1219 /*@C
1220    PCASMDestroySubdomains - Destroys the index sets created with
1221    PCASMCreateSubdomains(). Should be called after setting subdomains
1222    with PCASMSetLocalSubdomains().
1223 
1224    Collective
1225 
1226    Input Parameters:
1227 +  n - the number of index sets
1228 .  is - the array of index sets
1229 -  is_local - the array of local index sets, can be PETSC_NULL
1230 
1231    Level: advanced
1232 
1233 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1234 
1235 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1236 @*/
1237 PetscErrorCode PETSCKSP_DLLEXPORT PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1238 {
1239   PetscInt       i;
1240   PetscErrorCode ierr;
1241   PetscFunctionBegin;
1242   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1243   PetscValidPointer(is,2);
1244   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1245   ierr = PetscFree(is);CHKERRQ(ierr);
1246   if (is_local) {
1247     PetscValidPointer(is_local,3);
1248     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1249     ierr = PetscFree(is_local);CHKERRQ(ierr);
1250   }
1251   PetscFunctionReturn(0);
1252 }
1253 
1254 #undef __FUNCT__
1255 #define __FUNCT__ "PCASMCreateSubdomains2D"
1256 /*@
1257    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1258    preconditioner for a two-dimensional problem on a regular grid.
1259 
1260    Not Collective
1261 
1262    Input Parameters:
1263 +  m, n - the number of mesh points in the x and y directions
1264 .  M, N - the number of subdomains in the x and y directions
1265 .  dof - degrees of freedom per node
1266 -  overlap - overlap in mesh lines
1267 
1268    Output Parameters:
1269 +  Nsub - the number of subdomains created
1270 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1271 -  is_local - array of index sets defining non-overlapping subdomains
1272 
1273    Note:
1274    Presently PCAMSCreateSubdomains2d() is valid only for sequential
1275    preconditioners.  More general related routines are
1276    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1277 
1278    Level: advanced
1279 
1280 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1281 
1282 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1283           PCASMSetOverlap()
1284 @*/
1285 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1286 {
1287   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1288   PetscErrorCode ierr;
1289   PetscInt       nidx,*idx,loc,ii,jj,count;
1290 
1291   PetscFunctionBegin;
1292   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1293 
1294   *Nsub = N*M;
1295   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1296   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1297   ystart = 0;
1298   loc_outer = 0;
1299   for (i=0; i<N; i++) {
1300     height = n/N + ((n % N) > i); /* height of subdomain */
1301     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1302     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1303     yright = ystart + height + overlap; if (yright > n) yright = n;
1304     xstart = 0;
1305     for (j=0; j<M; j++) {
1306       width = m/M + ((m % M) > j); /* width of subdomain */
1307       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1308       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1309       xright = xstart + width + overlap; if (xright > m) xright = m;
1310       nidx   = (xright - xleft)*(yright - yleft);
1311       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1312       loc    = 0;
1313       for (ii=yleft; ii<yright; ii++) {
1314         count = m*ii + xleft;
1315         for (jj=xleft; jj<xright; jj++) {
1316           idx[loc++] = count++;
1317         }
1318       }
1319       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outer);CHKERRQ(ierr);
1320       if (overlap == 0) {
1321         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1322         (*is_local)[loc_outer] = (*is)[loc_outer];
1323       } else {
1324         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1325           for (jj=xstart; jj<xstart+width; jj++) {
1326             idx[loc++] = m*ii + jj;
1327           }
1328         }
1329         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,*is_local+loc_outer);CHKERRQ(ierr);
1330       }
1331       ierr = PetscFree(idx);CHKERRQ(ierr);
1332       xstart += width;
1333       loc_outer++;
1334     }
1335     ystart += height;
1336   }
1337   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 #undef __FUNCT__
1342 #define __FUNCT__ "PCASMGetLocalSubdomains"
1343 /*@C
1344     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1345     only) for the additive Schwarz preconditioner.
1346 
1347     Not Collective
1348 
1349     Input Parameter:
1350 .   pc - the preconditioner context
1351 
1352     Output Parameters:
1353 +   n - the number of subdomains for this processor (default value = 1)
1354 .   is - the index sets that define the subdomains for this processor
1355 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1356 
1357 
1358     Notes:
1359     The IS numbering is in the parallel, global numbering of the vector.
1360 
1361     Level: advanced
1362 
1363 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1364 
1365 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1366           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1367 @*/
1368 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1369 {
1370   PC_ASM         *osm;
1371   PetscErrorCode ierr;
1372   PetscTruth     match;
1373 
1374   PetscFunctionBegin;
1375   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1376   PetscValidIntPointer(n,2);
1377   if (is) PetscValidPointer(is,3);
1378   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1379   if (!match) {
1380     if (n)  *n  = 0;
1381     if (is) *is = PETSC_NULL;
1382   } else {
1383     osm = (PC_ASM*)pc->data;
1384     if (n)  *n  = osm->n_local_true;
1385     if (is) *is = osm->is;
1386     if (is_local) *is_local = osm->is_local;
1387   }
1388   PetscFunctionReturn(0);
1389 }
1390 
1391 #undef __FUNCT__
1392 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1393 /*@C
1394     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1395     only) for the additive Schwarz preconditioner.
1396 
1397     Not Collective
1398 
1399     Input Parameter:
1400 .   pc - the preconditioner context
1401 
1402     Output Parameters:
1403 +   n - the number of matrices for this processor (default value = 1)
1404 -   mat - the matrices
1405 
1406 
1407     Level: advanced
1408 
1409 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1410 
1411 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1412           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1413 @*/
1414 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1415 {
1416   PC_ASM         *osm;
1417   PetscErrorCode ierr;
1418   PetscTruth     match;
1419 
1420   PetscFunctionBegin;
1421   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1422   PetscValidIntPointer(n,2);
1423   if (mat) PetscValidPointer(mat,3);
1424   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1425   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1426   if (!match) {
1427     if (n)   *n   = 0;
1428     if (mat) *mat = PETSC_NULL;
1429   } else {
1430     osm = (PC_ASM*)pc->data;
1431     if (n)   *n   = osm->n_local_true;
1432     if (mat) *mat = osm->pmat;
1433   }
1434   PetscFunctionReturn(0);
1435 }
1436