xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 7d0a6c19129e7069c8a40e210b34ed62989173db)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
14 
/* Private context for the additive Schwarz (ASM) preconditioner. */
typedef struct {
  PetscInt   n, n_local, n_local_true; /* see file header: total / padded-local / true-local subdomain counts */
  PetscInt   overlap;             /* overlap requested by user; -1 means the user supplied already-overlapping subdomains */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors: overlapping rhs, overlapping solution, non-overlapping solution */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;
31 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints a summary of the ASM preconditioner: block counts,
   overlap, restriction/interpolation type, and the local block solver(s).
   Supports ASCII and string viewers only.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;


  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* fallback text shown when the user supplied subdomains (overlap < 0) or setup has not yet fixed osm->n */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof blocks,"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* all blocks share one solver configuration: print only rank 0's first KSP */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* solvers may differ per block: every process prints each of its blocks in turn.
         Note the Get/RestoreSingleton pair is collective, so it is called n_local
         (not n_local_true) times on every process. */
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "PCASMPrintSubdomains"
97 static PetscErrorCode PCASMPrintSubdomains(PC pc)
98 {
99   PC_ASM         *osm  = (PC_ASM*)pc->data;
100   const char     *prefix;
101   char           fname[PETSC_MAX_PATH_LEN+1];
102   PetscViewer    viewer;
103   PetscInt       i,j,nidx;
104   const PetscInt *idx;
105   PetscErrorCode ierr;
106 
107   PetscFunctionBegin;
108   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
109   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
110   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
111   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
112   for (i=0;i<osm->n_local_true;i++) {
113     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
114     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
115     for (j=0; j<nidx; j++) {
116       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
117     }
118     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
119     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
120     if (osm->is_local) {
121       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
123       for (j=0; j<nidx; j++) {
124         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
125       }
126       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
127       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
128     }
129   }
130   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
131   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds everything the ASM preconditioner needs: the
   subdomain index sets (extended by the requested overlap), the
   global<->subdomain vector scatters, the per-block work vectors, the
   extracted submatrices, and one sequential KSP per local block.

   On repeated setup with the same nonzero pattern only the submatrices
   are refreshed (scall = MAT_REUSE_MATRIX).
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* if the user never chose a type and the matrix is known symmetric, use the basic (symmetric) variant */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains; PetscMaxSum_Op yields both the max and the sum in one reduction */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is){ /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* with multiple blocks per process, keep a snapshot of the pre-overlap sets */
      ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* no overlap requested: share the same IS (reference, not copy) */
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts.
       All arrays are sized osm->n_local (the max over processes) so the
       collective scatter calls can be matched with empty entries below. */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    /* firstRow advances by each block's local size; the loop-exit check below
       verifies the non-overlapping blocks exactly tile this process's rows */
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* translate the non-overlapping (global) indices into positions within the overlapping subdomain */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isl);CHKERRQ(ierr);
      } else {
        /* single block without a distinct local set: reuse y and the restriction scatter */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
    /* pad with empty blocks so the collective scatter creation matches across processes */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(vec);CHKERRQ(ierr);

    /* Create the local solvers */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
326 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
327 {
328   PC_ASM         *osm = (PC_ASM*)pc->data;
329   PetscErrorCode ierr;
330   PetscInt       i;
331 
332   PetscFunctionBegin;
333   for (i=0; i<osm->n_local_true; i++) {
334     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
335   }
336   PetscFunctionReturn(0);
337 }
338 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the preconditioner: restrict x to each subdomain,
   solve the block system, and add the (possibly localized) solutions back
   into y.  Scatter begins and ends are deliberately split so communication
   overlaps the local solves; the reordering here must be preserved.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions before waiting on any of them */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* reduce the overlapping solution to the non-overlapping subdomain */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
388 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the preconditioner.
   Mirrors PCApply_ASM() but uses KSPSolveTranspose() and swaps which
   PCASMType bits control the forward/reverse scatter localization.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions before waiting on any of them (overlaps communication with solves) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* reduce the overlapping solution to the non-overlapping subdomain */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "PCReset_ASM"
444 static PetscErrorCode PCReset_ASM(PC pc)
445 {
446   PC_ASM         *osm = (PC_ASM*)pc->data;
447   PetscErrorCode ierr;
448   PetscInt       i;
449 
450   PetscFunctionBegin;
451   if (osm->ksp) {
452     for (i=0; i<osm->n_local_true; i++) {
453       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
454     }
455   }
456   if (osm->pmat) {
457     if (osm->n_local_true > 0) {
458       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
459     }
460   }
461   if (osm->restriction) {
462     for (i=0; i<osm->n_local; i++) {
463       ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
464       if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
465       ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
466       ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
467       ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
468       ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
469     }
470     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
471     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
472     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
473     ierr = PetscFree(osm->x);CHKERRQ(ierr);
474     ierr = PetscFree(osm->y);CHKERRQ(ierr);
475     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
476   }
477   if (osm->is) {
478     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
479     osm->is       = 0;
480     osm->is_local = 0;
481   }
482   PetscFunctionReturn(0);
483 }
484 
485 #undef __FUNCT__
486 #define __FUNCT__ "PCDestroy_ASM"
487 static PetscErrorCode PCDestroy_ASM(PC pc)
488 {
489   PC_ASM         *osm = (PC_ASM*)pc->data;
490   PetscErrorCode ierr;
491   PetscInt       i;
492 
493   PetscFunctionBegin;
494   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
495   if (osm->ksp) {
496     for (i=0; i<osm->n_local_true; i++) {
497       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
498     }
499     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
500   }
501   ierr = PetscFree(pc->data);CHKERRQ(ierr);
502   PetscFunctionReturn(0);
503 }
504 
505 #undef __FUNCT__
506 #define __FUNCT__ "PCSetFromOptions_ASM"
507 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
508 {
509   PC_ASM         *osm = (PC_ASM*)pc->data;
510   PetscErrorCode ierr;
511   PetscInt       blocks,ovl;
512   PetscBool      symset,flg;
513   PCASMType      asmtype;
514 
515   PetscFunctionBegin;
516   /* set the type to symmetric if matrix is symmetric */
517   if (!osm->type_set && pc->pmat) {
518     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
519     if (symset && flg) { osm->type = PC_ASM_BASIC; }
520   }
521   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
522     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
523     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
524     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
525     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
526     flg  = PETSC_FALSE;
527     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
528     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
529   ierr = PetscOptionsTail();CHKERRQ(ierr);
530   PetscFunctionReturn(0);
531 }
532 
533 /*------------------------------------------------------------------------------------*/
534 
535 EXTERN_C_BEGIN
536 #undef __FUNCT__
537 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
538 PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
539 {
540   PC_ASM         *osm = (PC_ASM*)pc->data;
541   PetscErrorCode ierr;
542   PetscInt       i;
543 
544   PetscFunctionBegin;
545   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
546   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
547 
548   if (!pc->setupcalled) {
549     if (is) {
550       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
551     }
552     if (is_local) {
553       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
554     }
555     if (osm->is) {
556       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
557     }
558     osm->n_local_true = n;
559     osm->is           = 0;
560     osm->is_local     = 0;
561     if (is) {
562       ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
563       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
564       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
565       osm->overlap = -1;
566     }
567     if (is_local) {
568       ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
569       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
570     }
571   }
572   PetscFunctionReturn(0);
573 }
574 EXTERN_C_END
575 
576 EXTERN_C_BEGIN
577 #undef __FUNCT__
578 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
579 PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
580 {
581   PC_ASM         *osm = (PC_ASM*)pc->data;
582   PetscErrorCode ierr;
583   PetscMPIInt    rank,size;
584   PetscInt       n;
585 
586   PetscFunctionBegin;
587   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
588   if (is || is_local) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
589 
590   /*
591      Split the subdomains equally among all processors
592   */
593   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
594   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
595   n = N/size + ((N % size) > rank);
596   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
597   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
598   if (!pc->setupcalled) {
599     if (osm->is) {
600       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
601     }
602     osm->n_local_true = n;
603     osm->is           = 0;
604     osm->is_local     = 0;
605   }
606   PetscFunctionReturn(0);
607 }
608 EXTERN_C_END
609 
610 EXTERN_C_BEGIN
611 #undef __FUNCT__
612 #define __FUNCT__ "PCASMSetOverlap_ASM"
613 PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
614 {
615   PC_ASM *osm = (PC_ASM*)pc->data;
616 
617   PetscFunctionBegin;
618   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
619   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
620   if (!pc->setupcalled) {
621     osm->overlap = ovl;
622   }
623   PetscFunctionReturn(0);
624 }
625 EXTERN_C_END
626 
627 EXTERN_C_BEGIN
628 #undef __FUNCT__
629 #define __FUNCT__ "PCASMSetType_ASM"
630 PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
631 {
632   PC_ASM *osm = (PC_ASM*)pc->data;
633 
634   PetscFunctionBegin;
635   osm->type     = type;
636   osm->type_set = PETSC_TRUE;
637   PetscFunctionReturn(0);
638 }
639 EXTERN_C_END
640 
641 EXTERN_C_BEGIN
642 #undef __FUNCT__
643 #define __FUNCT__ "PCASMSetSortIndices_ASM"
644 PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
645 {
646   PC_ASM *osm = (PC_ASM*)pc->data;
647 
648   PetscFunctionBegin;
649   osm->sort_indices = doSort;
650   PetscFunctionReturn(0);
651 }
652 EXTERN_C_END
653 
654 EXTERN_C_BEGIN
655 #undef __FUNCT__
656 #define __FUNCT__ "PCASMGetSubKSP_ASM"
657 PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
658 {
659   PC_ASM         *osm = (PC_ASM*)pc->data;
660   PetscErrorCode ierr;
661 
662   PetscFunctionBegin;
663   if (osm->n_local_true < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
664 
665   if (n_local) {
666     *n_local = osm->n_local_true;
667   }
668   if (first_local) {
669     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
670     *first_local -= osm->n_local_true;
671   }
672   if (ksp) {
673     /* Assume that local solves are now different; not necessarily
674        true though!  This flag is used only for PCView_ASM() */
675     *ksp                   = osm->ksp;
676     osm->same_local_solves = PETSC_FALSE;
677   }
678   PetscFunctionReturn(0);
679 }
680 EXTERN_C_END
681 
682 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    The index sets are referenced (not copied), so the caller may destroy
    its own references after this call.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMSetLocalSubdomains_ASM() when pc is of type PCASM; a no-op otherwise */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
722 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCASMSetTotalSubdomains_ASM(); PetscTryMethod() is a no-op if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
771 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatches to PCASMSetOverlap_ASM(); PetscTryMethod() is a no-op if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
823 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatches to PCASMSetType_ASM(); PetscTryMethod() is a no-op if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
862 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, sort, indices

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool  doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatches to PCASMSetSortIndices_ASM(); PetscTryMethod() is a no-op if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
891 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSPes is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() (unlike PetscTryMethod() in the setters above) raises an error
     if pc is not of type PCASM, since there are no sub-KSPs to return */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
933 
934 /* -------------------------------------------------------------------------------------*/
935 /*MC
936    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
937            its own KSP object.
938 
939    Options Database Keys:
940 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
941 .  -pc_asm_blocks <blks> - Sets total blocks
942 .  -pc_asm_overlap <ovl> - Sets overlap
943 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
944 
945      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
946       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
947       -pc_asm_type basic to use the standard ASM.
948 
949    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
950      than one processor. Defaults to one block per processor.
951 
952      To set options on the solvers for each block append -sub_ to all the KSP, and PC
953         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
954 
955      To set the options on the solvers separate for each block call PCASMGetSubKSP()
956          and set the options directly on the resulting KSP object (you can access its PC
957          with KSPGetPC())
958 
959 
960    Level: beginner
961 
962    Concepts: additive Schwarz method
963 
964     References:
965     An additive variant of the Schwarz alternating method for the case of many subregions
966     M Dryja, OB Widlund - Courant Institute, New York University Technical report
967 
968     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
969     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
970 
971 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
972            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
973            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
974 
975 M*/
976 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
/*
   PCCreate_ASM - Constructor for the PCASM preconditioner: allocates the
   PC_ASM context, fills in the defaults, installs the PC operations table,
   and composes the type-specific implementations of the PCASMSetXXX()/
   PCASMGetSubKSP() interface so the generic wrappers above can find them.
*/
PetscErrorCode  PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
  /* Defaults: subdomain count decided at setup time, overlap of 1, restricted ASM;
     all arrays are created later in PCSetUp_ASM() */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->reset             = PCReset_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  /* Compose the _ASM implementations that PetscTryMethod()/PetscUseMethod() dispatch to */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
                    PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
1032 
1033 
1034 #undef __FUNCT__
1035 #define __FUNCT__ "PCASMCreateSubdomains"
1036 /*@C
1037    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1038    preconditioner for a any problem on a general grid.
1039 
1040    Collective
1041 
1042    Input Parameters:
1043 +  A - The global matrix operator
1044 -  n - the number of local blocks
1045 
1046    Output Parameters:
1047 .  outis - the array of index sets defining the subdomains
1048 
1049    Level: advanced
1050 
1051    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1052     from these if you use PCASMSetLocalSubdomains()
1053 
1054     In the Fortran version you must provide the array outis[] already allocated of length n.
1055 
1056 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1057 
1058 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1059 @*/
1060 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1061 {
1062   MatPartitioning           mpart;
1063   const char                *prefix;
1064   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1065   PetscMPIInt               size;
1066   PetscInt                  i,j,rstart,rend,bs;
1067   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1068   Mat                       Ad = PETSC_NULL, adj;
1069   IS                        ispart,isnumb,*is;
1070   PetscErrorCode            ierr;
1071 
1072   PetscFunctionBegin;
1073   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1074   PetscValidPointer(outis,3);
1075   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1076 
1077   /* Get prefix, row distribution, and block size */
1078   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1079   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1080   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1081   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1082 
1083   /* Get diagonal block from matrix if possible */
1084   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1085   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1086   if (f) {
1087     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1088   } else if (size == 1) {
1089     iscopy = PETSC_FALSE; Ad = A;
1090   } else {
1091     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1092   }
1093   if (Ad) {
1094     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1095     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1096   }
1097   if (Ad && n > 1) {
1098     PetscBool  match,done;
1099     /* Try to setup a good matrix partitioning if available */
1100     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1101     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1102     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1103     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1104     if (!match) {
1105       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1106     }
1107     if (!match) { /* assume a "good" partitioner is available */
1108       PetscInt na,*ia,*ja;
1109       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1110       if (done) {
1111 	/* Build adjacency matrix by hand. Unfortunately a call to
1112 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1113 	   remove the block-aij structure and we cannot expect
1114 	   MatPartitioning to split vertices as we need */
1115 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1116 	nnz = 0;
1117 	for (i=0; i<na; i++) { /* count number of nonzeros */
1118 	  len = ia[i+1] - ia[i];
1119 	  row = ja + ia[i];
1120 	  for (j=0; j<len; j++) {
1121 	    if (row[j] == i) { /* don't count diagonal */
1122 	      len--; break;
1123 	    }
1124 	  }
1125 	  nnz += len;
1126 	}
1127 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1128 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1129 	nnz    = 0;
1130 	iia[0] = 0;
1131 	for (i=0; i<na; i++) { /* fill adjacency */
1132 	  cnt = 0;
1133 	  len = ia[i+1] - ia[i];
1134 	  row = ja + ia[i];
1135 	  for (j=0; j<len; j++) {
1136 	    if (row[j] != i) { /* if not diagonal */
1137 	      jja[nnz+cnt++] = row[j];
1138 	    }
1139 	  }
1140 	  nnz += cnt;
1141 	  iia[i+1] = nnz;
1142 	}
1143 	/* Partitioning of the adjacency matrix */
1144 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1145 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1146 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1147 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1148 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1149 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1150 	foundpart = PETSC_TRUE;
1151       }
1152       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1153     }
1154     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1155   }
1156   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1157 
1158   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1159   *outis = is;
1160 
1161   if (!foundpart) {
1162 
1163     /* Partitioning by contiguous chunks of rows */
1164 
1165     PetscInt mbs   = (rend-rstart)/bs;
1166     PetscInt start = rstart;
1167     for (i=0; i<n; i++) {
1168       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1169       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1170       start += count;
1171     }
1172 
1173   } else {
1174 
1175     /* Partitioning by adjacency of diagonal block  */
1176 
1177     const PetscInt *numbering;
1178     PetscInt       *count,nidx,*indices,*newidx,start=0;
1179     /* Get node count in each partition */
1180     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1181     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1182     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1183       for (i=0; i<n; i++) count[i] *= bs;
1184     }
1185     /* Build indices from node numbering */
1186     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1187     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1188     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1189     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1190     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1191     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1192     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1193       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1194       for (i=0; i<nidx; i++)
1195 	for (j=0; j<bs; j++)
1196 	  newidx[i*bs+j] = indices[i]*bs + j;
1197       ierr = PetscFree(indices);CHKERRQ(ierr);
1198       nidx   *= bs;
1199       indices = newidx;
1200     }
1201     /* Shift to get global indices */
1202     for (i=0; i<nidx; i++) indices[i] += rstart;
1203 
1204     /* Build the index sets for each block */
1205     for (i=0; i<n; i++) {
1206       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1207       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1208       start += count[i];
1209     }
1210 
1211     ierr = PetscFree(count);
1212     ierr = PetscFree(indices);
1213     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1214     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1215 
1216   }
1217 
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "PCASMDestroySubdomains"
1223 /*@C
1224    PCASMDestroySubdomains - Destroys the index sets created with
1225    PCASMCreateSubdomains(). Should be called after setting subdomains
1226    with PCASMSetLocalSubdomains().
1227 
1228    Collective
1229 
1230    Input Parameters:
1231 +  n - the number of index sets
1232 .  is - the array of index sets
1233 -  is_local - the array of local index sets, can be PETSC_NULL
1234 
1235    Level: advanced
1236 
1237 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1238 
1239 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1240 @*/
1241 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1242 {
1243   PetscInt       i;
1244   PetscErrorCode ierr;
1245   PetscFunctionBegin;
1246   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1247   PetscValidPointer(is,2);
1248   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1249   ierr = PetscFree(is);CHKERRQ(ierr);
1250   if (is_local) {
1251     PetscValidPointer(is_local,3);
1252     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1253     ierr = PetscFree(is_local);CHKERRQ(ierr);
1254   }
1255   PetscFunctionReturn(0);
1256 }
1257 
1258 #undef __FUNCT__
1259 #define __FUNCT__ "PCASMCreateSubdomains2D"
1260 /*@
1261    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1262    preconditioner for a two-dimensional problem on a regular grid.
1263 
1264    Not Collective
1265 
1266    Input Parameters:
1267 +  m, n - the number of mesh points in the x and y directions
1268 .  M, N - the number of subdomains in the x and y directions
1269 .  dof - degrees of freedom per node
1270 -  overlap - overlap in mesh lines
1271 
1272    Output Parameters:
1273 +  Nsub - the number of subdomains created
1274 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1275 -  is_local - array of index sets defining non-overlapping subdomains
1276 
1277    Note:
1278    Presently PCAMSCreateSubdomains2d() is valid only for sequential
1279    preconditioners.  More general related routines are
1280    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1281 
1282    Level: advanced
1283 
1284 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1285 
1286 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1287           PCASMSetOverlap()
1288 @*/
1289 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1290 {
1291   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1292   PetscErrorCode ierr;
1293   PetscInt       nidx,*idx,loc,ii,jj,count;
1294 
1295   PetscFunctionBegin;
1296   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1297 
1298   *Nsub = N*M;
1299   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1300   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1301   ystart = 0;
1302   loc_outer = 0;
1303   for (i=0; i<N; i++) {
1304     height = n/N + ((n % N) > i); /* height of subdomain */
1305     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1306     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1307     yright = ystart + height + overlap; if (yright > n) yright = n;
1308     xstart = 0;
1309     for (j=0; j<M; j++) {
1310       width = m/M + ((m % M) > j); /* width of subdomain */
1311       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1312       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1313       xright = xstart + width + overlap; if (xright > m) xright = m;
1314       nidx   = (xright - xleft)*(yright - yleft);
1315       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1316       loc    = 0;
1317       for (ii=yleft; ii<yright; ii++) {
1318         count = m*ii + xleft;
1319         for (jj=xleft; jj<xright; jj++) {
1320           idx[loc++] = count++;
1321         }
1322       }
1323       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1324       if (overlap == 0) {
1325         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1326         (*is_local)[loc_outer] = (*is)[loc_outer];
1327       } else {
1328         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1329           for (jj=xstart; jj<xstart+width; jj++) {
1330             idx[loc++] = m*ii + jj;
1331           }
1332         }
1333         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1334       }
1335       ierr = PetscFree(idx);CHKERRQ(ierr);
1336       xstart += width;
1337       loc_outer++;
1338     }
1339     ystart += height;
1340   }
1341   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 #undef __FUNCT__
1346 #define __FUNCT__ "PCASMGetLocalSubdomains"
1347 /*@C
1348     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1349     only) for the additive Schwarz preconditioner.
1350 
1351     Not Collective
1352 
1353     Input Parameter:
1354 .   pc - the preconditioner context
1355 
1356     Output Parameters:
1357 +   n - the number of subdomains for this processor (default value = 1)
1358 .   is - the index sets that define the subdomains for this processor
1359 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1360 
1361 
1362     Notes:
1363     The IS numbering is in the parallel, global numbering of the vector.
1364 
1365     Level: advanced
1366 
1367 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1368 
1369 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1370           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1371 @*/
1372 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1373 {
1374   PC_ASM         *osm;
1375   PetscErrorCode ierr;
1376   PetscBool      match;
1377 
1378   PetscFunctionBegin;
1379   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1380   PetscValidIntPointer(n,2);
1381   if (is) PetscValidPointer(is,3);
1382   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1383   if (!match) {
1384     if (n)  *n  = 0;
1385     if (is) *is = PETSC_NULL;
1386   } else {
1387     osm = (PC_ASM*)pc->data;
1388     if (n)  *n  = osm->n_local_true;
1389     if (is) *is = osm->is;
1390     if (is_local) *is_local = osm->is_local;
1391   }
1392   PetscFunctionReturn(0);
1393 }
1394 
1395 #undef __FUNCT__
1396 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1397 /*@C
1398     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1399     only) for the additive Schwarz preconditioner.
1400 
1401     Not Collective
1402 
1403     Input Parameter:
1404 .   pc - the preconditioner context
1405 
1406     Output Parameters:
1407 +   n - the number of matrices for this processor (default value = 1)
1408 -   mat - the matrices
1409 
1410 
1411     Level: advanced
1412 
1413 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1414 
1415 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1416           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1417 @*/
1418 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1419 {
1420   PC_ASM         *osm;
1421   PetscErrorCode ierr;
1422   PetscBool      match;
1423 
1424   PetscFunctionBegin;
1425   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1426   PetscValidIntPointer(n,2);
1427   if (mat) PetscValidPointer(mat,3);
1428   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1429   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1430   if (!match) {
1431     if (n)   *n   = 0;
1432     if (mat) *mat = PETSC_NULL;
1433   } else {
1434     osm = (PC_ASM*)pc->data;
1435     if (n)   *n   = osm->n_local_true;
1436     if (mat) *mat = osm->pmat;
1437   }
1438   PetscFunctionReturn(0);
1439 }
1440