xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision d736bfeb4d37a01fcbdf00fe73fb60d6f0ba2142)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
14 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
15 
/* Private context for the additive Schwarz (ASM) preconditioner; one per PC. */
typedef struct {
  PetscInt   n, n_local, n_local_true; /* see file header: n = global #subdomains, n_local = max over ranks, n_local_true = #subdomains on this rank */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors (subdomain RHS, solution, and non-overlapping solution) */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;
32 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints the state of the ASM preconditioner (block counts,
   overlap, type, and the per-block KSP solvers) to an ASCII or string viewer.

   Collective on PC.  Note the PetscViewerGetSingleton()/RestoreSingleton()
   pairs are collective and must be called by every rank, even when only
   rank 0 actually prints - do not restructure the call order.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;


  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (osm->n > 0) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",osm->n,osm->overlap);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",osm->overlap);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* all blocks use the same solver: show block 0 once, from rank 0 only */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* solvers may differ per block: every rank prints its own blocks */
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      /* loop to n_local (the max over ranks) so the collective singleton
         calls stay matched even on ranks with fewer true blocks */
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "PCASMPrintSubdomains"
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm  = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer;
105   PetscInt       i,j,nidx;
106   const PetscInt *idx;
107   PetscErrorCode ierr;
108 
109   PetscFunctionBegin;
110   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
111   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
112   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
113   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
114   for (i=0;i<osm->n_local_true;i++) {
115     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
116     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
117     for (j=0; j<nidx; j++) {
118       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
119     }
120     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
121     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
122     if (osm->is_local) {
123       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
124       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
125       for (j=0; j<nidx; j++) {
126         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
127       }
128       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
129       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
130     }
131   }
132   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
133   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "PCSetUp_ASM"
139 static PetscErrorCode PCSetUp_ASM(PC pc)
140 {
141   PC_ASM         *osm  = (PC_ASM*)pc->data;
142   PetscErrorCode ierr;
143   PetscTruth     symset,flg;
144   PetscInt       i,m,m_local,firstRow,lastRow;
145   PetscMPIInt    size;
146   MatReuse       scall = MAT_REUSE_MATRIX;
147   IS             isl;
148   KSP            ksp;
149   PC             subpc;
150   const char     *prefix,*pprefix;
151   Vec            vec;
152 
153   PetscFunctionBegin;
154   if (!pc->setupcalled) {
155 
156     if (!osm->type_set) {
157       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
158       if (symset && flg) { osm->type = PC_ASM_BASIC; }
159     }
160 
161     if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
162       /* no subdomains given, use one per processor */
163       osm->n_local = osm->n_local_true = 1;
164       ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
165       osm->n = size;
166     } else if (osm->n == PETSC_DECIDE) {
167       /* determine global number of subdomains */
168       PetscInt inwork[2],outwork[2];
169       inwork[0] = inwork[1] = osm->n_local_true;
170       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
171       osm->n_local = outwork[0];
172       osm->n       = outwork[1];
173     }
174 
175     if (!osm->is){ /* create the index sets */
176       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
177     }
178     if (osm->n_local_true > 1 && !osm->is_local) {
179       ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
180       for (i=0; i<osm->n_local_true; i++) {
181         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
182           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
183           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
184         } else {
185           ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
186           osm->is_local[i] = osm->is[i];
187         }
188       }
189     }
190     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
191     flg  = PETSC_FALSE;
192     ierr = PetscOptionsGetTruth(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
193     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
194 
195     /*  Extend the "overlapping" regions by a number of steps  */
196     ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
197     if (osm->sort_indices) {
198       for (i=0; i<osm->n_local_true; i++) {
199         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
200         if (osm->is_local) {
201           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
202         }
203       }
204     }
205 
206     /* Create the local work vectors and scatter contexts */
207     ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
208     ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->restriction);CHKERRQ(ierr);
209     if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->localization);CHKERRQ(ierr);}
210     ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->prolongation);CHKERRQ(ierr);
211     ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->x);CHKERRQ(ierr);
212     ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y);CHKERRQ(ierr);
213     ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y_local);CHKERRQ(ierr);
214     ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
215     for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
216       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
217       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
218       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
219       ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
220       ierr = ISDestroy(isl);CHKERRQ(ierr);
221       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
222       if (osm->is_local) {
223         ISLocalToGlobalMapping ltog;
224         IS                     isll;
225         const PetscInt         *idx_local;
226         PetscInt               *idx,nout;
227 
228         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
229         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
230         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
231         ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
232         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
233         ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
234         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
235         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
236         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,&isll);CHKERRQ(ierr);
237         ierr = PetscFree(idx);CHKERRQ(ierr);
238         ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
239         ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
240         ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
241         ierr = ISDestroy(isll);CHKERRQ(ierr);
242 
243         ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
244         ierr = ISDestroy(isl);CHKERRQ(ierr);
245       } else {
246         ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
247         osm->y_local[i] = osm->y[i];
248         ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
249         osm->prolongation[i] = osm->restriction[i];
250         ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
251       }
252     }
253     if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
254     for (i=osm->n_local_true; i<osm->n_local; i++) {
255       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
256       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
257       ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
258       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
259       ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
260       if (osm->is_local) {
261         ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
262         ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
263       } else {
264         osm->prolongation[i] = osm->restriction[i];
265         ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
266       }
267       ierr = ISDestroy(isl);CHKERRQ(ierr);
268     }
269     ierr = VecDestroy(vec);CHKERRQ(ierr);
270 
271     /* Create the local solvers */
272     ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
273     for (i=0; i<osm->n_local_true; i++) {
274       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
275       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
276       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
277       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
278       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
279       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
280       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
281       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
282       osm->ksp[i] = ksp;
283     }
284     scall = MAT_INITIAL_MATRIX;
285 
286   } else {
287     /*
288        Destroy the blocks from the previous iteration
289     */
290     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
291       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
292       scall = MAT_INITIAL_MATRIX;
293     }
294   }
295 
296   /*
297      Extract out the submatrices
298   */
299   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
300   if (scall == MAT_INITIAL_MATRIX) {
301     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
302     for (i=0; i<osm->n_local_true; i++) {
303       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
304       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
305     }
306   }
307 
308   /* Return control to the user so that the submatrices can be modified (e.g., to apply
309      different boundary conditions for the submatrices than for the global problem) */
310   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
311 
312   /*
313      Loop over subdomains putting them into local ksp
314   */
315   for (i=0; i<osm->n_local_true; i++) {
316     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
317     if (!pc->setupcalled) {
318       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
319     }
320   }
321 
322   PetscFunctionReturn(0);
323 }
324 
325 #undef __FUNCT__
326 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
327 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
328 {
329   PC_ASM         *osm = (PC_ASM*)pc->data;
330   PetscErrorCode ierr;
331   PetscInt       i;
332 
333   PetscFunctionBegin;
334   for (i=0; i<osm->n_local_true; i++) {
335     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
336   }
337   PetscFunctionReturn(0);
338 }
339 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restricts the
   input vector x to each overlapping subdomain, solves with the subdomain
   KSP, and adds the (possibly localized) corrections back into y.

   The scatter Begin/End calls are deliberately split across loops so that
   communication for all subdomains overlaps with the local solves - do not
   merge the loops.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions (including the padding blocks) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* trim the overlapping solution to the non-overlapping subdomain */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* complete the additive accumulation into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
389 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner: same pipelined structure as PCApply_ASM(), but with
   KSPSolveTranspose() for the block solves and the RESTRICT/INTERPOLATE
   roles swapped (see the note below).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  /* start all restrictions (including the padding blocks) */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* trim the overlapping solution to the non-overlapping subdomain */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* complete the additive accumulation into y */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
442 
443 #undef __FUNCT__
444 #define __FUNCT__ "PCDestroy_ASM"
445 static PetscErrorCode PCDestroy_ASM(PC pc)
446 {
447   PC_ASM         *osm = (PC_ASM*)pc->data;
448   PetscErrorCode ierr;
449   PetscInt       i;
450 
451   PetscFunctionBegin;
452   if (osm->ksp) {
453     for (i=0; i<osm->n_local_true; i++) {
454       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
455     }
456     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
457   }
458   if (osm->pmat) {
459     if (osm->n_local_true > 0) {
460       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
461     }
462   }
463   if (osm->restriction) {
464     for (i=0; i<osm->n_local; i++) {
465       ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
466       if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
467       ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
468       ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
469       ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
470       ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
471     }
472     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
473     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
474     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
475     ierr = PetscFree(osm->x);CHKERRQ(ierr);
476     ierr = PetscFree(osm->y);CHKERRQ(ierr);
477     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
478   }
479   if (osm->is) {
480     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
481   }
482   ierr = PetscFree(osm);CHKERRQ(ierr);
483   PetscFunctionReturn(0);
484 }
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "PCSetFromOptions_ASM"
488 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
489 {
490   PC_ASM         *osm = (PC_ASM*)pc->data;
491   PetscErrorCode ierr;
492   PetscInt       blocks,ovl;
493   PetscTruth     symset,flg;
494   PCASMType      asmtype;
495 
496   PetscFunctionBegin;
497   /* set the type to symmetric if matrix is symmetric */
498   if (!osm->type_set && pc->pmat) {
499     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
500     if (symset && flg) { osm->type = PC_ASM_BASIC; }
501   }
502   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
503     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
504     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
505     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
506     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
507     flg  = PETSC_FALSE;
508     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
509     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
510   ierr = PetscOptionsTail();CHKERRQ(ierr);
511   PetscFunctionReturn(0);
512 }
513 
514 /*------------------------------------------------------------------------------------*/
515 
516 EXTERN_C_BEGIN
517 #undef __FUNCT__
518 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
519 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
520 {
521   PC_ASM         *osm = (PC_ASM*)pc->data;
522   PetscErrorCode ierr;
523   PetscInt       i;
524 
525   PetscFunctionBegin;
526   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
527   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
528 
529   if (!pc->setupcalled) {
530     if (is) {
531       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
532     }
533     if (is_local) {
534       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
535     }
536     if (osm->is) {
537       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
538     }
539     osm->n_local_true = n;
540     osm->is           = 0;
541     osm->is_local     = 0;
542     if (is) {
543       ierr = PetscMalloc(n*sizeof(IS *),&osm->is);CHKERRQ(ierr);
544       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
545     }
546     if (is_local) {
547       ierr = PetscMalloc(n*sizeof(IS *),&osm->is_local);CHKERRQ(ierr);
548       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
549     }
550   }
551   PetscFunctionReturn(0);
552 }
553 EXTERN_C_END
554 
555 EXTERN_C_BEGIN
556 #undef __FUNCT__
557 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
558 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is)
559 {
560   PC_ASM         *osm = (PC_ASM*)pc->data;
561   PetscErrorCode ierr;
562   PetscMPIInt    rank,size;
563   PetscInt       n;
564 
565   PetscFunctionBegin;
566   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
567   if (is) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
568 
569   /*
570      Split the subdomains equally among all processors
571   */
572   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
573   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
574   n = N/size + ((N % size) > rank);
575   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
576   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
577   if (!pc->setupcalled) {
578     if (osm->is) {
579       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
580     }
581     osm->n_local_true = n;
582     osm->is           = 0;
583     osm->is_local     = 0;
584   }
585   PetscFunctionReturn(0);
586 }
587 EXTERN_C_END
588 
589 EXTERN_C_BEGIN
590 #undef __FUNCT__
591 #define __FUNCT__ "PCASMSetOverlap_ASM"
592 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
593 {
594   PC_ASM *osm = (PC_ASM*)pc->data;
595 
596   PetscFunctionBegin;
597   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
598   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
599   if (!pc->setupcalled) {
600     osm->overlap = ovl;
601   }
602   PetscFunctionReturn(0);
603 }
604 EXTERN_C_END
605 
606 EXTERN_C_BEGIN
607 #undef __FUNCT__
608 #define __FUNCT__ "PCASMSetType_ASM"
609 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
610 {
611   PC_ASM *osm = (PC_ASM*)pc->data;
612 
613   PetscFunctionBegin;
614   osm->type     = type;
615   osm->type_set = PETSC_TRUE;
616   PetscFunctionReturn(0);
617 }
618 EXTERN_C_END
619 
620 EXTERN_C_BEGIN
621 #undef __FUNCT__
622 #define __FUNCT__ "PCASMSetSortIndices_ASM"
623 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices_ASM(PC pc,PetscTruth doSort)
624 {
625   PC_ASM *osm = (PC_ASM*)pc->data;
626 
627   PetscFunctionBegin;
628   osm->sort_indices = doSort;
629   PetscFunctionReturn(0);
630 }
631 EXTERN_C_END
632 
633 EXTERN_C_BEGIN
634 #undef __FUNCT__
635 #define __FUNCT__ "PCASMGetSubKSP_ASM"
636 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
637 {
638   PC_ASM         *osm = (PC_ASM*)pc->data;
639   PetscErrorCode ierr;
640 
641   PetscFunctionBegin;
642   if (osm->n_local_true < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
643 
644   if (n_local) {
645     *n_local = osm->n_local_true;
646   }
647   if (first_local) {
648     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
649     *first_local -= osm->n_local_true;
650   }
651   if (ksp) {
652     /* Assume that local solves are now different; not necessarily
653        true though!  This flag is used only for PCView_ASM() */
654     *ksp                   = osm->ksp;
655     osm->same_local_solves = PETSC_FALSE;
656   }
657   PetscFunctionReturn(0);
658 }
659 EXTERN_C_END
660 
661 
662 #undef __FUNCT__
663 #define __FUNCT__ "PCASMSetLocalSubdomains"
664 /*@C
665     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
666     only) for the additive Schwarz preconditioner.
667 
668     Collective on PC
669 
670     Input Parameters:
671 +   pc - the preconditioner context
672 .   n - the number of subdomains for this processor (default value = 1)
673 .   is - the index sets that define the subdomains for this processor
674          (or PETSC_NULL for PETSc to determine subdomains)
675 -   is_local - the index sets that define the local part of the subdomains for this processor
676          (or PETSC_NULL to use the default of 1 subdomain per process)
677 
678     Notes:
679     The IS numbering is in the parallel, global numbering of the vector.
680 
681     By default the ASM preconditioner uses 1 block per processor.
682 
683     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
684 
685     Level: advanced
686 
687 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
688 
689 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
690           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
691 @*/
692 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
693 {
694   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);
695 
696   PetscFunctionBegin;
697   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
698   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
699   if (f) {
700     ierr = (*f)(pc,n,is,is_local);CHKERRQ(ierr);
701   }
702   PetscFunctionReturn(0);
703 }
704 
705 #undef __FUNCT__
706 #define __FUNCT__ "PCASMSetTotalSubdomains"
707 /*@C
708     PCASMSetTotalSubdomains - Sets the subdomains for all processor for the
709     additive Schwarz preconditioner.  Either all or no processors in the
710     PC communicator must call this routine, with the same index sets.
711 
712     Collective on PC
713 
714     Input Parameters:
715 +   pc - the preconditioner context
716 .   n - the number of subdomains for all processors
717 .   is - the index sets that define the subdomains for all processor
718          (or PETSC_NULL for PETSc to determine subdomains)
719 -   is_local - the index sets that define the local part of the subdomains for this processor
720          (or PETSC_NULL to use the default of 1 subdomain per process)
721 
722     Options Database Key:
723     To set the total number of subdomain blocks rather than specify the
724     index sets, use the option
725 .    -pc_asm_blocks <blks> - Sets total blocks
726 
727     Notes:
728     Currently you cannot use this to set the actual subdomains with the argument is.
729 
730     By default the ASM preconditioner uses 1 block per processor.
731 
732     These index sets cannot be destroyed until after completion of the
733     linear solves for which the ASM preconditioner is being used.
734 
735     Use PCASMSetLocalSubdomains() to set local subdomains.
736 
737     Level: advanced
738 
739 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
740 
741 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
742           PCASMCreateSubdomains2D()
743 @*/
744 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
745 {
746   PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);
747 
748   PetscFunctionBegin;
749   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
750   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
751   if (f) {
752     ierr = (*f)(pc,N,is,is_local);CHKERRQ(ierr);
753   }
754   PetscFunctionReturn(0);
755 }
756 
757 #undef __FUNCT__
758 #define __FUNCT__ "PCASMSetOverlap"
759 /*@
760     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
761     additive Schwarz preconditioner.  Either all or no processors in the
762     PC communicator must call this routine.
763 
764     Collective on PC
765 
766     Input Parameters:
767 +   pc  - the preconditioner context
768 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
769 
770     Options Database Key:
771 .   -pc_asm_overlap <ovl> - Sets overlap
772 
773     Notes:
774     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
776     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
777 
778     The overlap defaults to 1, so if one desires that no additional
779     overlap be computed beyond what may have been set with a call to
780     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
781     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
783     internally by PETSc, and using an overlap of 0 would result in an ASM
784     variant that is equivalent to the block Jacobi preconditioner.
785 
786     Note that one can define initial index sets with any overlap via
787     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
788     PCASMSetOverlap() merely allows PETSc to extend that overlap further
789     if desired.
790 
791     Level: intermediate
792 
793 .keywords: PC, ASM, set, overlap
794 
795 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
796           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
797 @*/
798 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
799 {
800   PetscErrorCode ierr,(*f)(PC,PetscInt);
801 
802   PetscFunctionBegin;
803   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
804   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
805   if (f) {
806     ierr = (*f)(pc,ovl);CHKERRQ(ierr);
807   }
808   PetscFunctionReturn(0);
809 }
810 
811 #undef __FUNCT__
812 #define __FUNCT__ "PCASMSetType"
813 /*@
814     PCASMSetType - Sets the type of restriction and interpolation used
815     for local problems in the additive Schwarz method.
816 
817     Collective on PC
818 
819     Input Parameters:
820 +   pc  - the preconditioner context
821 -   type - variant of ASM, one of
822 .vb
823       PC_ASM_BASIC       - full interpolation and restriction
824       PC_ASM_RESTRICT    - full restriction, local processor interpolation
825       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
826       PC_ASM_NONE        - local processor restriction and interpolation
827 .ve
828 
829     Options Database Key:
830 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
831 
832     Level: intermediate
833 
834 .keywords: PC, ASM, set, type
835 
836 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
837           PCASMCreateSubdomains2D()
838 @*/
839 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
840 {
841   PetscErrorCode ierr,(*f)(PC,PCASMType);
842 
843   PetscFunctionBegin;
844   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
845   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
846   if (f) {
847     ierr = (*f)(pc,type);CHKERRQ(ierr);
848   }
849   PetscFunctionReturn(0);
850 }
851 
852 #undef __FUNCT__
853 #define __FUNCT__ "PCASMSetSortIndices"
854 /*@
855     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
856 
857     Collective on PC
858 
859     Input Parameters:
860 +   pc  - the preconditioner context
861 -   doSort - sort the subdomain indices
862 
863     Level: intermediate
864 
865 .keywords: PC, ASM, set, type
866 
867 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
868           PCASMCreateSubdomains2D()
869 @*/
870 PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices(PC pc,PetscTruth doSort)
871 {
872   PetscErrorCode ierr,(*f)(PC,PetscTruth);
873 
874   PetscFunctionBegin;
875   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
876   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetSortIndices_C",(void (**)(void))&f);CHKERRQ(ierr);
877   if (f) {
878     ierr = (*f)(pc,doSort);CHKERRQ(ierr);
879   }
880   PetscFunctionReturn(0);
881 }
882 
883 #undef __FUNCT__
884 #define __FUNCT__ "PCASMGetSubKSP"
885 /*@C
886    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
887    this processor.
888 
889    Collective on PC iff first_local is requested
890 
891    Input Parameter:
892 .  pc - the preconditioner context
893 
894    Output Parameters:
895 +  n_local - the number of blocks on this processor or PETSC_NULL
896 .  first_local - the global number of the first block on this processor or PETSC_NULL,
897                  all processors must request or all must pass PETSC_NULL
898 -  ksp - the array of KSP contexts
899 
900    Note:
901    After PCASMGetSubKSP() the array of KSPes is not to be freed
902 
903    Currently for some matrix implementations only 1 block per processor
904    is supported.
905 
906    You must call KSPSetUp() before calling PCASMGetSubKSP().
907 
908    Level: advanced
909 
910 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
911 
912 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
913           PCASMCreateSubdomains2D(),
914 @*/
915 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
916 {
917   PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);
918 
919   PetscFunctionBegin;
920   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
921   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
922   if (f) {
923     ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
924   } else SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
925   PetscFunctionReturn(0);
926 }
927 
928 /* -------------------------------------------------------------------------------------*/
929 /*MC
930    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
931            its own KSP object.
932 
933    Options Database Keys:
934 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
935 .  -pc_asm_blocks <blks> - Sets total blocks
936 .  -pc_asm_overlap <ovl> - Sets overlap
937 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
938 
939      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
940       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
941       -pc_asm_type basic to use the standard ASM.
942 
943    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
944      than one processor. Defaults to one block per processor.
945 
946      To set options on the solvers for each block append -sub_ to all the KSP, and PC
947         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
948 
949      To set the options on the solvers separate for each block call PCASMGetSubKSP()
950          and set the options directly on the resulting KSP object (you can access its PC
951          with KSPGetPC())
952 
953 
954    Level: beginner
955 
956    Concepts: additive Schwarz method
957 
958     References:
959     An additive variant of the Schwarz alternating method for the case of many subregions
960     M Dryja, OB Widlund - Courant Institute, New York University Technical report
961 
962     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
963     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
964 
965 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
966            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
967            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
968 
969 M*/
970 
971 EXTERN_C_BEGIN
972 #undef __FUNCT__
973 #define __FUNCT__ "PCCreate_ASM"
/*
   PCCreate_ASM - Constructor for the additive Schwarz preconditioner context.

   Allocates the PC_ASM data structure, fills in the default settings,
   installs the ASM function table on the PC, and composes the
   type-specific implementations queried by the public PCASMSetXXX()/
   PCASMGetSubKSP() wrappers above.
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  /* Allocate and zero the context; charged to the PC for memory logging */
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;     /* total subdomain count decided at setup */
  osm->n_local           = 0;                /* max subdomains over all processes */
  osm->n_local_true      = 0;                /* actual subdomains on this process */
  osm->overlap           = 1;                /* default overlap of one mesh line */
  osm->ksp               = 0;                /* per-block solvers created in setup */
  osm->restriction       = 0;                /* scatters built lazily in PCSetUp_ASM */
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;                /* work vectors built lazily as well */
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;                /* overlapping subdomain index sets */
  osm->is_local          = 0;                /* non-overlapping sets; may stay NULL */
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;  /* restricted ASM is the default variant */
  osm->same_local_solves = PETSC_TRUE;       /* assumed until a sub-KSP is handed out */
  osm->sort_indices      = PETSC_TRUE;

  /* Install the ASM implementations of the generic PC operations */
  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;            /* no Richardson fast path for ASM */

  /* Compose the type-specific entry points found by PetscObjectQueryFunction()
     in the public PCASMSetXXX()/PCASMGetSubKSP() wrappers */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
                    PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1024 EXTERN_C_END
1025 
1026 
1027 #undef __FUNCT__
1028 #define __FUNCT__ "PCASMCreateSubdomains"
1029 /*@C
1030    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1032 
1033    Collective
1034 
1035    Input Parameters:
1036 +  A - The global matrix operator
1037 -  n - the number of local blocks
1038 
1039    Output Parameters:
1040 .  outis - the array of index sets defining the subdomains
1041 
1042    Level: advanced
1043 
1044    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1045     from these if you use PCASMSetLocalSubdomains()
1046 
1047     In the Fortran version you must provide the array outis[] already allocated of length n.
1048 
1049 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1050 
1051 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1052 @*/
1053 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1054 {
1055   MatPartitioning           mpart;
1056   const char                *prefix;
1057   PetscErrorCode            (*f)(Mat,PetscTruth*,MatReuse,Mat*);
1058   PetscMPIInt               size;
1059   PetscInt                  i,j,rstart,rend,bs;
1060   PetscTruth                iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1061   Mat                       Ad = PETSC_NULL, adj;
1062   IS                        ispart,isnumb,*is;
1063   PetscErrorCode            ierr;
1064 
1065   PetscFunctionBegin;
1066   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1067   PetscValidPointer(outis,3);
1068   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1069 
1070   /* Get prefix, row distribution, and block size */
1071   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1072   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1073   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1074   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1075 
1076   /* Get diagonal block from matrix if possible */
1077   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1078   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1079   if (f) {
1080     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1081   } else if (size == 1) {
1082     iscopy = PETSC_FALSE; Ad = A;
1083   } else {
1084     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1085   }
1086   if (Ad) {
1087     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1088     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1089   }
1090   if (Ad && n > 1) {
1091     PetscTruth match,done;
1092     /* Try to setup a good matrix partitioning if available */
1093     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1094     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1095     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1096     ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_CURRENT,&match);CHKERRQ(ierr);
1097     if (!match) {
1098       ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_SQUARE,&match);CHKERRQ(ierr);
1099     }
1100     if (!match) { /* assume a "good" partitioner is available */
1101       PetscInt na,*ia,*ja;
1102       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1103       if (done) {
1104 	/* Build adjacency matrix by hand. Unfortunately a call to
1105 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1106 	   remove the block-aij structure and we cannot expect
1107 	   MatPartitioning to split vertices as we need */
1108 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1109 	nnz = 0;
1110 	for (i=0; i<na; i++) { /* count number of nonzeros */
1111 	  len = ia[i+1] - ia[i];
1112 	  row = ja + ia[i];
1113 	  for (j=0; j<len; j++) {
1114 	    if (row[j] == i) { /* don't count diagonal */
1115 	      len--; break;
1116 	    }
1117 	  }
1118 	  nnz += len;
1119 	}
1120 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1121 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1122 	nnz    = 0;
1123 	iia[0] = 0;
1124 	for (i=0; i<na; i++) { /* fill adjacency */
1125 	  cnt = 0;
1126 	  len = ia[i+1] - ia[i];
1127 	  row = ja + ia[i];
1128 	  for (j=0; j<len; j++) {
1129 	    if (row[j] != i) { /* if not diagonal */
1130 	      jja[nnz+cnt++] = row[j];
1131 	    }
1132 	  }
1133 	  nnz += cnt;
1134 	  iia[i+1] = nnz;
1135 	}
1136 	/* Partitioning of the adjacency matrix */
1137 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1138 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1139 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1140 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1141 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1142 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1143 	foundpart = PETSC_TRUE;
1144       }
1145       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1146     }
1147     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1148   }
1149   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1150 
1151   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1152   *outis = is;
1153 
1154   if (!foundpart) {
1155 
1156     /* Partitioning by contiguous chunks of rows */
1157 
1158     PetscInt mbs   = (rend-rstart)/bs;
1159     PetscInt start = rstart;
1160     for (i=0; i<n; i++) {
1161       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1162       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1163       start += count;
1164     }
1165 
1166   } else {
1167 
1168     /* Partitioning by adjacency of diagonal block  */
1169 
1170     const PetscInt *numbering;
1171     PetscInt       *count,nidx,*indices,*newidx,start=0;
1172     /* Get node count in each partition */
1173     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1174     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1175     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1176       for (i=0; i<n; i++) count[i] *= bs;
1177     }
1178     /* Build indices from node numbering */
1179     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1180     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1181     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1182     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1183     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1184     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1185     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1186       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1187       for (i=0; i<nidx; i++)
1188 	for (j=0; j<bs; j++)
1189 	  newidx[i*bs+j] = indices[i]*bs + j;
1190       ierr = PetscFree(indices);CHKERRQ(ierr);
1191       nidx   *= bs;
1192       indices = newidx;
1193     }
1194     /* Shift to get global indices */
1195     for (i=0; i<nidx; i++) indices[i] += rstart;
1196 
1197     /* Build the index sets for each block */
1198     for (i=0; i<n; i++) {
1199       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],&is[i]);CHKERRQ(ierr);
1200       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1201       start += count[i];
1202     }
1203 
1204     ierr = PetscFree(count);
1205     ierr = PetscFree(indices);
1206     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1207     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1208 
1209   }
1210 
1211   PetscFunctionReturn(0);
1212 }
1213 
1214 #undef __FUNCT__
1215 #define __FUNCT__ "PCASMDestroySubdomains"
1216 /*@C
1217    PCASMDestroySubdomains - Destroys the index sets created with
1218    PCASMCreateSubdomains(). Should be called after setting subdomains
1219    with PCASMSetLocalSubdomains().
1220 
1221    Collective
1222 
1223    Input Parameters:
1224 +  n - the number of index sets
1225 .  is - the array of index sets
1226 -  is_local - the array of local index sets, can be PETSC_NULL
1227 
1228    Level: advanced
1229 
1230 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1231 
1232 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1233 @*/
1234 PetscErrorCode PETSCKSP_DLLEXPORT PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1235 {
1236   PetscInt       i;
1237   PetscErrorCode ierr;
1238   PetscFunctionBegin;
1239   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1240   PetscValidPointer(is,2);
1241   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1242   ierr = PetscFree(is);CHKERRQ(ierr);
1243   if (is_local) {
1244     PetscValidPointer(is_local,3);
1245     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1246     ierr = PetscFree(is_local);CHKERRQ(ierr);
1247   }
1248   PetscFunctionReturn(0);
1249 }
1250 
1251 #undef __FUNCT__
1252 #define __FUNCT__ "PCASMCreateSubdomains2D"
1253 /*@
1254    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1255    preconditioner for a two-dimensional problem on a regular grid.
1256 
1257    Not Collective
1258 
1259    Input Parameters:
1260 +  m, n - the number of mesh points in the x and y directions
1261 .  M, N - the number of subdomains in the x and y directions
1262 .  dof - degrees of freedom per node
1263 -  overlap - overlap in mesh lines
1264 
1265    Output Parameters:
1266 +  Nsub - the number of subdomains created
1267 -  is - the array of index sets defining the subdomains
1268 
1269    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1271    preconditioners.  More general related routines are
1272    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1273 
1274    Level: advanced
1275 
1276 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1277 
1278 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1279           PCASMSetOverlap()
1280 @*/
1281 PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
1282 {
1283   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outter;
1284   PetscErrorCode ierr;
1285   PetscInt       nidx,*idx,loc,ii,jj,count;
1286 
1287   PetscFunctionBegin;
1288   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1289 
1290   *Nsub = N*M;
1291   ierr = PetscMalloc((*Nsub)*sizeof(IS *),is);CHKERRQ(ierr);
1292   ystart = 0;
1293   loc_outter = 0;
1294   for (i=0; i<N; i++) {
1295     height = n/N + ((n % N) > i); /* height of subdomain */
1296     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1297     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1298     yright = ystart + height + overlap; if (yright > n) yright = n;
1299     xstart = 0;
1300     for (j=0; j<M; j++) {
1301       width = m/M + ((m % M) > j); /* width of subdomain */
1302       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1303       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1304       xright = xstart + width + overlap; if (xright > m) xright = m;
1305       nidx   = (xright - xleft)*(yright - yleft);
1306       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1307       loc    = 0;
1308       for (ii=yleft; ii<yright; ii++) {
1309         count = m*ii + xleft;
1310         for (jj=xleft; jj<xright; jj++) {
1311           idx[loc++] = count++;
1312         }
1313       }
1314       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outter++);CHKERRQ(ierr);
1315       ierr = PetscFree(idx);CHKERRQ(ierr);
1316       xstart += width;
1317     }
1318     ystart += height;
1319   }
1320   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1321   PetscFunctionReturn(0);
1322 }
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "PCASMGetLocalSubdomains"
1326 /*@C
1327     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1328     only) for the additive Schwarz preconditioner.
1329 
1330     Collective on PC
1331 
1332     Input Parameter:
1333 .   pc - the preconditioner context
1334 
1335     Output Parameters:
1336 +   n - the number of subdomains for this processor (default value = 1)
1337 .   is - the index sets that define the subdomains for this processor
1338 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1339 
1340 
1341     Notes:
1342     The IS numbering is in the parallel, global numbering of the vector.
1343 
1344     Level: advanced
1345 
1346 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1347 
1348 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1349           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1350 @*/
1351 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1352 {
1353   PC_ASM         *osm;
1354   PetscErrorCode ierr;
1355   PetscTruth     match;
1356 
1357   PetscFunctionBegin;
1358   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1359   PetscValidIntPointer(n,2);
1360   if (is) PetscValidPointer(is,3);
1361   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1362   if (!match) {
1363     if (n)  *n  = 0;
1364     if (is) *is = PETSC_NULL;
1365   } else {
1366     osm = (PC_ASM*)pc->data;
1367     if (n)  *n  = osm->n_local_true;
1368     if (is) *is = osm->is;
1369     if (is_local) *is_local = osm->is_local;
1370   }
1371   PetscFunctionReturn(0);
1372 }
1373 
1374 #undef __FUNCT__
1375 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1376 /*@C
1377     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1378     only) for the additive Schwarz preconditioner.
1379 
1380     Collective on PC
1381 
1382     Input Parameter:
1383 .   pc - the preconditioner context
1384 
1385     Output Parameters:
1386 +   n - the number of matrices for this processor (default value = 1)
1387 -   mat - the matrices
1388 
1389 
1390     Level: advanced
1391 
1392 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1393 
1394 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1395           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1396 @*/
1397 PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1398 {
1399   PC_ASM         *osm;
1400   PetscErrorCode ierr;
1401   PetscTruth     match;
1402 
1403   PetscFunctionBegin;
1404   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1405   PetscValidIntPointer(n,2);
1406   if (mat) PetscValidPointer(mat,3);
1407   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1408   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1409   if (!match) {
1410     if (n)   *n   = 0;
1411     if (mat) *mat = PETSC_NULL;
1412   } else {
1413     osm = (PC_ASM*)pc->data;
1414     if (n)   *n   = osm->n_local_true;
1415     if (mat) *mat = osm->pmat;
1416   }
1417   PetscFunctionReturn(0);
1418 }
1419