xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 6ac5842e34eedc6428162d8d42bedaaf46eae34c)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
/* Private context for the additive Schwarz (PCASM) preconditioner. */
typedef struct {
  PetscInt   n, n_local, n_local_true; /* see file header: n = total blocks over all ranks, n_local = max blocks on any rank, n_local_true = blocks on this rank */
  PetscInt   overlap;             /* overlap requested by user (negative means user supplied already-overlapping subdomains) */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
} PC_ASM;
33 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints information about the ASM preconditioner to a viewer.

   Handles ASCII and string viewers.  With ASCII output, if all blocks use
   the same local solver only the first block's KSP is shown (by rank 0);
   otherwise each rank prints every one of its own block solvers through a
   singleton sub-viewer.  All viewer calls below are collective, so every
   rank must execute the same sequence of Get/Restore/Flush calls.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Fallback texts used when overlap / subdomain counts have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        /* Only rank 0 views its first block solver as the representative */
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      /* Loop to n_local (max over all ranks) so every rank participates in the collective singleton calls,
         even if it has fewer than n_local true blocks */
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
95 
96 #undef __FUNCT__
97 #define __FUNCT__ "PCASMPrintSubdomains"
98 static PetscErrorCode PCASMPrintSubdomains(PC pc)
99 {
100   PC_ASM         *osm = (PC_ASM*)pc->data;
101   const char     *prefix;
102   char           fname[PETSC_MAX_PATH_LEN+1];
103   PetscViewer    viewer, sviewer;
104   char           *s;
105   PetscInt       i,j,nidx;
106   const PetscInt *idx;
107   PetscMPIInt    rank, size;
108   PetscErrorCode ierr;
109 
110   PetscFunctionBegin;
111   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
112   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
113   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
114   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
115   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
116   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
117   for (i=0; i<osm->n_local; i++) {
118     if (i < osm->n_local_true) {
119       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
120       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
121       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
122       ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+512), &s);CHKERRQ(ierr);
123       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
124       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
125       for (j=0; j<nidx; j++) {
126         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
127       }
128       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
129       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
130       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
131       ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);CHKERRQ(ierr);
132       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
133       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);CHKERRQ(ierr);
135       ierr = PetscFree(s);CHKERRQ(ierr);
136       if (osm->is_local) {
137         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
138         ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+512), &s);CHKERRQ(ierr);
139         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
140         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
141         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
142         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
143         for (j=0; j<nidx; j++) {
144           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
145         }
146         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
147         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
148         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
149         ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);CHKERRQ(ierr);
150         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
151         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
152         ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);CHKERRQ(ierr);
153         ierr = PetscFree(s);CHKERRQ(ierr);
154       }
155     } else {
156       /* Participate in collective viewer calls. */
157       ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);CHKERRQ(ierr);
158       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
159       ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);CHKERRQ(ierr);
160       /* Assume either all ranks have is_local or none do. */
161       if (osm->is_local) {
162         ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);CHKERRQ(ierr);
163         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
164         ierr = PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);CHKERRQ(ierr);
165       }
166     }
167   }
168   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
169   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
170   PetscFunctionReturn(0);
171 }
172 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds everything the ASM preconditioner needs:

     1. On first setup: determines the subdomain index sets (from a DM if
        allowed, else one subdomain per process), increases their overlap,
        creates the restriction/localization/prolongation scatters and the
        per-block work vectors, and creates one sequential KSP per block.
     2. On every setup: extracts the submatrices (reusing storage when the
        nonzero pattern is unchanged), lets the user modify them, and hands
        them to the block solvers.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* If the user did not choose a type, use the cheaper basic (symmetric) form for known-symmetric matrices */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took references, so release the DM's copies here */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    /* With more than one local block, remember the non-overlapping sets before overlap is added below */
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    /* firstRow accumulates the non-overlapping block sizes; m_local is assigned in the body before the increment runs */
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* Express the non-overlapping indices in the local numbering of the overlapping block */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* No distinct non-overlapping sets: reuse y and the restriction scatter (reference counted) */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* The non-overlapping blocks must exactly tile this process's ownership range */
    if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %d != %d", firstRow, lastRow);
    /* Create empty scatters/vectors for the "padding" blocks so collective scatter calls stay matched */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc(osm->n_local_true*sizeof(KSP*),&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        /* Block solvers are configured with the "<prefix>sub_" options prefix */
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
395 
396 #undef __FUNCT__
397 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
398 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
399 {
400   PC_ASM         *osm = (PC_ASM*)pc->data;
401   PetscErrorCode ierr;
402   PetscInt       i;
403 
404   PetscFunctionBegin;
405   for (i=0; i<osm->n_local_true; i++) {
406     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
407   }
408   PetscFunctionReturn(0);
409 }
410 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: restrict x to
   each (overlapping) subdomain, solve with the block KSP, and add the
   results back into y.  Scatters are deliberately overlapped (all Begins
   first, Ends interleaved with solves) to hide communication; the loop
   bounds n_local vs n_local_true keep the collective scatters matched on
   ranks with fewer true blocks.  Do not reorder these calls.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  /* start all restrictions so communication overlaps the local solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: keep only the non-overlapping part of the block solution */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
458 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner.
   Mirrors PCApply_ASM() but uses KSPSolveTranspose() and swaps which of
   RESTRICT/INTERPOLATE reduces the forward vs reverse scatter, since the
   restriction and prolongation operators exchange roles under transposition.
   The scatter Begin/End ordering must not be changed (communication is
   overlapped with the local solves).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* start all restrictions so communication overlaps the local solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* drop the overlap region: keep only the non-overlapping part of the block solution */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "PCReset_ASM"
512 static PetscErrorCode PCReset_ASM(PC pc)
513 {
514   PC_ASM         *osm = (PC_ASM*)pc->data;
515   PetscErrorCode ierr;
516   PetscInt       i;
517 
518   PetscFunctionBegin;
519   if (osm->ksp) {
520     for (i=0; i<osm->n_local_true; i++) {
521       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
522     }
523   }
524   if (osm->pmat) {
525     if (osm->n_local_true > 0) {
526       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
527     }
528   }
529   if (osm->restriction) {
530     for (i=0; i<osm->n_local; i++) {
531       ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
532       if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
533       ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
534       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
535       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
536       ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
537     }
538     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
539     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
540     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
541     ierr = PetscFree(osm->x);CHKERRQ(ierr);
542     ierr = PetscFree(osm->y);CHKERRQ(ierr);
543     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
544   }
545   ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
546 
547   osm->is       = 0;
548   osm->is_local = 0;
549   PetscFunctionReturn(0);
550 }
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "PCDestroy_ASM"
554 static PetscErrorCode PCDestroy_ASM(PC pc)
555 {
556   PC_ASM         *osm = (PC_ASM*)pc->data;
557   PetscErrorCode ierr;
558   PetscInt       i;
559 
560   PetscFunctionBegin;
561   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
562   if (osm->ksp) {
563     for (i=0; i<osm->n_local_true; i++) {
564       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
565     }
566     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
567   }
568   ierr = PetscFree(pc->data);CHKERRQ(ierr);
569   PetscFunctionReturn(0);
570 }
571 
#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
/*
   PCSetFromOptions_ASM - Processes the ASM command-line options
   (-pc_asm_dm_subdomains, -pc_asm_blocks, -pc_asm_overlap, -pc_asm_type).
   Explicitly setting blocks or overlap disables DM-defined subdomains.
*/
static PetscErrorCode PCSetFromOptions_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    /* an explicit block count overrides any DM decomposition */
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    /* an explicit overlap likewise overrides any DM decomposition */
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
606 
607 /*------------------------------------------------------------------------------------*/
608 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Records n user-supplied local subdomains:
   is[] are the overlapping sets, is_local[] (optional) the non-overlapping
   ones.  References are taken on all supplied index sets; any previously
   stored sets are released.  Must be called before PCSetUp() (calling it
   afterward with the same n and no new is[] is a silent no-op).
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the incoming sets before destroying the old ones, in case they overlap */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* only is_local given: seed osm->is from it (copied when overlap will enlarge it, shared otherwise) */
      if (!is) {
        ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
658 
659 #undef __FUNCT__
660 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
661 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
662 {
663   PC_ASM         *osm = (PC_ASM*)pc->data;
664   PetscErrorCode ierr;
665   PetscMPIInt    rank,size;
666   PetscInt       n;
667 
668   PetscFunctionBegin;
669   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
670   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
671 
672   /*
673      Split the subdomains equally among all processors
674   */
675   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
676   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
677   n    = N/size + ((N % size) > rank);
678   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
679   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
680   if (!pc->setupcalled) {
681     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
682 
683     osm->n_local_true = n;
684     osm->is           = 0;
685     osm->is_local     = 0;
686   }
687   PetscFunctionReturn(0);
688 }
689 
690 #undef __FUNCT__
691 #define __FUNCT__ "PCASMSetOverlap_ASM"
692 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
693 {
694   PC_ASM *osm = (PC_ASM*)pc->data;
695 
696   PetscFunctionBegin;
697   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
698   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
699   if (!pc->setupcalled) osm->overlap = ovl;
700   PetscFunctionReturn(0);
701 }
702 
703 #undef __FUNCT__
704 #define __FUNCT__ "PCASMSetType_ASM"
705 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
706 {
707   PC_ASM *osm = (PC_ASM*)pc->data;
708 
709   PetscFunctionBegin;
710   osm->type     = type;
711   osm->type_set = PETSC_TRUE;
712   PetscFunctionReturn(0);
713 }
714 
715 #undef __FUNCT__
716 #define __FUNCT__ "PCASMSetSortIndices_ASM"
717 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
718 {
719   PC_ASM *osm = (PC_ASM*)pc->data;
720 
721   PetscFunctionBegin;
722   osm->sort_indices = doSort;
723   PetscFunctionReturn(0);
724 }
725 
726 #undef __FUNCT__
727 #define __FUNCT__ "PCASMGetSubKSP_ASM"
728 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
729 {
730   PC_ASM         *osm = (PC_ASM*)pc->data;
731   PetscErrorCode ierr;
732 
733   PetscFunctionBegin;
734   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
735 
736   if (n_local) *n_local = osm->n_local_true;
737   if (first_local) {
738     ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
739     *first_local -= osm->n_local_true;
740   }
741   if (ksp) {
742     /* Assume that local solves are now different; not necessarily
743        true though!  This flag is used only for PCView_ASM() */
744     *ksp                   = osm->ksp;
745     osm->same_local_solves = PETSC_FALSE;
746   }
747   PetscFunctionReturn(0);
748 }
749 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    Must be called before PCSetUp() is invoked on this PC.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCASMSetLocalSubdomains_ASM(); silently does nothing if pc is not PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
788 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCASMSetTotalSubdomains_ASM(); silently does nothing if pc is not PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
839 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatches to PCASMSetOverlap_ASM(); silently does nothing if pc is not PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
891 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatches to PCASMSetType_ASM(); silently does nothing if pc is not PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
930 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, sort, indices

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatches to PCASMSetSortIndices_ASM(); silently does nothing if pc is not PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
959 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (not Try): it is an error to call this on a non-ASM PC */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1004 
1005 /* -------------------------------------------------------------------------------------*/
1006 /*MC
1007    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1008            its own KSP object.
1009 
1010    Options Database Keys:
1011 +  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
1012 .  -pc_asm_blocks <blks> - Sets total blocks
1013 .  -pc_asm_overlap <ovl> - Sets overlap
1014 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1015 
1016      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1017       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1018       -pc_asm_type basic to use the standard ASM.
1019 
1020    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1021      than one processor. Defaults to one block per processor.
1022 
1023      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1024         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1025 
1026      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1027          and set the options directly on the resulting KSP object (you can access its PC
1028          with KSPGetPC())
1029 
1030 
1031    Level: beginner
1032 
1033    Concepts: additive Schwarz method
1034 
1035     References:
1036     An additive variant of the Schwarz alternating method for the case of many subregions
1037     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1038 
1039     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1040     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1041 
1042 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1043            PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1044            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1045 
1046 M*/
1047 
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
/* Constructor for PCASM: allocates the PC_ASM context, sets default options,
   installs the function table, and composes the PCASMSetXXX/GetXXX methods. */
PETSC_EXTERN_C PetscErrorCode  PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);

  /* Default state: subdomain layout decided at PCSetUp time, overlap 1,
     restricted ASM variant, sorted indices, no DM-defined subdomains */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;

  /* Install the PCASM implementations of the generic PC operations */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* Compose the type-specific methods reached via PetscTryMethod/PetscUseMethod */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1097 
#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains"
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscErrorCode  (*f)(Mat,Mat*);
  PetscMPIInt     size;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* the local row range must be block-aligned or block splitting is impossible */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**) (void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  } else if (size == 1) {
    /* sequential matrix: the whole matrix IS the diagonal block */
    Ad = A;
  }
  if (Ad) {
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* CURRENT and SQUARE are trivial partitioners; only a real one is worth the work below */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; MATMPIADJ takes ownership of iia/jja */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* caller frees this array via PCASMDestroySubdomains() */
  ierr   = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* floor(mbs/n) blocks each; first mbs%n subdomains get one extra */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    /* after the sort, indices[] lists local rows grouped by destination partition */
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1282 
1283 #undef __FUNCT__
1284 #define __FUNCT__ "PCASMDestroySubdomains"
1285 /*@C
1286    PCASMDestroySubdomains - Destroys the index sets created with
1287    PCASMCreateSubdomains(). Should be called after setting subdomains
1288    with PCASMSetLocalSubdomains().
1289 
1290    Collective
1291 
1292    Input Parameters:
1293 +  n - the number of index sets
1294 .  is - the array of index sets
1295 -  is_local - the array of local index sets, can be NULL
1296 
1297    Level: advanced
1298 
1299 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1300 
1301 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1302 @*/
1303 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1304 {
1305   PetscInt       i;
1306   PetscErrorCode ierr;
1307 
1308   PetscFunctionBegin;
1309   if (n <= 0) PetscFunctionReturn(0);
1310   if (is) {
1311     PetscValidPointer(is,2);
1312     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1313     ierr = PetscFree(is);CHKERRQ(ierr);
1314   }
1315   if (is_local) {
1316     PetscValidPointer(is_local,3);
1317     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1318     ierr = PetscFree(is_local);CHKERRQ(ierr);
1319   }
1320   PetscFunctionReturn(0);
1321 }
1322 
#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCAMSCreateSubdomains2d() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* only one degree of freedom per node is implemented */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  /* NOTE(review): sizeof(IS*) used where sizeof(IS) is meant; harmless since IS is itself a pointer type */
  ierr      = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
  ierr      = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend the subdomain by 'overlap' mesh lines, clipped to the grid */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      /* gather the natural (row-major) indices of the overlapped rectangle */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* no overlap: share the same IS for both arrays (extra reference) */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx (the non-overlapped rectangle is never larger) for the local set */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* indices were generated row-by-row over the rectangle; sort for PCASM */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1408 
1409 #undef __FUNCT__
1410 #define __FUNCT__ "PCASMGetLocalSubdomains"
1411 /*@C
1412     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1413     only) for the additive Schwarz preconditioner.
1414 
1415     Not Collective
1416 
1417     Input Parameter:
1418 .   pc - the preconditioner context
1419 
1420     Output Parameters:
1421 +   n - the number of subdomains for this processor (default value = 1)
1422 .   is - the index sets that define the subdomains for this processor
1423 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1424 
1425 
1426     Notes:
1427     The IS numbering is in the parallel, global numbering of the vector.
1428 
1429     Level: advanced
1430 
1431 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1432 
1433 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1434           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1435 @*/
1436 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1437 {
1438   PC_ASM         *osm;
1439   PetscErrorCode ierr;
1440   PetscBool      match;
1441 
1442   PetscFunctionBegin;
1443   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1444   PetscValidIntPointer(n,2);
1445   if (is) PetscValidPointer(is,3);
1446   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1447   if (!match) {
1448     if (n) *n = 0;
1449     if (is) *is = NULL;
1450   } else {
1451     osm = (PC_ASM*)pc->data;
1452     if (n) *n = osm->n_local_true;
1453     if (is) *is = osm->is;
1454     if (is_local) *is_local = osm->is_local;
1455   }
1456   PetscFunctionReturn(0);
1457 }
1458 
1459 #undef __FUNCT__
1460 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1461 /*@C
1462     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1463     only) for the additive Schwarz preconditioner.
1464 
1465     Not Collective
1466 
1467     Input Parameter:
1468 .   pc - the preconditioner context
1469 
1470     Output Parameters:
1471 +   n - the number of matrices for this processor (default value = 1)
1472 -   mat - the matrices
1473 
1474 
1475     Level: advanced
1476 
1477     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1478 
1479            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1480 
1481 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1482 
1483 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1484           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1485 @*/
1486 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1487 {
1488   PC_ASM         *osm;
1489   PetscErrorCode ierr;
1490   PetscBool      match;
1491 
1492   PetscFunctionBegin;
1493   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1494   PetscValidIntPointer(n,2);
1495   if (mat) PetscValidPointer(mat,3);
1496   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1497   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1498   if (!match) {
1499     if (n) *n = 0;
1500     if (mat) *mat = NULL;
1501   } else {
1502     osm = (PC_ASM*)pc->data;
1503     if (n) *n = osm->n_local_true;
1504     if (mat) *mat = osm->pmat;
1505   }
1506   PetscFunctionReturn(0);
1507 }
1508 
1509 #undef __FUNCT__
1510 #define __FUNCT__ "PCASMSetDMSubdomains"
1511 /*@
1512     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1513     Logically Collective
1514 
1515     Input Parameter:
1516 +   pc  - the preconditioner
1517 -   flg - boolean indicating whether to use subdomains defined by the DM
1518 
1519     Options Database Key:
1520 .   -pc_asm_dm_subdomains
1521 
1522     Level: intermediate
1523 
1524     Notes:
1525     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1526     so setting either of the first two effectively turns the latter off.
1527 
1528 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1529 
1530 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1531           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1532 @*/
1533 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1534 {
1535   PC_ASM         *osm = (PC_ASM*)pc->data;
1536   PetscErrorCode ierr;
1537   PetscBool      match;
1538 
1539   PetscFunctionBegin;
1540   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1541   PetscValidLogicalCollectiveBool(pc,flg,2);
1542   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1543   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1544   if (match) {
1545     osm->dm_subdomains = flg;
1546   }
1547   PetscFunctionReturn(0);
1548 }
1549 
1550 #undef __FUNCT__
1551 #define __FUNCT__ "PCASMGetDMSubdomains"
1552 /*@
1553     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1554     Not Collective
1555 
1556     Input Parameter:
1557 .   pc  - the preconditioner
1558 
1559     Output Parameter:
1560 .   flg - boolean indicating whether to use subdomains defined by the DM
1561 
1562     Level: intermediate
1563 
1564 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1565 
1566 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1567           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1568 @*/
1569 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1570 {
1571   PC_ASM         *osm = (PC_ASM*)pc->data;
1572   PetscErrorCode ierr;
1573   PetscBool      match;
1574 
1575   PetscFunctionBegin;
1576   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1577   PetscValidPointer(flg,2);
1578   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1579   if (match) {
1580     if (flg) *flg = osm->dm_subdomains;
1581   }
1582   PetscFunctionReturn(0);
1583 }
1584