xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 680c5173fc5ccf4352d0ebdebcd816da6a3f83ed)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
typedef struct {
  PetscInt   n, n_local, n_local_true; /* n: total subdomains across all ranks; n_local: max over ranks of n_local_true (padding for collective scatters); n_local_true: subdomains actually on this rank */
  PetscInt   overlap;             /* overlap requested by user (levels of MatIncreaseOverlap); negative means user-defined overlap */
  KSP        *ksp;                /* linear solvers for each block (length n_local_true) */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; only allocated when is_local is set */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors (length n_local): subdomain RHS, subdomain solution, non-overlapping part of solution */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same (affects how PCView_ASM reports them) */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves (additive or multiplicative) */
  /* For multiplicative solve */
  Mat       *lmat;                /* submatrix for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors sized to the union of all local overlapping subdomains */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Reports the state of the additive Schwarz preconditioner.

   Collective on PC. Handles ASCII viewers (full report, including the
   subdomain KSP solvers) and string viewers (one-line summary); other
   viewer types are silently ignored.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Default strings are printed when overlap/subdomain counts are not yet determined */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks use the same solver configuration: print one representative
         KSP from rank 0 only.  Get/RestoreSubViewer are collective, so every
         rank must call them even though only rank 0 prints. */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers may differ per block: print every rank's blocks in rank order
         using synchronized output. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line summary plus a view of the representative (first) KSP */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 #undef __FUNCT__
101 #define __FUNCT__ "PCASMPrintSubdomains"
102 static PetscErrorCode PCASMPrintSubdomains(PC pc)
103 {
104   PC_ASM         *osm = (PC_ASM*)pc->data;
105   const char     *prefix;
106   char           fname[PETSC_MAX_PATH_LEN+1];
107   PetscViewer    viewer, sviewer;
108   char           *s;
109   PetscInt       i,j,nidx;
110   const PetscInt *idx;
111   PetscMPIInt    rank, size;
112   PetscErrorCode ierr;
113 
114   PetscFunctionBegin;
115   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
116   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
117   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
118   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
119   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
120   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
121   for (i=0; i<osm->n_local; i++) {
122     if (i < osm->n_local_true) {
123       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
124       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
125       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
126       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
127       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
128       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
129       for (j=0; j<nidx; j++) {
130         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
131       }
132       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
133       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
134       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
137       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
138       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
139       ierr = PetscFree(s);CHKERRQ(ierr);
140       if (osm->is_local) {
141         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
142         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
144         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
145         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
146         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
147         for (j=0; j<nidx; j++) {
148           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
149         }
150         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
151         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
152         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
153         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
155         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
156         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
157         ierr = PetscFree(s);CHKERRQ(ierr);
158       }
159     } else {
160       /* Participate in collective viewer calls. */
161       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
162       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
164       /* Assume either all ranks have is_local or none do. */
165       if (osm->is_local) {
166         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
167         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
169       }
170     }
171   }
172   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
173   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
174   PetscFunctionReturn(0);
175 }
176 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds all data needed to apply the additive Schwarz
   preconditioner: subdomain index sets, overlap extension, scatter contexts,
   work vectors, the per-block KSP solvers, and the extracted submatrices.

   On the first call (pc->setupcalled == 0) everything is created; on
   subsequent calls only the submatrices are re-extracted (re-created from
   scratch if the nonzero pattern changed).
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* For symmetric matrices default to PC_ASM_BASIC unless the user chose a type */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took references; release our copies of the
           names and index sets (domain_dm is kept for the subproblem KSPs below) */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    /* With multiple local blocks and no explicit non-overlapping sets, record
       the pre-overlap sets as the non-overlapping subdomains */
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* zero overlap: the sets coincide, so just share a reference */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      /* restriction: global vector -> overlapping subdomain vector */
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* localization: map the non-overlapping indices into the local
           numbering of the overlapping subdomain (is_local must be a subset of is) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        /* prolongation: non-overlapping subdomain vector -> global vector */
        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* no distinct non-overlapping sets: reuse y and the restriction scatter */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* padding blocks (n_local_true..n_local): empty vectors/scatters so this
       rank can participate in the collective scatters of ranks with more blocks */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        /* default to a single preconditioner application per block */
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      /* build the union of all local overlapping subdomains plus work vectors
         for the multiplicative residual update in PCApply_ASM() */
      PetscInt m;

      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatGetSubMatrices(pc->pmat, 1, &osm->lis, &osm->lis, scall, &osm->lmat);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
410 
411 #include <petsc/private/kspimpl.h>
412 #undef __FUNCT__
413 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
414 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
415 {
416   PC_ASM         *osm = (PC_ASM*)pc->data;
417   PetscErrorCode ierr;
418   PetscInt       i;
419 
420   PetscFunctionBegin;
421   for (i=0; i<osm->n_local_true; i++) {
422     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
423     if (osm->ksp[i]->reason == KSP_DIVERGED_PCSETUP_FAILED) {
424       pc->failedreason = PC_SUBPC_ERROR;
425     }
426   }
427   PetscFunctionReturn(0);
428 }
429 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

   Restricts x to each subdomain, solves each block with its KSP, and sums
   (or inserts, for restricted variants) the results back into y.  The
   composition of the local solves is either additive or multiplicative
   depending on osm->loctype.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    /* start all restrictions first so communication overlaps the local solves */
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* keep only the non-overlapping portion of the subdomain solution */
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves, each block seeing the residual updated by the previous blocks */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs: bring in the correction A*(-y_prev) computed at the end of the previous iteration */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* keep only the non-overlapping portion of the subdomain solution */
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* compute lx = -A_local * y_i so the next block's rhs becomes the updated residual */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmat[0], osm->ly, osm->lx);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
519 
520 #undef __FUNCT__
521 #define __FUNCT__ "PCApplyTranspose_ASM"
522 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
523 {
524   PC_ASM         *osm = (PC_ASM*)pc->data;
525   PetscErrorCode ierr;
526   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
527   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
528 
529   PetscFunctionBegin;
530   /*
531      Support for limiting the restriction or interpolation to only local
532      subdomain values (leaving the other values 0).
533 
534      Note: these are reversed from the PCApply_ASM() because we are applying the
535      transpose of the three terms
536   */
537   if (!(osm->type & PC_ASM_INTERPOLATE)) {
538     forward = SCATTER_FORWARD_LOCAL;
539     /* have to zero the work RHS since scatter may leave some slots empty */
540     for (i=0; i<n_local_true; i++) {
541       ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
542     }
543   }
544   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
545 
546   for (i=0; i<n_local; i++) {
547     ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
548   }
549   ierr = VecZeroEntries(y);CHKERRQ(ierr);
550   /* do the local solves */
551   for (i=0; i<n_local_true; i++) {
552     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
553     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
554     if (osm->localization) {
555       ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
556       ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
557     }
558     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
559   }
560   /* handle the rest of the scatters that do not have local solves */
561   for (i=n_local_true; i<n_local; i++) {
562     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
563     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
564   }
565   for (i=0; i<n_local; i++) {
566     ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
567   }
568   PetscFunctionReturn(0);
569 }
570 
571 #undef __FUNCT__
572 #define __FUNCT__ "PCReset_ASM"
573 static PetscErrorCode PCReset_ASM(PC pc)
574 {
575   PC_ASM         *osm = (PC_ASM*)pc->data;
576   PetscErrorCode ierr;
577   PetscInt       i;
578 
579   PetscFunctionBegin;
580   if (osm->ksp) {
581     for (i=0; i<osm->n_local_true; i++) {
582       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
583     }
584   }
585   if (osm->pmat) {
586     if (osm->n_local_true > 0) {
587       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
588     }
589   }
590   if (osm->restriction) {
591     for (i=0; i<osm->n_local; i++) {
592       ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
593       if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
594       ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
595       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
596       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
597       ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
598     }
599     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
600     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
601     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
602     ierr = PetscFree(osm->x);CHKERRQ(ierr);
603     ierr = PetscFree(osm->y);CHKERRQ(ierr);
604     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
605   }
606   ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
607   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
608     ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
609     ierr = MatDestroyMatrices(1, &osm->lmat);CHKERRQ(ierr);
610     ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
611     ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
612   }
613 
614   osm->is       = 0;
615   osm->is_local = 0;
616   PetscFunctionReturn(0);
617 }
618 
619 #undef __FUNCT__
620 #define __FUNCT__ "PCDestroy_ASM"
621 static PetscErrorCode PCDestroy_ASM(PC pc)
622 {
623   PC_ASM         *osm = (PC_ASM*)pc->data;
624   PetscErrorCode ierr;
625   PetscInt       i;
626 
627   PetscFunctionBegin;
628   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
629   if (osm->ksp) {
630     for (i=0; i<osm->n_local_true; i++) {
631       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
632     }
633     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
634   }
635   ierr = PetscFree(pc->data);CHKERRQ(ierr);
636   PetscFunctionReturn(0);
637 }
638 
639 #undef __FUNCT__
640 #define __FUNCT__ "PCSetFromOptions_ASM"
641 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
642 {
643   PC_ASM         *osm = (PC_ASM*)pc->data;
644   PetscErrorCode ierr;
645   PetscInt       blocks,ovl;
646   PetscBool      symset,flg;
647   PCASMType      asmtype;
648   PCCompositeType loctype;
649 
650   PetscFunctionBegin;
651   /* set the type to symmetric if matrix is symmetric */
652   if (!osm->type_set && pc->pmat) {
653     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
654     if (symset && flg) osm->type = PC_ASM_BASIC;
655   }
656   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
657   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
658   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
659   if (flg) {
660     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
661     osm->dm_subdomains = PETSC_FALSE;
662   }
663   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
664   if (flg) {
665     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
666     osm->dm_subdomains = PETSC_FALSE;
667   }
668   flg  = PETSC_FALSE;
669   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
670   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
671   flg  = PETSC_FALSE;
672   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
673   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
674   ierr = PetscOptionsTail();CHKERRQ(ierr);
675   PetscFunctionReturn(0);
676 }
677 
678 /*------------------------------------------------------------------------------------*/
679 
680 #undef __FUNCT__
681 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the old ones, so that
       passing the currently stored IS objects back in does not free them prematurely */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local[] given: seed the overlapping subdomains from the non-overlapping ones */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* No overlap will be added, so is[i] can safely share the same IS as is_local[i] */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
729 
730 #undef __FUNCT__
731 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
732 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
733 {
734   PC_ASM         *osm = (PC_ASM*)pc->data;
735   PetscErrorCode ierr;
736   PetscMPIInt    rank,size;
737   PetscInt       n;
738 
739   PetscFunctionBegin;
740   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
741   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
742 
743   /*
744      Split the subdomains equally among all processors
745   */
746   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
747   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
748   n    = N/size + ((N % size) > rank);
749   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
750   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
751   if (!pc->setupcalled) {
752     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
753 
754     osm->n_local_true = n;
755     osm->is           = 0;
756     osm->is_local     = 0;
757   }
758   PetscFunctionReturn(0);
759 }
760 
761 #undef __FUNCT__
762 #define __FUNCT__ "PCASMSetOverlap_ASM"
763 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
764 {
765   PC_ASM *osm = (PC_ASM*)pc->data;
766 
767   PetscFunctionBegin;
768   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
769   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
770   if (!pc->setupcalled) osm->overlap = ovl;
771   PetscFunctionReturn(0);
772 }
773 
774 #undef __FUNCT__
775 #define __FUNCT__ "PCASMSetType_ASM"
776 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
777 {
778   PC_ASM *osm = (PC_ASM*)pc->data;
779 
780   PetscFunctionBegin;
781   osm->type     = type;
782   osm->type_set = PETSC_TRUE;
783   PetscFunctionReturn(0);
784 }
785 
786 #undef __FUNCT__
787 #define __FUNCT__ "PCASMGetType_ASM"
788 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
789 {
790   PC_ASM *osm = (PC_ASM*)pc->data;
791 
792   PetscFunctionBegin;
793   *type = osm->type;
794   PetscFunctionReturn(0);
795 }
796 
797 #undef __FUNCT__
798 #define __FUNCT__ "PCASMSetLocalType_ASM"
799 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
800 {
801   PC_ASM *osm = (PC_ASM *) pc->data;
802 
803   PetscFunctionBegin;
804   osm->loctype = type;
805   PetscFunctionReturn(0);
806 }
807 
808 #undef __FUNCT__
809 #define __FUNCT__ "PCASMGetLocalType_ASM"
810 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
811 {
812   PC_ASM *osm = (PC_ASM *) pc->data;
813 
814   PetscFunctionBegin;
815   *type = osm->loctype;
816   PetscFunctionReturn(0);
817 }
818 
819 #undef __FUNCT__
820 #define __FUNCT__ "PCASMSetSortIndices_ASM"
821 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
822 {
823   PC_ASM *osm = (PC_ASM*)pc->data;
824 
825   PetscFunctionBegin;
826   osm->sort_indices = doSort;
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "PCASMGetSubKSP_ASM"
832 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
833 {
834   PC_ASM         *osm = (PC_ASM*)pc->data;
835   PetscErrorCode ierr;
836 
837   PetscFunctionBegin;
838   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
839 
840   if (n_local) *n_local = osm->n_local_true;
841   if (first_local) {
842     ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
843     *first_local -= osm->n_local_true;
844   }
845   if (ksp) {
846     /* Assume that local solves are now different; not necessarily
847        true though!  This flag is used only for PCView_ASM() */
848     *ksp                   = osm->ksp;
849     osm->same_local_solves = PETSC_FALSE;
850   }
851   PetscFunctionReturn(0);
852 }
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "PCASMSetLocalSubdomains"
856 /*@C
857     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
858 
859     Collective on PC
860 
861     Input Parameters:
862 +   pc - the preconditioner context
863 .   n - the number of subdomains for this processor (default value = 1)
864 .   is - the index set that defines the subdomains for this processor
865          (or NULL for PETSc to determine subdomains)
866 -   is_local - the index sets that define the local part of the subdomains for this processor
867          (or NULL to use the default of 1 subdomain per process)
868 
869     Notes:
870     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
871 
872     By default the ASM preconditioner uses 1 block per processor.
873 
874     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
875 
876     Level: advanced
877 
878 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
879 
880 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
881           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
882 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the ASM implementation if registered; PetscTryMethod makes this a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
892 
893 #undef __FUNCT__
894 #define __FUNCT__ "PCASMSetTotalSubdomains"
895 /*@C
896     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
897     additive Schwarz preconditioner.  Either all or no processors in the
898     PC communicator must call this routine, with the same index sets.
899 
900     Collective on PC
901 
902     Input Parameters:
903 +   pc - the preconditioner context
904 .   N  - the number of subdomains for all processors
905 .   is - the index sets that define the subdomains for all processors
906          (or NULL to ask PETSc to come up with subdomains)
907 -   is_local - the index sets that define the local part of the subdomains for this processor
908          (or NULL to use the default of 1 subdomain per process)
909 
910     Options Database Key:
911     To set the total number of subdomain blocks rather than specify the
912     index sets, use the option
913 .    -pc_asm_blocks <blks> - Sets total blocks
914 
915     Notes:
916     Currently you cannot use this to set the actual subdomains with the argument is.
917 
918     By default the ASM preconditioner uses 1 block per processor.
919 
920     These index sets cannot be destroyed until after completion of the
921     linear solves for which the ASM preconditioner is being used.
922 
923     Use PCASMSetLocalSubdomains() to set local subdomains.
924 
925     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
926 
927     Level: advanced
928 
929 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
930 
931 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
932           PCASMCreateSubdomains2D()
933 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the ASM implementation if registered; PetscTryMethod makes this a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
943 
944 #undef __FUNCT__
945 #define __FUNCT__ "PCASMSetOverlap"
946 /*@
947     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
948     additive Schwarz preconditioner.  Either all or no processors in the
949     PC communicator must call this routine. If MatIncreaseOverlap is used,
950     use option -mat_increase_overlap when the problem size large.
951 
952     Logically Collective on PC
953 
954     Input Parameters:
955 +   pc  - the preconditioner context
956 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
957 
958     Options Database Key:
959 .   -pc_asm_overlap <ovl> - Sets overlap
960 
961     Notes:
962     By default the ASM preconditioner uses 1 block per processor.  To use
963     multiple blocks per processor, see PCASMSetTotalSubdomains() and
964     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
965 
966     The overlap defaults to 1, so if one desires that no additional
967     overlap be computed beyond what may have been set with a call to
968     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
969     must be set to be 0.  In particular, if one does not explicitly set
970     the subdomains in an application code, then all overlap would be computed
971     internally by PETSc, and using an overlap of 0 would result in an ASM
972     variant that is equivalent to the block Jacobi preconditioner.
973 
974     Note that one can define initial index sets with any overlap via
975     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
976     PCASMSetOverlap() merely allows PETSc to extend that overlap further
977     if desired.
978 
979     Level: intermediate
980 
981 .keywords: PC, ASM, set, overlap
982 
983 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
984           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
985 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatch to the ASM implementation if registered; PetscTryMethod makes this a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
996 
997 #undef __FUNCT__
998 #define __FUNCT__ "PCASMSetType"
999 /*@
1000     PCASMSetType - Sets the type of restriction and interpolation used
1001     for local problems in the additive Schwarz method.
1002 
1003     Logically Collective on PC
1004 
1005     Input Parameters:
1006 +   pc  - the preconditioner context
1007 -   type - variant of ASM, one of
1008 .vb
1009       PC_ASM_BASIC       - full interpolation and restriction
1010       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1011       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1012       PC_ASM_NONE        - local processor restriction and interpolation
1013 .ve
1014 
1015     Options Database Key:
1016 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1017 
1018     Level: intermediate
1019 
1020 .keywords: PC, ASM, set, type
1021 
1022 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1023           PCASMCreateSubdomains2D()
1024 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatch to the ASM implementation if registered; PetscTryMethod makes this a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1035 
1036 #undef __FUNCT__
1037 #define __FUNCT__ "PCASMGetType"
1038 /*@
1039     PCASMGetType - Gets the type of restriction and interpolation used
1040     for local problems in the additive Schwarz method.
1041 
1042     Logically Collective on PC
1043 
1044     Input Parameter:
1045 .   pc  - the preconditioner context
1046 
1047     Output Parameter:
1048 .   type - variant of ASM, one of
1049 
1050 .vb
1051       PC_ASM_BASIC       - full interpolation and restriction
1052       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1053       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1054       PC_ASM_NONE        - local processor restriction and interpolation
1055 .ve
1056 
1057     Options Database Key:
1058 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1059 
1060     Level: intermediate
1061 
1062 .keywords: PC, ASM, set, type
1063 
1064 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1065           PCASMCreateSubdomains2D()
1066 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if the PC type does not support this query */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1076 
1077 #undef __FUNCT__
1078 #define __FUNCT__ "PCASMSetLocalType"
1079 /*@
1080   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1081 
1082   Logically Collective on PC
1083 
1084   Input Parameters:
1085 + pc  - the preconditioner context
1086 - type - type of composition, one of
1087 .vb
1088   PC_COMPOSITE_ADDITIVE       - local additive combination
1089   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1090 .ve
1091 
1092   Options Database Key:
1093 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1094 
1095   Level: intermediate
1096 
1097 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
1098 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Dispatch to the ASM implementation if registered; PetscTryMethod makes this a no-op for other PC types */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1109 
1110 #undef __FUNCT__
1111 #define __FUNCT__ "PCASMGetLocalType"
1112 /*@
1113   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1114 
1115   Logically Collective on PC
1116 
1117   Input Parameter:
1118 . pc  - the preconditioner context
1119 
1120   Output Parameter:
1121 . type - type of composition, one of
1122 .vb
1123   PC_COMPOSITE_ADDITIVE       - local additive combination
1124   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1125 .ve
1126 
1127   Options Database Key:
1128 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1129 
1130   Level: intermediate
1131 
1132 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
1133 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if the PC type does not support this query */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1144 
1145 #undef __FUNCT__
1146 #define __FUNCT__ "PCASMSetSortIndices"
1147 /*@
1148     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1149 
1150     Logically Collective on PC
1151 
1152     Input Parameters:
1153 +   pc  - the preconditioner context
1154 -   doSort - sort the subdomain indices
1155 
1156     Level: intermediate
1157 
1158 .keywords: PC, ASM, set, type
1159 
1160 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1161           PCASMCreateSubdomains2D()
1162 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatch to the ASM implementation if registered; PetscTryMethod makes this a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "PCASMGetSubKSP"
1176 /*@C
1177    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1178    this processor.
1179 
1180    Collective on PC iff first_local is requested
1181 
1182    Input Parameter:
1183 .  pc - the preconditioner context
1184 
1185    Output Parameters:
1186 +  n_local - the number of blocks on this processor or NULL
1187 .  first_local - the global number of the first block on this processor or NULL,
1188                  all processors must request or all must pass NULL
1189 -  ksp - the array of KSP contexts
1190 
1191    Note:
1192    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1193 
1194    Currently for some matrix implementations only 1 block per processor
1195    is supported.
1196 
1197    You must call KSPSetUp() before calling PCASMGetSubKSP().
1198 
1199    Fortran note:
1200    The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.
1201 
1202    Level: advanced
1203 
1204 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1205 
1206 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1207           PCASMCreateSubdomains2D(),
1208 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if the PC type does not support this query */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1218 
1219 /* -------------------------------------------------------------------------------------*/
1220 /*MC
1221    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1222            its own KSP object.
1223 
1224    Options Database Keys:
1225 +  -pc_asm_blocks <blks> - Sets total blocks
1226 .  -pc_asm_overlap <ovl> - Sets overlap
1227 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1228 
1229      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1230       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1231       -pc_asm_type basic to use the standard ASM.
1232 
1233    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1234      than one processor. Defaults to one block per processor.
1235 
1236      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1237         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1238 
1239      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1240          and set the options directly on the resulting KSP object (you can access its PC
1241          with KSPGetPC())
1242 
1243 
1244    Level: beginner
1245 
1246    Concepts: additive Schwarz method
1247 
1248     References:
1249     An additive variant of the Schwarz alternating method for the case of many subregions
1250     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1251 
1252     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1253     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1254 
1255 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1256            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1257            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1258 
1259 M*/
1260 
1261 #undef __FUNCT__
1262 #define __FUNCT__ "PCCreate_ASM"
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* Defaults: subdomain counts decided at setup, overlap of 1, restricted ASM,
     additive local composition, sorted indices; all arrays created lazily in PCSetUp_ASM() */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;

  /* Install the PC method table entries implemented by this file */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* Compose the type-specific implementations behind the public PCASMXxx() dispatch routines */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1314 
1315 #undef __FUNCT__
1316 #define __FUNCT__ "PCASMCreateSubdomains"
1317 /*@C
1318    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1319    preconditioner for a any problem on a general grid.
1320 
1321    Collective
1322 
1323    Input Parameters:
1324 +  A - The global matrix operator
1325 -  n - the number of local blocks
1326 
1327    Output Parameters:
1328 .  outis - the array of index sets defining the subdomains
1329 
1330    Level: advanced
1331 
1332    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1333     from these if you use PCASMSetLocalSubdomains()
1334 
1335     In the Fortran version you must provide the array outis[] already allocated of length n.
1336 
1337 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1338 
1339 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1340 @*/
1341 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1342 {
1343   MatPartitioning mpart;
1344   const char      *prefix;
1345   PetscErrorCode  (*f)(Mat,Mat*);
1346   PetscMPIInt     size;
1347   PetscInt        i,j,rstart,rend,bs;
1348   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1349   Mat             Ad     = NULL, adj;
1350   IS              ispart,isnumb,*is;
1351   PetscErrorCode  ierr;
1352 
1353   PetscFunctionBegin;
1354   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1355   PetscValidPointer(outis,3);
1356   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1357 
1358   /* Get prefix, row distribution, and block size */
1359   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1360   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1361   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1362   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1363 
1364   /* Get diagonal block from matrix if possible */
1365   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1366   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1367   if (f) {
1368     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1369   } else if (size == 1) {
1370     Ad = A;
1371   }
1372   if (Ad) {
1373     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1374     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1375   }
1376   if (Ad && n > 1) {
1377     PetscBool match,done;
1378     /* Try to setup a good matrix partitioning if available */
1379     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1380     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1381     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1382     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1383     if (!match) {
1384       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1385     }
1386     if (!match) { /* assume a "good" partitioner is available */
1387       PetscInt       na;
1388       const PetscInt *ia,*ja;
1389       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1390       if (done) {
1391         /* Build adjacency matrix by hand. Unfortunately a call to
1392            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1393            remove the block-aij structure and we cannot expect
1394            MatPartitioning to split vertices as we need */
1395         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1396         const PetscInt *row;
1397         nnz = 0;
1398         for (i=0; i<na; i++) { /* count number of nonzeros */
1399           len = ia[i+1] - ia[i];
1400           row = ja + ia[i];
1401           for (j=0; j<len; j++) {
1402             if (row[j] == i) { /* don't count diagonal */
1403               len--; break;
1404             }
1405           }
1406           nnz += len;
1407         }
1408         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1409         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1410         nnz    = 0;
1411         iia[0] = 0;
1412         for (i=0; i<na; i++) { /* fill adjacency */
1413           cnt = 0;
1414           len = ia[i+1] - ia[i];
1415           row = ja + ia[i];
1416           for (j=0; j<len; j++) {
1417             if (row[j] != i) { /* if not diagonal */
1418               jja[nnz+cnt++] = row[j];
1419             }
1420           }
1421           nnz     += cnt;
1422           iia[i+1] = nnz;
1423         }
1424         /* Partitioning of the adjacency matrix */
1425         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1426         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1427         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1428         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1429         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1430         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1431         foundpart = PETSC_TRUE;
1432       }
1433       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1434     }
1435     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1436   }
1437 
1438   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1439   *outis = is;
1440 
1441   if (!foundpart) {
1442 
1443     /* Partitioning by contiguous chunks of rows */
1444 
1445     PetscInt mbs   = (rend-rstart)/bs;
1446     PetscInt start = rstart;
1447     for (i=0; i<n; i++) {
1448       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1449       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1450       start += count;
1451     }
1452 
1453   } else {
1454 
1455     /* Partitioning by adjacency of diagonal block  */
1456 
1457     const PetscInt *numbering;
1458     PetscInt       *count,nidx,*indices,*newidx,start=0;
1459     /* Get node count in each partition */
1460     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1461     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1462     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1463       for (i=0; i<n; i++) count[i] *= bs;
1464     }
1465     /* Build indices from node numbering */
1466     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1467     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1468     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1469     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1470     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1471     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1472     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1473       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1474       for (i=0; i<nidx; i++) {
1475         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1476       }
1477       ierr    = PetscFree(indices);CHKERRQ(ierr);
1478       nidx   *= bs;
1479       indices = newidx;
1480     }
1481     /* Shift to get global indices */
1482     for (i=0; i<nidx; i++) indices[i] += rstart;
1483 
1484     /* Build the index sets for each block */
1485     for (i=0; i<n; i++) {
1486       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1487       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1488       start += count[i];
1489     }
1490 
1491     ierr = PetscFree(count);CHKERRQ(ierr);
1492     ierr = PetscFree(indices);CHKERRQ(ierr);
1493     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1494     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1495 
1496   }
1497   PetscFunctionReturn(0);
1498 }
1499 
1500 #undef __FUNCT__
1501 #define __FUNCT__ "PCASMDestroySubdomains"
1502 /*@C
1503    PCASMDestroySubdomains - Destroys the index sets created with
1504    PCASMCreateSubdomains(). Should be called after setting subdomains
1505    with PCASMSetLocalSubdomains().
1506 
1507    Collective
1508 
1509    Input Parameters:
1510 +  n - the number of index sets
1511 .  is - the array of index sets
1512 -  is_local - the array of local index sets, can be NULL
1513 
1514    Level: advanced
1515 
1516 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1517 
1518 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1519 @*/
1520 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1521 {
1522   PetscInt       i;
1523   PetscErrorCode ierr;
1524 
1525   PetscFunctionBegin;
1526   if (n <= 0) PetscFunctionReturn(0);
1527   if (is) {
1528     PetscValidPointer(is,2);
1529     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1530     ierr = PetscFree(is);CHKERRQ(ierr);
1531   }
1532   if (is_local) {
1533     PetscValidPointer(is_local,3);
1534     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1535     ierr = PetscFree(is_local);CHKERRQ(ierr);
1536   }
1537   PetscFunctionReturn(0);
1538 }
1539 
1540 #undef __FUNCT__
1541 #define __FUNCT__ "PCASMCreateSubdomains2D"
1542 /*@
1543    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1544    preconditioner for a two-dimensional problem on a regular grid.
1545 
1546    Not Collective
1547 
1548    Input Parameters:
1549 +  m, n - the number of mesh points in the x and y directions
1550 .  M, N - the number of subdomains in the x and y directions
1551 .  dof - degrees of freedom per node
1552 -  overlap - overlap in mesh lines
1553 
1554    Output Parameters:
1555 +  Nsub - the number of subdomains created
1556 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1557 -  is_local - array of index sets defining non-overlapping subdomains
1558 
1559    Note:
1560    Presently PCAMSCreateSubdomains2d() is valid only for sequential
1561    preconditioners.  More general related routines are
1562    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1563 
1564    Level: advanced
1565 
1566 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1567 
1568 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1569           PCASMSetOverlap()
1570 @*/
1571 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1572 {
1573   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1574   PetscErrorCode ierr;
1575   PetscInt       nidx,*idx,loc,ii,jj,count;
1576 
1577   PetscFunctionBegin;
1578   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1579 
1580   *Nsub     = N*M;
1581   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1582   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1583   ystart    = 0;
1584   loc_outer = 0;
1585   for (i=0; i<N; i++) {
1586     height = n/N + ((n % N) > i); /* height of subdomain */
1587     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1588     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1589     yright = ystart + height + overlap; if (yright > n) yright = n;
1590     xstart = 0;
1591     for (j=0; j<M; j++) {
1592       width = m/M + ((m % M) > j); /* width of subdomain */
1593       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1594       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1595       xright = xstart + width + overlap; if (xright > m) xright = m;
1596       nidx   = (xright - xleft)*(yright - yleft);
1597       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1598       loc    = 0;
1599       for (ii=yleft; ii<yright; ii++) {
1600         count = m*ii + xleft;
1601         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1602       }
1603       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1604       if (overlap == 0) {
1605         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1606 
1607         (*is_local)[loc_outer] = (*is)[loc_outer];
1608       } else {
1609         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1610           for (jj=xstart; jj<xstart+width; jj++) {
1611             idx[loc++] = m*ii + jj;
1612           }
1613         }
1614         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1615       }
1616       ierr    = PetscFree(idx);CHKERRQ(ierr);
1617       xstart += width;
1618       loc_outer++;
1619     }
1620     ystart += height;
1621   }
1622   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1623   PetscFunctionReturn(0);
1624 }
1625 
1626 #undef __FUNCT__
1627 #define __FUNCT__ "PCASMGetLocalSubdomains"
1628 /*@C
1629     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1630     only) for the additive Schwarz preconditioner.
1631 
1632     Not Collective
1633 
1634     Input Parameter:
1635 .   pc - the preconditioner context
1636 
1637     Output Parameters:
1638 +   n - the number of subdomains for this processor (default value = 1)
1639 .   is - the index sets that define the subdomains for this processor
1640 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1641 
1642 
1643     Notes:
1644     The IS numbering is in the parallel, global numbering of the vector.
1645 
1646     Level: advanced
1647 
1648 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1649 
1650 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1651           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1652 @*/
1653 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1654 {
1655   PC_ASM         *osm;
1656   PetscErrorCode ierr;
1657   PetscBool      match;
1658 
1659   PetscFunctionBegin;
1660   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1661   PetscValidIntPointer(n,2);
1662   if (is) PetscValidPointer(is,3);
1663   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1664   if (!match) {
1665     if (n) *n = 0;
1666     if (is) *is = NULL;
1667   } else {
1668     osm = (PC_ASM*)pc->data;
1669     if (n) *n = osm->n_local_true;
1670     if (is) *is = osm->is;
1671     if (is_local) *is_local = osm->is_local;
1672   }
1673   PetscFunctionReturn(0);
1674 }
1675 
1676 #undef __FUNCT__
1677 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1678 /*@C
1679     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1680     only) for the additive Schwarz preconditioner.
1681 
1682     Not Collective
1683 
1684     Input Parameter:
1685 .   pc - the preconditioner context
1686 
1687     Output Parameters:
1688 +   n - the number of matrices for this processor (default value = 1)
1689 -   mat - the matrices
1690 
1691 
1692     Level: advanced
1693 
1694     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1695 
1696            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1697 
1698 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1699 
1700 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1701           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1702 @*/
1703 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1704 {
1705   PC_ASM         *osm;
1706   PetscErrorCode ierr;
1707   PetscBool      match;
1708 
1709   PetscFunctionBegin;
1710   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1711   PetscValidIntPointer(n,2);
1712   if (mat) PetscValidPointer(mat,3);
1713   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1714   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1715   if (!match) {
1716     if (n) *n = 0;
1717     if (mat) *mat = NULL;
1718   } else {
1719     osm = (PC_ASM*)pc->data;
1720     if (n) *n = osm->n_local_true;
1721     if (mat) *mat = osm->pmat;
1722   }
1723   PetscFunctionReturn(0);
1724 }
1725 
1726 #undef __FUNCT__
1727 #define __FUNCT__ "PCASMSetDMSubdomains"
1728 /*@
1729     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1730     Logically Collective
1731 
1732     Input Parameter:
1733 +   pc  - the preconditioner
1734 -   flg - boolean indicating whether to use subdomains defined by the DM
1735 
1736     Options Database Key:
1737 .   -pc_asm_dm_subdomains
1738 
1739     Level: intermediate
1740 
1741     Notes:
1742     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1743     so setting either of the first two effectively turns the latter off.
1744 
1745 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1746 
1747 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1748           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1749 @*/
1750 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1751 {
1752   PC_ASM         *osm = (PC_ASM*)pc->data;
1753   PetscErrorCode ierr;
1754   PetscBool      match;
1755 
1756   PetscFunctionBegin;
1757   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1758   PetscValidLogicalCollectiveBool(pc,flg,2);
1759   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1760   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1761   if (match) {
1762     osm->dm_subdomains = flg;
1763   }
1764   PetscFunctionReturn(0);
1765 }
1766 
1767 #undef __FUNCT__
1768 #define __FUNCT__ "PCASMGetDMSubdomains"
1769 /*@
1770     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1771     Not Collective
1772 
1773     Input Parameter:
1774 .   pc  - the preconditioner
1775 
1776     Output Parameter:
1777 .   flg - boolean indicating whether to use subdomains defined by the DM
1778 
1779     Level: intermediate
1780 
1781 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1782 
1783 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1784           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1785 @*/
1786 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1787 {
1788   PC_ASM         *osm = (PC_ASM*)pc->data;
1789   PetscErrorCode ierr;
1790   PetscBool      match;
1791 
1792   PetscFunctionBegin;
1793   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1794   PetscValidPointer(flg,2);
1795   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1796   if (match) {
1797     if (flg) *flg = osm->dm_subdomains;
1798   }
1799   PetscFunctionReturn(0);
1800 }
1801