xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 967582eba84cc53dc1fbf6b54d6aa49b7d83bae6)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <../src/ksp/pc/impls/asm/asm.h> /*I "petscpc.h" I*/
14 
/*
  PCView_ASM - Prints the state of the additive Schwarz preconditioner:
  block/overlap counts, restriction/interpolation type, local composition
  type, and the per-block KSP/PC solvers.

  Handles ASCII and string viewers; any other viewer type is silently ignored.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* fallback text used when overlap/subdomain counts have not yet been determined by PCSetUp_ASM() */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks use the same solver: view a single representative KSP from rank 0 only. */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        /* Get/RestoreSubViewer are collective, so every rank must call them even though only rank 0 prints. */
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Blocks differ: each rank prints its own blocks, serialized in rank order via synchronized printing. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line summary plus a view of the (representative) first block solver. */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
74 
75 static PetscErrorCode PCASMPrintSubdomains(PC pc)
76 {
77   PC_ASM         *osm = (PC_ASM*)pc->data;
78   const char     *prefix;
79   char           fname[PETSC_MAX_PATH_LEN+1];
80   PetscViewer    viewer, sviewer;
81   char           *s;
82   PetscInt       i,j,nidx;
83   const PetscInt *idx;
84   PetscMPIInt    rank, size;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
89   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
90   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
91   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
92   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
93   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
94   for (i=0; i<osm->n_local; i++) {
95     if (i < osm->n_local_true) {
96       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
97       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
98       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
99 #define len  16*(nidx+1)+512
100       ierr = PetscMalloc1(len,&s);CHKERRQ(ierr);
101       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
102 #undef len
103       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
104       for (j=0; j<nidx; j++) {
105         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
106       }
107       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
108       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
109       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
110       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
111       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
112       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
113       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
114       ierr = PetscFree(s);CHKERRQ(ierr);
115       if (osm->is_local) {
116         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
117 #define len  16*(nidx+1)+512
118         ierr = PetscMalloc1(len, &s);CHKERRQ(ierr);
119         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
120 #undef len
121         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
122         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
123         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
124         for (j=0; j<nidx; j++) {
125           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
126         }
127         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
128         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
129         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
130         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
131         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
132         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
133         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
134         ierr = PetscFree(s);CHKERRQ(ierr);
135       }
136     } else {
137       /* Participate in collective viewer calls. */
138       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
139       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
140       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
141       /* Assume either all ranks have is_local or none do. */
142       if (osm->is_local) {
143         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
144         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
145         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
146       }
147     }
148   }
149   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
150   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
151   PetscFunctionReturn(0);
152 }
153 
154 static PetscErrorCode PCSetUp_ASM(PC pc)
155 {
156   PC_ASM         *osm = (PC_ASM*)pc->data;
157   PetscErrorCode ierr;
158   PetscBool      flg;
159   PetscInt       i,m,m_local;
160   MatReuse       scall = MAT_REUSE_MATRIX;
161   IS             isl;
162   KSP            ksp;
163   PC             subpc;
164   const char     *prefix,*pprefix;
165   Vec            vec;
166   DM             *domain_dm = NULL;
167 
168   PetscFunctionBegin;
169   if (!pc->setupcalled) {
170     PetscInt m;
171 
172     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
173     if (osm->n_local_true == PETSC_DECIDE) {
174       /* no subdomains given */
175       /* try pc->dm first, if allowed */
176       if (osm->dm_subdomains && pc->dm) {
177         PetscInt  num_domains, d;
178         char      **domain_names;
179         IS        *inner_domain_is, *outer_domain_is;
180         ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
181         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
182                               A future improvement of this code might allow one to use
183                               DM-defined subdomains and also increase the overlap,
184                               but that is not currently supported */
185         if (num_domains) {
186           ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
187         }
188         for (d = 0; d < num_domains; ++d) {
189           if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
190           if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
191           if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
192         }
193         ierr = PetscFree(domain_names);CHKERRQ(ierr);
194         ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
195         ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
196       }
197       if (osm->n_local_true == PETSC_DECIDE) {
198         /* still no subdomains; use one subdomain per processor */
199         osm->n_local_true = 1;
200       }
201     }
202     { /* determine the global and max number of subdomains */
203       struct {PetscInt max,sum;} inwork,outwork;
204       PetscMPIInt size;
205 
206       inwork.max   = osm->n_local_true;
207       inwork.sum   = osm->n_local_true;
208       ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
209       osm->n_local = outwork.max;
210       osm->n       = outwork.sum;
211 
212       ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
213       if (outwork.max == 1 && outwork.sum == size) {
214         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
215         ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
216       }
217     }
218     if (!osm->is) { /* create the index sets */
219       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
220     }
221     if (osm->n_local_true > 1 && !osm->is_local) {
222       ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
223       for (i=0; i<osm->n_local_true; i++) {
224         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
225           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
226           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
227         } else {
228           ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
229           osm->is_local[i] = osm->is[i];
230         }
231       }
232     }
233     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
234     flg  = PETSC_FALSE;
235     ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
236     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
237 
238     if (osm->overlap > 0) {
239       /* Extend the "overlapping" regions by a number of steps */
240       ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
241     }
242     if (osm->sort_indices) {
243       for (i=0; i<osm->n_local_true; i++) {
244         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
245         if (osm->is_local) {
246           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
247         }
248       }
249     }
250 
251     if (!osm->ksp) {
252       /* Create the local solvers */
253       ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
254       if (domain_dm) {
255         ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
256       }
257       for (i=0; i<osm->n_local_true; i++) {
258         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
259         ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
260         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
261         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
262         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
263         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
264         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
265         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
266         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
267         if (domain_dm) {
268           ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
269           ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
270           ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
271         }
272         osm->ksp[i] = ksp;
273       }
274       if (domain_dm) {
275         ierr = PetscFree(domain_dm);CHKERRQ(ierr);
276       }
277     }
278 
279     ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
280     ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
281     ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
282     ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
283     ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
284 
285     scall = MAT_INITIAL_MATRIX;
286   } else {
287     /*
288        Destroy the blocks from the previous iteration
289     */
290     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
291       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
292       scall = MAT_INITIAL_MATRIX;
293     }
294   }
295 
296   /*
297      Extract out the submatrices
298   */
299   ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
300   if (scall == MAT_INITIAL_MATRIX) {
301     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
302     for (i=0; i<osm->n_local_true; i++) {
303       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
304       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
305     }
306   }
307 
308   /* Convert the types of the submatrices (if needbe) */
309   if (osm->sub_mat_type) {
310     for (i=0; i<osm->n_local_true; i++) {
311       ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
312     }
313   }
314 
315   if (!pc->setupcalled) {
316     /* Create the local work vectors (from the local matrices) and scatter contexts */
317     ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
318 
319     if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
320     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
321       ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
322     }
323     ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
324     ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
325     ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);
326 
327     ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
328     ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
329     ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
330     ierr = ISDestroy(&isl);CHKERRQ(ierr);
331 
332 
333     for (i=0; i<osm->n_local_true; ++i) {
334       ISLocalToGlobalMapping ltog;
335       IS                     isll;
336       const PetscInt         *idx_is;
337       PetscInt               *idx_lis,nout;
338 
339       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
340       ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
341       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
342 
343       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
344       ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
345       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
346       ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
347       ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
348       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
349       if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
350       ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
351       ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
352       ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
353       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
354       ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
355       ierr = ISDestroy(&isll);CHKERRQ(ierr);
356       ierr = ISDestroy(&isl);CHKERRQ(ierr);
357       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
358         ISLocalToGlobalMapping ltog;
359         IS                     isll,isll_local;
360         const PetscInt         *idx_local;
361         PetscInt               *idx1, *idx2, nout;
362 
363         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
364         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
365 
366         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
367         ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
368         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
369         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
370         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
371         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
372 
373         ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
374         ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
375         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
376         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
377         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
378         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);
379 
380         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
381         ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);
382 
383         ierr = ISDestroy(&isll);CHKERRQ(ierr);
384         ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
385       }
386     }
387     ierr = VecDestroy(&vec);CHKERRQ(ierr);
388   }
389 
390   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
391     IS      *cis;
392     PetscInt c;
393 
394     ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
395     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
396     ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
397     ierr = PetscFree(cis);CHKERRQ(ierr);
398   }
399 
400   /* Return control to the user so that the submatrices can be modified (e.g., to apply
401      different boundary conditions for the submatrices than for the global problem) */
402   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
403 
404   /*
405      Loop over subdomains putting them into local ksp
406   */
407   for (i=0; i<osm->n_local_true; i++) {
408     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
409     if (!pc->setupcalled) {
410       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
411     }
412   }
413   PetscFunctionReturn(0);
414 }
415 
416 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
417 {
418   PC_ASM             *osm = (PC_ASM*)pc->data;
419   PetscErrorCode     ierr;
420   PetscInt           i;
421   KSPConvergedReason reason;
422 
423   PetscFunctionBegin;
424   for (i=0; i<osm->n_local_true; i++) {
425     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
426     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
427     if (reason == KSP_DIVERGED_PC_FAILED) {
428       pc->failedreason = PC_SUBPC_ERROR;
429     }
430   }
431   PetscFunctionReturn(0);
432 }
433 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner, y = M^{-1} x:
  scatters the global RHS to the merged local vector lx, solves each
  (overlapping) block with its KSP, combines the block solutions into ly
  according to osm->loctype (additive or multiplicative), and scatters ly
  back into the global y with ADD_VALUES.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    ierr = VecSet(y, 0.0);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else { /* interpolate the overlapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly, i.e. a Gauss-Seidel-like sweep over the blocks */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  } else {
    SETERRQ1(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
506 
/*
  PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner,
  y = M^{-T} x: same structure as PCApply_ASM() but each block is solved with
  KSPSolveTranspose(), and the restrict/interpolate scatter-mode roles are
  swapped (see the note below). Only the additive composition path exists here.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecSet(y, 0.0);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block (transposed system) */
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    } else { /* interpolate the overlapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
572 
573 static PetscErrorCode PCReset_ASM(PC pc)
574 {
575   PC_ASM         *osm = (PC_ASM*)pc->data;
576   PetscErrorCode ierr;
577   PetscInt       i;
578 
579   PetscFunctionBegin;
580   if (osm->ksp) {
581     for (i=0; i<osm->n_local_true; i++) {
582       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
583     }
584   }
585   if (osm->pmat) {
586     if (osm->n_local_true > 0) {
587       ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
588     }
589   }
590   if (osm->lrestriction) {
591     ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
592     for (i=0; i<osm->n_local_true; i++) {
593       ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
594       if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
595       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
596       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
597     }
598     ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
599     if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
600     ierr = PetscFree(osm->x);CHKERRQ(ierr);
601     ierr = PetscFree(osm->y);CHKERRQ(ierr);
602 
603   }
604   ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
605   ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
606   ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
607   ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
608   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
609     ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
610   }
611 
612   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
613 
614   osm->is       = 0;
615   osm->is_local = 0;
616   PetscFunctionReturn(0);
617 }
618 
/*
   PCDestroy_ASM - Destroys the ASM context: first releases all setup-time
   data via PCReset_ASM(), then destroys the subdomain KSP solvers and the
   PC_ASM structure itself.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  /* The KSP contexts are kept across PCReset_ASM(); destroy them here */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
636 
637 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
638 {
639   PC_ASM         *osm = (PC_ASM*)pc->data;
640   PetscErrorCode ierr;
641   PetscInt       blocks,ovl;
642   PetscBool      flg;
643   PCASMType      asmtype;
644   PCCompositeType loctype;
645   char           sub_mat_type[256];
646 
647   PetscFunctionBegin;
648   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
649   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
650   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
651   if (flg) {
652     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
653     osm->dm_subdomains = PETSC_FALSE;
654   }
655   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
656   if (flg) {
657     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
658     osm->dm_subdomains = PETSC_FALSE;
659   }
660   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
661   if (flg) {
662     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
663     osm->dm_subdomains = PETSC_FALSE;
664   }
665   flg  = PETSC_FALSE;
666   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
667   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
668   flg  = PETSC_FALSE;
669   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
670   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
671   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
672   if (flg) {
673     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
674   }
675   ierr = PetscOptionsTail();CHKERRQ(ierr);
676   PetscFunctionReturn(0);
677 }
678 
679 /*------------------------------------------------------------------------------------*/
680 
/*
   PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains()
   for PCASM. Records this process's n subdomain index sets (is, optionally
   is_local). Must be called before PCSetUp(); once setup has happened the
   block count and index sets are frozen.
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming sets BEFORE destroying the stored ones, in case
       the caller passed (some of) the index sets we already hold */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* Only local sets were given: seed osm->is from them */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* No overlap requested: alias the local set (extra reference) */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
728 
729 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
730 {
731   PC_ASM         *osm = (PC_ASM*)pc->data;
732   PetscErrorCode ierr;
733   PetscMPIInt    rank,size;
734   PetscInt       n;
735 
736   PetscFunctionBegin;
737   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
738   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
739 
740   /*
741      Split the subdomains equally among all processors
742   */
743   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
744   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
745   n    = N/size + ((N % size) > rank);
746   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
747   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
748   if (!pc->setupcalled) {
749     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
750 
751     osm->n_local_true = n;
752     osm->is           = 0;
753     osm->is_local     = 0;
754   }
755   PetscFunctionReturn(0);
756 }
757 
/*
   PCASMSetOverlap_ASM - Implementation of PCASMSetOverlap() for PCASM.
   Stores the requested overlap (ovl >= 0); must be called before PCSetUp()
   unless the value is unchanged.
*/
static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}
768 
/*
   PCASMSetType_ASM - Implementation of PCASMSetType() for PCASM. Records the
   restriction/interpolation variant; type_set notes that the user chose it
   explicitly.
*/
static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
778 
/*
   PCASMGetType_ASM - Implementation of PCASMGetType() for PCASM; returns the
   stored restriction/interpolation variant.
*/
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}
787 
/*
   PCASMSetLocalType_ASM - Implementation of PCASMSetLocalType() for PCASM.
   Only additive and multiplicative local compositions are implemented.
*/
static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}
797 
/*
   PCASMGetLocalType_ASM - Implementation of PCASMGetLocalType() for PCASM;
   returns the stored local composition type.
*/
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
806 
/*
   PCASMSetSortIndices_ASM - Implementation of PCASMSetSortIndices() for
   PCASM; records whether subdomain indices should be sorted during setup.
*/
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
815 
/*
   PCASMGetSubKSP_ASM - Implementation of PCASMGetSubKSP() for PCASM. Returns
   the array of subdomain KSP solvers and, optionally, this process's block
   count and the global index of its first block. Requesting first_local is
   collective (it performs an MPI_Scan); requesting only n_local/ksp is not.
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* osm->n_local_true is only set to a valid count by PCSetUp_ASM() */
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* Inclusive scan minus own count = exclusive prefix sum: the global
       number of the first block owned by this process */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
837 
/*
   PCASMGetSubMatType_ASM - Implementation of PCASMGetSubMatType() for PCASM;
   returns the matrix type used for the subdomain solves (NULL if unset).
*/
static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}
848 
/*
   PCASMSetSubMatType_ASM - Implementation of PCASMSetSubMatType() for PCASM.
   Stores a private copy of the requested matrix type string, freeing any
   previously stored value first.
*/
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
860 
861 /*@C
862     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
863 
864     Collective on pc
865 
866     Input Parameters:
867 +   pc - the preconditioner context
868 .   n - the number of subdomains for this processor (default value = 1)
869 .   is - the index set that defines the subdomains for this processor
870          (or NULL for PETSc to determine subdomains)
871 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
872          (or NULL to not provide these)
873 
874     Options Database Key:
875     To set the total number of subdomain blocks rather than specify the
876     index sets, use the option
877 .    -pc_asm_local_blocks <blks> - Sets local blocks
878 
879     Notes:
880     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
881 
882     By default the ASM preconditioner uses 1 block per processor.
883 
884     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
885 
886     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
887     back to form the global solution (this is the standard restricted additive Schwarz method)
888 
889     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
890     no code to handle that case.
891 
892     Level: advanced
893 
894 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
895           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
896 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCASMSetLocalSubdomains_ASM(); silently a no-op if pc is not a PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
906 
907 /*@C
908     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
909     additive Schwarz preconditioner.  Either all or no processors in the
910     PC communicator must call this routine, with the same index sets.
911 
912     Collective on pc
913 
914     Input Parameters:
915 +   pc - the preconditioner context
916 .   N  - the number of subdomains for all processors
917 .   is - the index sets that define the subdomains for all processors
918          (or NULL to ask PETSc to determine the subdomains)
919 -   is_local - the index sets that define the local part of the subdomains for this processor
920          (or NULL to not provide this information)
921 
922     Options Database Key:
923     To set the total number of subdomain blocks rather than specify the
924     index sets, use the option
925 .    -pc_asm_blocks <blks> - Sets total blocks
926 
927     Notes:
928     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
929 
930     By default the ASM preconditioner uses 1 block per processor.
931 
932     These index sets cannot be destroyed until after completion of the
933     linear solves for which the ASM preconditioner is being used.
934 
935     Use PCASMSetLocalSubdomains() to set local subdomains.
936 
937     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
938 
939     Level: advanced
940 
941 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
942           PCASMCreateSubdomains2D()
943 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCASMSetTotalSubdomains_ASM(); silently a no-op if pc is not a PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
953 
954 /*@
955     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
956     additive Schwarz preconditioner.  Either all or no processors in the
957     PC communicator must call this routine.
958 
959     Logically Collective on pc
960 
961     Input Parameters:
962 +   pc  - the preconditioner context
963 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
964 
965     Options Database Key:
966 .   -pc_asm_overlap <ovl> - Sets overlap
967 
968     Notes:
969     By default the ASM preconditioner uses 1 block per processor.  To use
970     multiple blocks per processor, see PCASMSetTotalSubdomains() and
971     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
972 
973     The overlap defaults to 1, so if one desires that no additional
974     overlap be computed beyond what may have been set with a call to
975     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
976     must be set to be 0.  In particular, if one does not explicitly set
977     the subdomains in an application code, then all overlap would be computed
978     internally by PETSc, and using an overlap of 0 would result in an ASM
979     variant that is equivalent to the block Jacobi preconditioner.
980 
981     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
982     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
983 
984     Note that one can define initial index sets with any overlap via
985     PCASMSetLocalSubdomains(); the routine
986     PCASMSetOverlap() merely allows PETSc to extend that overlap further
987     if desired.
988 
989     Level: intermediate
990 
991 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
992           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
993 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatches to PCASMSetOverlap_ASM(); silently a no-op if pc is not a PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1004 
1005 /*@
1006     PCASMSetType - Sets the type of restriction and interpolation used
1007     for local problems in the additive Schwarz method.
1008 
1009     Logically Collective on pc
1010 
1011     Input Parameters:
1012 +   pc  - the preconditioner context
1013 -   type - variant of ASM, one of
1014 .vb
1015       PC_ASM_BASIC       - full interpolation and restriction
1016       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1017       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1018       PC_ASM_NONE        - local processor restriction and interpolation
1019 .ve
1020 
1021     Options Database Key:
1022 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1023 
1024     Notes:
1025     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1026     to limit the local processor interpolation
1027 
1028     Level: intermediate
1029 
1030 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1031           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1032 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatches to PCASMSetType_ASM(); silently a no-op if pc is not a PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1043 
1044 /*@
1045     PCASMGetType - Gets the type of restriction and interpolation used
1046     for local problems in the additive Schwarz method.
1047 
1048     Logically Collective on pc
1049 
1050     Input Parameter:
1051 .   pc  - the preconditioner context
1052 
1053     Output Parameter:
1054 .   type - variant of ASM, one of
1055 
1056 .vb
1057       PC_ASM_BASIC       - full interpolation and restriction
1058       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1059       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1060       PC_ASM_NONE        - local processor restriction and interpolation
1061 .ve
1062 
1063     Options Database Key:
1064 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1065 
1066     Level: intermediate
1067 
1068 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1069           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1070 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (not Try): errors if pc does not implement PCASMGetType_C */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1080 
1081 /*@
1082   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1083 
1084   Logically Collective on pc
1085 
1086   Input Parameters:
1087 + pc  - the preconditioner context
1088 - type - type of composition, one of
1089 .vb
1090   PC_COMPOSITE_ADDITIVE       - local additive combination
1091   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1092 .ve
1093 
1094   Options Database Key:
1095 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1096 
1097   Level: intermediate
1098 
1099 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1100 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Dispatches to PCASMSetLocalType_ASM(); silently a no-op if pc is not a PCASM */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1111 
1112 /*@
1113   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1114 
1115   Logically Collective on pc
1116 
1117   Input Parameter:
1118 . pc  - the preconditioner context
1119 
1120   Output Parameter:
1121 . type - type of composition, one of
1122 .vb
1123   PC_COMPOSITE_ADDITIVE       - local additive combination
1124   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1125 .ve
1126 
1127   Options Database Key:
1128 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1129 
1130   Level: intermediate
1131 
1132 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1133 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod (not Try): errors if pc does not implement PCASMGetLocalType_C */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1144 
1145 /*@
1146     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1147 
1148     Logically Collective on pc
1149 
1150     Input Parameters:
1151 +   pc  - the preconditioner context
1152 -   doSort - sort the subdomain indices
1153 
1154     Level: intermediate
1155 
1156 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1157           PCASMCreateSubdomains2D()
1158 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatches to PCASMSetSortIndices_ASM(); silently a no-op if pc is not a PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1169 
1170 /*@C
1171    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1172    this processor.
1173 
1174    Collective on pc iff first_local is requested
1175 
1176    Input Parameter:
1177 .  pc - the preconditioner context
1178 
1179    Output Parameters:
1180 +  n_local - the number of blocks on this processor or NULL
1181 .  first_local - the global number of the first block on this processor or NULL,
1182                  all processors must request or all must pass NULL
1183 -  ksp - the array of KSP contexts
1184 
1185    Note:
1186    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1187 
1188    You must call KSPSetUp() before calling PCASMGetSubKSP().
1189 
1190    Fortran note:
1191    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1192 
1193    Level: advanced
1194 
1195 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1196           PCASMCreateSubdomains2D(),
1197 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (not Try): errors if pc does not implement PCASMGetSubKSP_C */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1207 
1208 /* -------------------------------------------------------------------------------------*/
1209 /*MC
1210    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1211            its own KSP object.
1212 
1213    Options Database Keys:
1214 +  -pc_asm_blocks <blks> - Sets total blocks
1215 .  -pc_asm_overlap <ovl> - Sets overlap
1216 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1217 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1218 
1219      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1220       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1221       -pc_asm_type basic to use the standard ASM.
1222 
1223    Notes:
1224     Each processor can have one or more blocks, but a block cannot be shared by more
1225      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1226 
1227      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1228         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1229 
1230      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1231          and set the options directly on the resulting KSP object (you can access its PC
1232          with KSPGetPC())
1233 
1234    Level: beginner
1235 
1236     References:
1237 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1238      Courant Institute, New York University Technical report
1239 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1240     Cambridge University Press.
1241 
1242 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1243            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1244            PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1245 
1246 M*/
1247 
/*
   PCCreate_ASM - Creates the PC_ASM context, sets default values, installs
   the PCASM function-table entries, and composes the PCASM-specific methods
   (PCASMSetXXX_C/PCASMGetXXX_C) so the public wrappers can dispatch to them.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* Defaults: block count decided at setup, overlap of 1, restricted ASM
     with additive local composition */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->lprolongation     = 0;
  osm->lrestriction      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* Compose type-specific methods used by the public PCASMSet/Get wrappers */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1301 
1302 /*@C
1303    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1304    preconditioner for a any problem on a general grid.
1305 
1306    Collective
1307 
1308    Input Parameters:
1309 +  A - The global matrix operator
1310 -  n - the number of local blocks
1311 
1312    Output Parameters:
1313 .  outis - the array of index sets defining the subdomains
1314 
1315    Level: advanced
1316 
1317    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1318     from these if you use PCASMSetLocalSubdomains()
1319 
1320     In the Fortran version you must provide the array outis[] already allocated of length n.
1321 
1322 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1323 @*/
1324 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1325 {
1326   MatPartitioning mpart;
1327   const char      *prefix;
1328   PetscInt        i,j,rstart,rend,bs;
1329   PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1330   Mat             Ad     = NULL, adj;
1331   IS              ispart,isnumb,*is;
1332   PetscErrorCode  ierr;
1333 
1334   PetscFunctionBegin;
1335   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1336   PetscValidPointer(outis,3);
1337   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1338 
1339   /* Get prefix, row distribution, and block size */
1340   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1341   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1342   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1343   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1344 
1345   /* Get diagonal block from matrix if possible */
1346   ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
1347   if (hasop) {
1348     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1349   }
1350   if (Ad) {
1351     ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1352     if (!isbaij) {ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1353   }
1354   if (Ad && n > 1) {
1355     PetscBool match,done;
1356     /* Try to setup a good matrix partitioning if available */
1357     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1358     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1359     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1360     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1361     if (!match) {
1362       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1363     }
1364     if (!match) { /* assume a "good" partitioner is available */
1365       PetscInt       na;
1366       const PetscInt *ia,*ja;
1367       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1368       if (done) {
1369         /* Build adjacency matrix by hand. Unfortunately a call to
1370            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1371            remove the block-aij structure and we cannot expect
1372            MatPartitioning to split vertices as we need */
1373         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1374         const PetscInt *row;
1375         nnz = 0;
1376         for (i=0; i<na; i++) { /* count number of nonzeros */
1377           len = ia[i+1] - ia[i];
1378           row = ja + ia[i];
1379           for (j=0; j<len; j++) {
1380             if (row[j] == i) { /* don't count diagonal */
1381               len--; break;
1382             }
1383           }
1384           nnz += len;
1385         }
1386         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1387         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1388         nnz    = 0;
1389         iia[0] = 0;
1390         for (i=0; i<na; i++) { /* fill adjacency */
1391           cnt = 0;
1392           len = ia[i+1] - ia[i];
1393           row = ja + ia[i];
1394           for (j=0; j<len; j++) {
1395             if (row[j] != i) { /* if not diagonal */
1396               jja[nnz+cnt++] = row[j];
1397             }
1398           }
1399           nnz     += cnt;
1400           iia[i+1] = nnz;
1401         }
1402         /* Partitioning of the adjacency matrix */
1403         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1404         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1405         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1406         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1407         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1408         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1409         foundpart = PETSC_TRUE;
1410       }
1411       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1412     }
1413     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1414   }
1415 
1416   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1417   *outis = is;
1418 
1419   if (!foundpart) {
1420 
1421     /* Partitioning by contiguous chunks of rows */
1422 
1423     PetscInt mbs   = (rend-rstart)/bs;
1424     PetscInt start = rstart;
1425     for (i=0; i<n; i++) {
1426       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1427       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1428       start += count;
1429     }
1430 
1431   } else {
1432 
1433     /* Partitioning by adjacency of diagonal block  */
1434 
1435     const PetscInt *numbering;
1436     PetscInt       *count,nidx,*indices,*newidx,start=0;
1437     /* Get node count in each partition */
1438     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1439     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1440     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1441       for (i=0; i<n; i++) count[i] *= bs;
1442     }
1443     /* Build indices from node numbering */
1444     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1445     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1446     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1447     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1448     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1449     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1450     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1451       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1452       for (i=0; i<nidx; i++) {
1453         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1454       }
1455       ierr    = PetscFree(indices);CHKERRQ(ierr);
1456       nidx   *= bs;
1457       indices = newidx;
1458     }
1459     /* Shift to get global indices */
1460     for (i=0; i<nidx; i++) indices[i] += rstart;
1461 
1462     /* Build the index sets for each block */
1463     for (i=0; i<n; i++) {
1464       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1465       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1466       start += count[i];
1467     }
1468 
1469     ierr = PetscFree(count);CHKERRQ(ierr);
1470     ierr = PetscFree(indices);CHKERRQ(ierr);
1471     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1472     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1473 
1474   }
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 /*@C
1479    PCASMDestroySubdomains - Destroys the index sets created with
1480    PCASMCreateSubdomains(). Should be called after setting subdomains
1481    with PCASMSetLocalSubdomains().
1482 
1483    Collective
1484 
1485    Input Parameters:
1486 +  n - the number of index sets
1487 .  is - the array of index sets
1488 -  is_local - the array of local index sets, can be NULL
1489 
1490    Level: advanced
1491 
1492 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1493 @*/
1494 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1495 {
1496   PetscInt       i;
1497   PetscErrorCode ierr;
1498 
1499   PetscFunctionBegin;
1500   if (n <= 0) PetscFunctionReturn(0);
1501   if (is) {
1502     PetscValidPointer(is,2);
1503     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1504     ierr = PetscFree(is);CHKERRQ(ierr);
1505   }
1506   if (is_local) {
1507     PetscValidPointer(is_local,3);
1508     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1509     ierr = PetscFree(is_local);CHKERRQ(ierr);
1510   }
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 /*@
1515    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1516    preconditioner for a two-dimensional problem on a regular grid.
1517 
1518    Not Collective
1519 
1520    Input Parameters:
1521 +  m, n - the number of mesh points in the x and y directions
1522 .  M, N - the number of subdomains in the x and y directions
1523 .  dof - degrees of freedom per node
1524 -  overlap - overlap in mesh lines
1525 
1526    Output Parameters:
1527 +  Nsub - the number of subdomains created
1528 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1529 -  is_local - array of index sets defining non-overlapping subdomains
1530 
1531    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1533    preconditioners.  More general related routines are
1534    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1535 
1536    Level: advanced
1537 
1538 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1539           PCASMSetOverlap()
1540 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* Only a single degree of freedom per node is supported here */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M; /* one subdomain per (i,j) tile of the M x N decomposition */
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0; /* running index of the current subdomain in *is / *is_local */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* Grow the tile by 'overlap' mesh lines in y, clamped to the grid */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* Grow the tile by 'overlap' mesh lines in x, clamped to the grid */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      /* Gather the global indices of the overlapped tile; the grid is
         numbered row-by-row, so row ii starts at m*ii */
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* No overlap: the local IS is the same object; bump the reference
           count so both arrays can be destroyed independently */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* Rebuild idx (reusing the buffer, which is large enough since the
           non-overlapped tile is a subset) with the un-overlapped indices */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* ISCreateGeneral() above already produces ascending indices within each
     row sweep; sort defensively so downstream code can rely on ordering */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1595 
1596 /*@C
1597     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1598     only) for the additive Schwarz preconditioner.
1599 
1600     Not Collective
1601 
1602     Input Parameter:
1603 .   pc - the preconditioner context
1604 
1605     Output Parameters:
1606 +   n - the number of subdomains for this processor (default value = 1)
1607 .   is - the index sets that define the subdomains for this processor
1608 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1609 
1610 
1611     Notes:
1612     The IS numbering is in the parallel, global numbering of the vector.
1613 
1614     Level: advanced
1615 
1616 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1617           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1618 @*/
1619 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1620 {
1621   PC_ASM         *osm = (PC_ASM*)pc->data;
1622   PetscErrorCode ierr;
1623   PetscBool      match;
1624 
1625   PetscFunctionBegin;
1626   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1627   PetscValidIntPointer(n,2);
1628   if (is) PetscValidPointer(is,3);
1629   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1630   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1631   if (n) *n = osm->n_local_true;
1632   if (is) *is = osm->is;
1633   if (is_local) *is_local = osm->is_local;
1634   PetscFunctionReturn(0);
1635 }
1636 
1637 /*@C
1638     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1639     only) for the additive Schwarz preconditioner.
1640 
1641     Not Collective
1642 
1643     Input Parameter:
1644 .   pc - the preconditioner context
1645 
1646     Output Parameters:
1647 +   n - the number of matrices for this processor (default value = 1)
1648 -   mat - the matrices
1649 
1650     Level: advanced
1651 
1652     Notes:
1653     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks())
1654 
1655            Usually one would use PCSetModifySubMatrices() to change the submatrices in building the preconditioner.
1656 
1657 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1658           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1659 @*/
1660 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1661 {
1662   PC_ASM         *osm;
1663   PetscErrorCode ierr;
1664   PetscBool      match;
1665 
1666   PetscFunctionBegin;
1667   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1668   PetscValidIntPointer(n,2);
1669   if (mat) PetscValidPointer(mat,3);
1670   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1671   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1672   if (!match) {
1673     if (n) *n = 0;
1674     if (mat) *mat = NULL;
1675   } else {
1676     osm = (PC_ASM*)pc->data;
1677     if (n) *n = osm->n_local_true;
1678     if (mat) *mat = osm->pmat;
1679   }
1680   PetscFunctionReturn(0);
1681 }
1682 
1683 /*@
1684     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1685 
1686     Logically Collective
1687 
    Input Parameters:
1689 +   pc  - the preconditioner
1690 -   flg - boolean indicating whether to use subdomains defined by the DM
1691 
1692     Options Database Key:
.   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM when possible
1694 
1695     Level: intermediate
1696 
1697     Notes:
1698     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1699     so setting either of the first two effectively turns the latter off.
1700 
1701 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1702           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1703 @*/
1704 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1705 {
1706   PC_ASM         *osm = (PC_ASM*)pc->data;
1707   PetscErrorCode ierr;
1708   PetscBool      match;
1709 
1710   PetscFunctionBegin;
1711   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1712   PetscValidLogicalCollectiveBool(pc,flg,2);
1713   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1714   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1715   if (match) {
1716     osm->dm_subdomains = flg;
1717   }
1718   PetscFunctionReturn(0);
1719 }
1720 
1721 /*@
1722     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1723     Not Collective
1724 
1725     Input Parameter:
1726 .   pc  - the preconditioner
1727 
1728     Output Parameter:
1729 .   flg - boolean indicating whether to use subdomains defined by the DM
1730 
1731     Level: intermediate
1732 
1733 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1734           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1735 @*/
1736 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1737 {
1738   PC_ASM         *osm = (PC_ASM*)pc->data;
1739   PetscErrorCode ierr;
1740   PetscBool      match;
1741 
1742   PetscFunctionBegin;
1743   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1744   PetscValidBoolPointer(flg,2);
1745   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1746   if (match) {
1747     if (flg) *flg = osm->dm_subdomains;
1748   }
1749   PetscFunctionReturn(0);
1750 }
1751 
1752 /*@
1753      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1754 
1755    Not Collective
1756 
1757    Input Parameter:
1758 .  pc - the PC
1759 
1760    Output Parameter:
.  sub_mat_type - name of matrix type
1762 
1763    Level: advanced
1764 
1765 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1766 @*/
1767 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1768   PetscErrorCode ierr;
1769 
1770   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1771   PetscFunctionReturn(0);
1772 }
1773 
1774 /*@
1775      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1776 
1777    Collective on Mat
1778 
1779    Input Parameters:
1780 +  pc             - the PC object
1781 -  sub_mat_type   - matrix type
1782 
1783    Options Database Key:
1784 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1785 
1786    Notes:
1787    See "${PETSC_DIR}/include/petscmat.h" for available types
1788 
1789   Level: advanced
1790 
1791 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1792 @*/
1793 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1794 {
1795   PetscErrorCode ierr;
1796 
1797   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1798   PetscFunctionReturn(0);
1799 }
1800