/* xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision b698fc57f0bea7237255b29c1b77df0acc362ffd) */
/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local - maximum over all processors of n_local_true
*/

#include <../src/ksp/pc/impls/asm/asm.h> /*I "petscpc.h" I*/

15 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
16 {
17   PC_ASM         *osm = (PC_ASM*)pc->data;
18   PetscErrorCode ierr;
19   PetscMPIInt    rank;
20   PetscInt       i,bsz;
21   PetscBool      iascii,isstring;
22   PetscViewer    sviewer;
23 
24   PetscFunctionBegin;
25   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
26   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
27   if (iascii) {
28     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
29     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
30     if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
31     ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
32     ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
33     if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
34     if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
35     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
36     if (osm->same_local_solves) {
37       if (osm->ksp) {
38         ierr = PetscViewerASCIIPrintf(viewer,"  Local solver is the same for all blocks, as in the following KSP and PC objects on rank 0:\n");CHKERRQ(ierr);
39         ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
40         if (!rank) {
41           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
42           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
43           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
44         }
45         ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
46       }
47     } else {
48       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
49       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
50       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
51       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
52       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
53       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
54       ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
55       for (i=0; i<osm->n_local_true; i++) {
56         ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
57         ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
58         ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
59         ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
60       }
61       ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
62       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
64       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
65     }
66   } else if (isstring) {
67     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
68     ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
69     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
70     ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
71   }
72   PetscFunctionReturn(0);
73 }
74 
75 static PetscErrorCode PCASMPrintSubdomains(PC pc)
76 {
77   PC_ASM         *osm = (PC_ASM*)pc->data;
78   const char     *prefix;
79   char           fname[PETSC_MAX_PATH_LEN+1];
80   PetscViewer    viewer, sviewer;
81   char           *s;
82   PetscInt       i,j,nidx;
83   const PetscInt *idx;
84   PetscMPIInt    rank, size;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
89   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
90   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
91   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,sizeof(fname),NULL);CHKERRQ(ierr);
92   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
93   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
94   for (i=0; i<osm->n_local; i++) {
95     if (i < osm->n_local_true) {
96       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
97       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
98       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
99 #define len  16*(nidx+1)+512
100       ierr = PetscMalloc1(len,&s);CHKERRQ(ierr);
101       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
102 #undef len
103       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
104       for (j=0; j<nidx; j++) {
105         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
106       }
107       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
108       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
109       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
110       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
111       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
112       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
113       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
114       ierr = PetscFree(s);CHKERRQ(ierr);
115       if (osm->is_local) {
116         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
117 #define len  16*(nidx+1)+512
118         ierr = PetscMalloc1(len, &s);CHKERRQ(ierr);
119         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
120 #undef len
121         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
122         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
123         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
124         for (j=0; j<nidx; j++) {
125           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
126         }
127         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
128         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
129         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
130         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
131         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
132         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
133         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
134         ierr = PetscFree(s);CHKERRQ(ierr);
135       }
136     } else {
137       /* Participate in collective viewer calls. */
138       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
139       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
140       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
141       /* Assume either all ranks have is_local or none do. */
142       if (osm->is_local) {
143         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
144         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
145         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
146       }
147     }
148   }
149   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
150   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
151   PetscFunctionReturn(0);
152 }
153 
154 static PetscErrorCode PCSetUp_ASM(PC pc)
155 {
156   PC_ASM         *osm = (PC_ASM*)pc->data;
157   PetscErrorCode ierr;
158   PetscBool      flg;
159   PetscInt       i,m,m_local;
160   MatReuse       scall = MAT_REUSE_MATRIX;
161   IS             isl;
162   KSP            ksp;
163   PC             subpc;
164   const char     *prefix,*pprefix;
165   Vec            vec;
166   DM             *domain_dm = NULL;
167 
168   PetscFunctionBegin;
169   if (!pc->setupcalled) {
170     PetscInt m;
171 
172     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
173     if (osm->n_local_true == PETSC_DECIDE) {
174       /* no subdomains given */
175       /* try pc->dm first, if allowed */
176       if (osm->dm_subdomains && pc->dm) {
177         PetscInt  num_domains, d;
178         char      **domain_names;
179         IS        *inner_domain_is, *outer_domain_is;
180         ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
181         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
182                               A future improvement of this code might allow one to use
183                               DM-defined subdomains and also increase the overlap,
184                               but that is not currently supported */
185         if (num_domains) {
186           ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
187         }
188         for (d = 0; d < num_domains; ++d) {
189           if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
190           if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
191           if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
192         }
193         ierr = PetscFree(domain_names);CHKERRQ(ierr);
194         ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
195         ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
196       }
197       if (osm->n_local_true == PETSC_DECIDE) {
198         /* still no subdomains; use one subdomain per processor */
199         osm->n_local_true = 1;
200       }
201     }
202     { /* determine the global and max number of subdomains */
203       struct {PetscInt max,sum;} inwork,outwork;
204       PetscMPIInt size;
205 
206       inwork.max   = osm->n_local_true;
207       inwork.sum   = osm->n_local_true;
208       ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
209       osm->n_local = outwork.max;
210       osm->n       = outwork.sum;
211 
212       ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
213       if (outwork.max == 1 && outwork.sum == size) {
214         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
215         ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
216       }
217     }
218     if (!osm->is) { /* create the index sets */
219       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
220     }
221     if (osm->n_local_true > 1 && !osm->is_local) {
222       ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
223       for (i=0; i<osm->n_local_true; i++) {
224         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
225           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
226           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
227         } else {
228           ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
229           osm->is_local[i] = osm->is[i];
230         }
231       }
232     }
233     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
234     flg  = PETSC_FALSE;
235     ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
236     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
237 
238     if (osm->overlap > 0) {
239       /* Extend the "overlapping" regions by a number of steps */
240       ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
241     }
242     if (osm->sort_indices) {
243       for (i=0; i<osm->n_local_true; i++) {
244         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
245         if (osm->is_local) {
246           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
247         }
248       }
249     }
250 
251     if (!osm->ksp) {
252       /* Create the local solvers */
253       ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
254       if (domain_dm) {
255         ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
256       }
257       for (i=0; i<osm->n_local_true; i++) {
258         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
259         ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
260         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
261         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
262         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
263         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
264         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
265         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
266         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
267         if (domain_dm) {
268           ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
269           ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
270           ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
271         }
272         osm->ksp[i] = ksp;
273       }
274       if (domain_dm) {
275         ierr = PetscFree(domain_dm);CHKERRQ(ierr);
276       }
277     }
278 
279     ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
280     ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
281     ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
282 
283     scall = MAT_INITIAL_MATRIX;
284   } else {
285     /*
286        Destroy the blocks from the previous iteration
287     */
288     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
289       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
290       scall = MAT_INITIAL_MATRIX;
291     }
292   }
293 
294   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
295   if ((scall == MAT_REUSE_MATRIX) && osm->sub_mat_type) {
296     if (osm->n_local_true > 0) {
297       ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
298     }
299     scall = MAT_INITIAL_MATRIX;
300   }
301 
302   /*
303      Extract out the submatrices
304   */
305   ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
306   if (scall == MAT_INITIAL_MATRIX) {
307     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
308     for (i=0; i<osm->n_local_true; i++) {
309       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
310       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
311     }
312   }
313 
314   /* Convert the types of the submatrices (if needbe) */
315   if (osm->sub_mat_type) {
316     for (i=0; i<osm->n_local_true; i++) {
317       ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
318     }
319   }
320 
321   if (!pc->setupcalled) {
322     VecType vtype;
323 
324     /* Create the local work vectors (from the local matrices) and scatter contexts */
325     ierr = MatCreateVecs(pc->pmat,&vec,NULL);CHKERRQ(ierr);
326 
327     if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
328     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
329       ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
330     }
331     ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
332     ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
333     ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);
334 
335     ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
336     ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
337     ierr = MatGetVecType(osm->pmat[0],&vtype);CHKERRQ(ierr);
338     ierr = VecCreate(PETSC_COMM_SELF,&osm->lx);CHKERRQ(ierr);
339     ierr = VecSetSizes(osm->lx,m,m);CHKERRQ(ierr);
340     ierr = VecSetType(osm->lx,vtype);CHKERRQ(ierr);
341     ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
342     ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
343     ierr = ISDestroy(&isl);CHKERRQ(ierr);
344 
345     for (i=0; i<osm->n_local_true; ++i) {
346       ISLocalToGlobalMapping ltog;
347       IS                     isll;
348       const PetscInt         *idx_is;
349       PetscInt               *idx_lis,nout;
350 
351       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
352       ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
353       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
354 
355       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
356       ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
357       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
358       ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
359       ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
360       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
361       if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
362       ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
363       ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
364       ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
365       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
366       ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
367       ierr = ISDestroy(&isll);CHKERRQ(ierr);
368       ierr = ISDestroy(&isl);CHKERRQ(ierr);
369       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
370         ISLocalToGlobalMapping ltog;
371         IS                     isll,isll_local;
372         const PetscInt         *idx_local;
373         PetscInt               *idx1, *idx2, nout;
374 
375         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
376         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
377 
378         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
379         ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
380         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
381         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
382         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
383         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
384 
385         ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
386         ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
387         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
388         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
389         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
390         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);
391 
392         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
393         ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);
394 
395         ierr = ISDestroy(&isll);CHKERRQ(ierr);
396         ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
397       }
398     }
399     ierr = VecDestroy(&vec);CHKERRQ(ierr);
400   }
401 
402   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
403     IS      *cis;
404     PetscInt c;
405 
406     ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
407     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
408     ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
409     ierr = PetscFree(cis);CHKERRQ(ierr);
410   }
411 
412   /* Return control to the user so that the submatrices can be modified (e.g., to apply
413      different boundary conditions for the submatrices than for the global problem) */
414   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
415 
416   /*
417      Loop over subdomains putting them into local ksp
418   */
419   for (i=0; i<osm->n_local_true; i++) {
420     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
421     if (!pc->setupcalled) {
422       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
423     }
424   }
425   PetscFunctionReturn(0);
426 }
427 
428 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
429 {
430   PC_ASM             *osm = (PC_ASM*)pc->data;
431   PetscErrorCode     ierr;
432   PetscInt           i;
433   KSPConvergedReason reason;
434 
435   PetscFunctionBegin;
436   for (i=0; i<osm->n_local_true; i++) {
437     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
438     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
439     if (reason == KSP_DIVERGED_PC_FAILED) {
440       pc->failedreason = PC_SUBPC_ERROR;
441     }
442   }
443   PetscFunctionReturn(0);
444 }
445 
/*
  PCApply_ASM - Applies the ASM preconditioner: y = sum_i R_i^T A_i^{-1} R_i x
  (additive), or the Gauss-Seidel-like sweep over blocks (multiplicative).

  The scatter/solve sequence below is strictly order-dependent: the global->local
  restriction must complete before block 0's RHS is extracted, and in the
  multiplicative case block i's solution must be folded into ly before block
  i+1's RHS is corrected.  Do not reorder these calls.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    ierr = VecSet(y, 0.0);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i],0);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      ierr = KSPCheckSolve(osm->ksp[i], pc, osm->y[i]);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      } else { /* interpolate the overlapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] -= lmats[i+1] * ly  (Gauss-Seidel-style correction) */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]);CHKERRQ(ierr);
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  } else SETERRQ1(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
515 
516 static PetscErrorCode PCMatApply_ASM(PC pc,Mat X,Mat Y)
517 {
518   PC_ASM         *osm = (PC_ASM*)pc->data;
519   Mat            Z,W;
520   Vec            x;
521   PetscInt       i,m,N;
522   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
523   PetscErrorCode ierr;
524 
525   PetscFunctionBegin;
526   if (osm->n_local_true > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Not yet implemented");
527   /*
528      support for limiting the restriction or interpolation to only local
529      subdomain values (leaving the other values 0).
530   */
531   if (!(osm->type & PC_ASM_RESTRICT)) {
532     forward = SCATTER_FORWARD_LOCAL;
533     /* have to zero the work RHS since scatter may leave some slots empty */
534     ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
535   }
536   if (!(osm->type & PC_ASM_INTERPOLATE)) {
537     reverse = SCATTER_REVERSE_LOCAL;
538   }
539   ierr = VecGetLocalSize(osm->x[0], &m);CHKERRQ(ierr);
540   ierr = MatGetSize(X, NULL, &N);CHKERRQ(ierr);
541   ierr = MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z);CHKERRQ(ierr);
542   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
543     /* zero the global and the local solutions */
544     ierr = MatZeroEntries(Y);CHKERRQ(ierr);
545     ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
546 
547     for (i = 0; i < N; ++i) {
548       ierr = MatDenseGetColumnVecRead(X, i, &x);
549       /* copy the global RHS to local RHS including the ghost nodes */
550       ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
551       ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
552       ierr = MatDenseRestoreColumnVecRead(X, i, &x);
553 
554       ierr = MatDenseGetColumnVecWrite(Z, i, &x);
555       /* restrict local RHS to the overlapping 0-block RHS */
556       ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward);CHKERRQ(ierr);
557       ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward);CHKERRQ(ierr);
558       ierr = MatDenseRestoreColumnVecWrite(Z, i, &x);
559     }
560     ierr = MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W);CHKERRQ(ierr);
561     /* solve the overlapping 0-block */
562     ierr = PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0);CHKERRQ(ierr);
563     ierr = KSPMatSolve(osm->ksp[0], Z, W);CHKERRQ(ierr);
564     ierr = KSPCheckSolve(osm->ksp[0], pc, NULL);CHKERRQ(ierr);
565     ierr = PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W,0);CHKERRQ(ierr);
566     ierr = MatDestroy(&Z);CHKERRQ(ierr);
567 
568     for (i = 0; i < N; ++i) {
569       ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
570       ierr = MatDenseGetColumnVecRead(W, i, &x);
571       if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
572         ierr = VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
573         ierr = VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
574       } else { /* interpolate the overlapping 0-block solution to the local solution */
575         ierr = VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
576         ierr = VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
577       }
578       ierr = MatDenseRestoreColumnVecRead(W, i, &x);
579 
580       ierr = MatDenseGetColumnVecWrite(Y, i, &x);
581       /* add the local solution to the global solution including the ghost nodes */
582       ierr = VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse);CHKERRQ(ierr);
583       ierr = VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse);CHKERRQ(ierr);
584       ierr = MatDenseRestoreColumnVecWrite(Y, i, &x);
585     }
586     ierr = MatDestroy(&W);CHKERRQ(ierr);
587   } else SETERRQ1(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
588   PetscFunctionReturn(0);
589 }
590 
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner: restrict the global RHS to each overlapping block, solve the
   transposed block systems, and add the block solutions back into the global
   vector.  Input x is the global RHS, output y the preconditioned vector.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecSet(y, 0.0);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves; the restriction of block i+1 is issued at the bottom
     of iteration i so it can overlap with the solve of block i+1's predecessor */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block */
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    } else { /* interpolate the overlapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
656 
/*
   PCReset_ASM - Releases all resources created during PCSetUp_ASM() (block
   matrices, scatters, work vectors, index sets) while keeping the PC_ASM
   context and the block KSP objects themselves so the PC can be set up again.
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* reset (do not destroy) the per-block solvers */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* osm->lrestriction being non-NULL indicates the scatters/work vectors were built */
  if (osm->lrestriction) {
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  /* the lmats array is created only for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}
702 
703 static PetscErrorCode PCDestroy_ASM(PC pc)
704 {
705   PC_ASM         *osm = (PC_ASM*)pc->data;
706   PetscErrorCode ierr;
707   PetscInt       i;
708 
709   PetscFunctionBegin;
710   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
711   if (osm->ksp) {
712     for (i=0; i<osm->n_local_true; i++) {
713       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
714     }
715     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
716   }
717   ierr = PetscFree(pc->data);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
720 
721 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
722 {
723   PC_ASM         *osm = (PC_ASM*)pc->data;
724   PetscErrorCode ierr;
725   PetscInt       blocks,ovl;
726   PetscBool      flg;
727   PCASMType      asmtype;
728   PCCompositeType loctype;
729   char           sub_mat_type[256];
730 
731   PetscFunctionBegin;
732   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
733   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
734   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
735   if (flg) {
736     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
737     osm->dm_subdomains = PETSC_FALSE;
738   }
739   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
740   if (flg) {
741     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
742     osm->dm_subdomains = PETSC_FALSE;
743   }
744   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
745   if (flg) {
746     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
747     osm->dm_subdomains = PETSC_FALSE;
748   }
749   flg  = PETSC_FALSE;
750   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
751   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
752   flg  = PETSC_FALSE;
753   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
754   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
755   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
756   if (flg) {
757     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
758   }
759   ierr = PetscOptionsTail();CHKERRQ(ierr);
760   PetscFunctionReturn(0);
761 }
762 
763 /*------------------------------------------------------------------------------------*/
764 
/*
   PCASMSetLocalSubdomains_ASM - Stores the user-supplied subdomain index sets
   on the PC_ASM context, taking a reference on each IS.  Must be called before
   PCSetUp(); changing the subdomains after setup is an error.
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the incoming index sets BEFORE destroying the old ones, in case any are shared */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* only is_local was provided: derive osm->is from it */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* no overlap extension: osm->is[i] can safely alias is_local[i] (extra reference taken) */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
812 
813 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
814 {
815   PC_ASM         *osm = (PC_ASM*)pc->data;
816   PetscErrorCode ierr;
817   PetscMPIInt    rank,size;
818   PetscInt       n;
819 
820   PetscFunctionBegin;
821   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
822   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
823 
824   /*
825      Split the subdomains equally among all processors
826   */
827   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
828   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
829   n    = N/size + ((N % size) > rank);
830   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
831   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
832   if (!pc->setupcalled) {
833     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
834 
835     osm->n_local_true = n;
836     osm->is           = NULL;
837     osm->is_local     = NULL;
838   }
839   PetscFunctionReturn(0);
840 }
841 
842 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
843 {
844   PC_ASM *osm = (PC_ASM*)pc->data;
845 
846   PetscFunctionBegin;
847   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
848   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
849   if (!pc->setupcalled) osm->overlap = ovl;
850   PetscFunctionReturn(0);
851 }
852 
853 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
854 {
855   PC_ASM *osm = (PC_ASM*)pc->data;
856 
857   PetscFunctionBegin;
858   osm->type     = type;
859   osm->type_set = PETSC_TRUE;
860   PetscFunctionReturn(0);
861 }
862 
863 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
864 {
865   PC_ASM *osm = (PC_ASM*)pc->data;
866 
867   PetscFunctionBegin;
868   *type = osm->type;
869   PetscFunctionReturn(0);
870 }
871 
872 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
873 {
874   PC_ASM *osm = (PC_ASM *) pc->data;
875 
876   PetscFunctionBegin;
877   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
878   osm->loctype = type;
879   PetscFunctionReturn(0);
880 }
881 
882 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
883 {
884   PC_ASM *osm = (PC_ASM *) pc->data;
885 
886   PetscFunctionBegin;
887   *type = osm->loctype;
888   PetscFunctionReturn(0);
889 }
890 
891 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
892 {
893   PC_ASM *osm = (PC_ASM*)pc->data;
894 
895   PetscFunctionBegin;
896   osm->sort_indices = doSort;
897   PetscFunctionReturn(0);
898 }
899 
/*
   PCASMGetSubKSP_ASM - Returns the array of per-block KSP solvers, optionally
   with the local block count and the global index of this process's first
   block.  Requesting first_local is collective (uses MPI_Scan).
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the block KSPs are created during setup, so setup must have run */
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* inclusive prefix sum of block counts; subtracting our own count yields
       the global index of this process's first block */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
921 
922 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
923 {
924   PC_ASM         *osm = (PC_ASM*)pc->data;
925 
926   PetscFunctionBegin;
927   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
928   PetscValidPointer(sub_mat_type,2);
929   *sub_mat_type = osm->sub_mat_type;
930   PetscFunctionReturn(0);
931 }
932 
933 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
934 {
935   PetscErrorCode    ierr;
936   PC_ASM            *osm = (PC_ASM*)pc->data;
937 
938   PetscFunctionBegin;
939   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
940   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
941   ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
944 
945 /*@C
946     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
947 
948     Collective on pc
949 
950     Input Parameters:
951 +   pc - the preconditioner context
952 .   n - the number of subdomains for this processor (default value = 1)
953 .   is - the index set that defines the subdomains for this processor
954          (or NULL for PETSc to determine subdomains)
955 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
956          (or NULL to not provide these)
957 
958     Options Database Key:
959     To set the total number of subdomain blocks rather than specify the
960     index sets, use the option
961 .    -pc_asm_local_blocks <blks> - Sets local blocks
962 
963     Notes:
964     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
965 
966     By default the ASM preconditioner uses 1 block per processor.
967 
968     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
969 
970     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
971     back to form the global solution (this is the standard restricted additive Schwarz method)
972 
973     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
974     no code to handle that case.
975 
976     Level: advanced
977 
978 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
979           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
980 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation's setter; PetscTryMethod() succeeds silently when the PC type does not compose it */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
990 
991 /*@C
992     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
993     additive Schwarz preconditioner.  Either all or no processors in the
994     PC communicator must call this routine, with the same index sets.
995 
996     Collective on pc
997 
998     Input Parameters:
999 +   pc - the preconditioner context
1000 .   N  - the number of subdomains for all processors
1001 .   is - the index sets that define the subdomains for all processors
1002          (or NULL to ask PETSc to determine the subdomains)
1003 -   is_local - the index sets that define the local part of the subdomains for this processor
1004          (or NULL to not provide this information)
1005 
1006     Options Database Key:
1007     To set the total number of subdomain blocks rather than specify the
1008     index sets, use the option
1009 .    -pc_asm_blocks <blks> - Sets total blocks
1010 
1011     Notes:
1012     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
1013 
1014     By default the ASM preconditioner uses 1 block per processor.
1015 
1016     These index sets cannot be destroyed until after completion of the
1017     linear solves for which the ASM preconditioner is being used.
1018 
1019     Use PCASMSetLocalSubdomains() to set local subdomains.
1020 
1021     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
1022 
1023     Level: advanced
1024 
1025 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1026           PCASMCreateSubdomains2D()
1027 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation's setter; PetscTryMethod() succeeds silently when the PC type does not compose it */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1037 
1038 /*@
1039     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1040     additive Schwarz preconditioner.  Either all or no processors in the
1041     PC communicator must call this routine.
1042 
1043     Logically Collective on pc
1044 
1045     Input Parameters:
1046 +   pc  - the preconditioner context
1047 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1048 
1049     Options Database Key:
1050 .   -pc_asm_overlap <ovl> - Sets overlap
1051 
1052     Notes:
1053     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
1055     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1056 
1057     The overlap defaults to 1, so if one desires that no additional
1058     overlap be computed beyond what may have been set with a call to
1059     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1060     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
1062     internally by PETSc, and using an overlap of 0 would result in an ASM
1063     variant that is equivalent to the block Jacobi preconditioner.
1064 
1065     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1066     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1067 
1068     Note that one can define initial index sets with any overlap via
1069     PCASMSetLocalSubdomains(); the routine
1070     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1071     if desired.
1072 
1073     Level: intermediate
1074 
1075 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1076           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1077 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatch to the implementation's setter; PetscTryMethod() succeeds silently when the PC type does not compose it */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1088 
1089 /*@
1090     PCASMSetType - Sets the type of restriction and interpolation used
1091     for local problems in the additive Schwarz method.
1092 
1093     Logically Collective on pc
1094 
1095     Input Parameters:
1096 +   pc  - the preconditioner context
1097 -   type - variant of ASM, one of
1098 .vb
1099       PC_ASM_BASIC       - full interpolation and restriction
1100       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1101       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1102       PC_ASM_NONE        - local processor restriction and interpolation
1103 .ve
1104 
1105     Options Database Key:
1106 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1107 
1108     Notes:
1109     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1110     to limit the local processor interpolation
1111 
1112     Level: intermediate
1113 
1114 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1115           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1116 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatch to the implementation's setter; PetscTryMethod() succeeds silently when the PC type does not compose it */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1127 
1128 /*@
1129     PCASMGetType - Gets the type of restriction and interpolation used
1130     for local problems in the additive Schwarz method.
1131 
1132     Logically Collective on pc
1133 
1134     Input Parameter:
1135 .   pc  - the preconditioner context
1136 
1137     Output Parameter:
1138 .   type - variant of ASM, one of
1139 
1140 .vb
1141       PC_ASM_BASIC       - full interpolation and restriction
1142       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1143       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1144       PC_ASM_NONE        - local processor restriction and interpolation
1145 .ve
1146 
1147     Options Database Key:
1148 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1149 
1150     Level: intermediate
1151 
1152 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1153           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1154 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() errors if the PC type does not provide this query */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1164 
1165 /*@
1166   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1167 
1168   Logically Collective on pc
1169 
1170   Input Parameters:
1171 + pc  - the preconditioner context
1172 - type - type of composition, one of
1173 .vb
1174   PC_COMPOSITE_ADDITIVE       - local additive combination
1175   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1176 .ve
1177 
1178   Options Database Key:
1179 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1180 
1181   Level: intermediate
1182 
1183 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1184 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the implementation's setter; PetscTryMethod() succeeds silently when the PC type does not compose it */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1195 
1196 /*@
1197   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1198 
1199   Logically Collective on pc
1200 
1201   Input Parameter:
1202 . pc  - the preconditioner context
1203 
1204   Output Parameter:
1205 . type - type of composition, one of
1206 .vb
1207   PC_COMPOSITE_ADDITIVE       - local additive combination
1208   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1209 .ve
1210 
1211   Options Database Key:
1212 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1213 
1214   Level: intermediate
1215 
1216 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1217 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod() errors if the PC type does not provide this query */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1228 
1229 /*@
1230     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1231 
1232     Logically Collective on pc
1233 
1234     Input Parameters:
1235 +   pc  - the preconditioner context
1236 -   doSort - sort the subdomain indices
1237 
1238     Level: intermediate
1239 
1240 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1241           PCASMCreateSubdomains2D()
1242 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatch to the implementation's setter; PetscTryMethod() succeeds silently when the PC type does not compose it */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1253 
1254 /*@C
1255    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1256    this processor.
1257 
1258    Collective on pc iff first_local is requested
1259 
1260    Input Parameter:
1261 .  pc - the preconditioner context
1262 
1263    Output Parameters:
1264 +  n_local - the number of blocks on this processor or NULL
1265 .  first_local - the global number of the first block on this processor or NULL,
1266                  all processors must request or all must pass NULL
1267 -  ksp - the array of KSP contexts
1268 
1269    Note:
1270    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1271 
1272    You must call KSPSetUp() before calling PCASMGetSubKSP().
1273 
1274    Fortran note:
1275    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1276 
1277    Level: advanced
1278 
1279 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1280           PCASMCreateSubdomains2D(),
1281 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() errors if the PC type does not provide this query */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1291 
1292 /* -------------------------------------------------------------------------------------*/
1293 /*MC
1294    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1295            its own KSP object.
1296 
1297    Options Database Keys:
1298 +  -pc_asm_blocks <blks> - Sets total blocks
1299 .  -pc_asm_overlap <ovl> - Sets overlap
1300 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
-  -pc_asm_local_type [additive, multiplicative] - Sets local solver composition type, default is additive
1302 
1303      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1304       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1305       -pc_asm_type basic to use the standard ASM.
1306 
1307    Notes:
1308     Each processor can have one or more blocks, but a block cannot be shared by more
1309      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1310 
1311      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1312         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1313 
1314      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1315          and set the options directly on the resulting KSP object (you can access its PC
1316          with KSPGetPC())
1317 
1318    Level: beginner
1319 
1320     References:
1321 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1322      Courant Institute, New York University Technical report
1323 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1324     Cambridge University Press.
1325 
1326 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1327            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1328            PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1329 
1330 M*/
1331 
/*
   PCCreate_ASM - Constructor for the PCASM preconditioner: allocates the
   PC_ASM context with its defaults, installs the PC operations table, and
   composes the type-specific methods invoked by the public PCASM* API.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* defaults: subdomain layout decided at setup, overlap of 1,
     restricted ASM with additive local composition */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = NULL;
  osm->restriction       = NULL;
  osm->lprolongation     = NULL;
  osm->lrestriction      = NULL;
  osm->x                 = NULL;
  osm->y                 = NULL;
  osm->is                = NULL;
  osm->is_local          = NULL;
  osm->mat               = NULL;
  osm->pmat              = NULL;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  /* install the PC operations implemented in this file */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* compose the methods reached through the public PCASM* wrappers */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1386 
1387 /*@C
1388    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1390 
1391    Collective
1392 
1393    Input Parameters:
1394 +  A - The global matrix operator
1395 -  n - the number of local blocks
1396 
1397    Output Parameters:
1398 .  outis - the array of index sets defining the subdomains
1399 
1400    Level: advanced
1401 
1402    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1403     from these if you use PCASMSetLocalSubdomains()
1404 
1405     In the Fortran version you must provide the array outis[] already allocated of length n.
1406 
1407 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1408 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* reject row ranges that split a block: both endpoints must be multiples of bs */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* block variants (BAIJ/SBAIJ) need block-compressed row info below */
    ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  /* With a diagonal block in hand and more than one requested subdomain,
     attempt a graph-partitioner-based decomposition; otherwise fall through
     to the contiguous-chunk partitioning further below */
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* CURRENT and SQUARE are trivial partitioners; only proceed if a real one was selected */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      /* compressed == isbaij: for block formats get the block-compressed row structure */
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=NULL,*jja=NULL;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE: MatCreateMPIAdj takes ownership of iia and jja; they are freed when adj is destroyed, so no PetscFree here */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* is[] is returned to the caller via outis; freed later with PCASMDestroySubdomains() */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* distribute mbs blocks as evenly as possible: first (mbs % n) subdomains get one extra block */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    /* indices[] becomes the permutation that groups nodes by their partition number */
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1562 
1563 /*@C
1564    PCASMDestroySubdomains - Destroys the index sets created with
1565    PCASMCreateSubdomains(). Should be called after setting subdomains
1566    with PCASMSetLocalSubdomains().
1567 
1568    Collective
1569 
1570    Input Parameters:
1571 +  n - the number of index sets
1572 .  is - the array of index sets
1573 -  is_local - the array of local index sets, can be NULL
1574 
1575    Level: advanced
1576 
1577 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1578 @*/
1579 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1580 {
1581   PetscInt       i;
1582   PetscErrorCode ierr;
1583 
1584   PetscFunctionBegin;
1585   if (n <= 0) PetscFunctionReturn(0);
1586   if (is) {
1587     PetscValidPointer(is,2);
1588     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1589     ierr = PetscFree(is);CHKERRQ(ierr);
1590   }
1591   if (is_local) {
1592     PetscValidPointer(is_local,3);
1593     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1594     ierr = PetscFree(is_local);CHKERRQ(ierr);
1595   }
1596   PetscFunctionReturn(0);
1597 }
1598 
1599 /*@
1600    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1601    preconditioner for a two-dimensional problem on a regular grid.
1602 
1603    Not Collective
1604 
1605    Input Parameters:
1606 +  m, n - the number of mesh points in the x and y directions
1607 .  M, N - the number of subdomains in the x and y directions
1608 .  dof - degrees of freedom per node
1609 -  overlap - overlap in mesh lines
1610 
1611    Output Parameters:
1612 +  Nsub - the number of subdomains created
1613 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1614 -  is_local - array of index sets defining non-overlapping subdomains
1615 
1616    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1618    preconditioners.  More general related routines are
1619    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1620 
1621    Level: advanced
1622 
1623 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1624           PCASMSetOverlap()
1625 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* only one degree of freedom per grid point is implemented */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  /* one subdomain per (i,j) cell of the M x N subdomain grid */
  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend the subdomain by 'overlap' mesh lines, clamped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      /* number of grid points in the overlapping rectangle */
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* enumerate the overlapping rectangle in natural (row-major) ordering */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* no overlap: the local subdomain equals the overlapping one, so share the same IS (extra reference) */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx[] for the non-overlapping interior; it holds height*width <= nidx entries */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* downstream code expects sorted index sets */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1680 
1681 /*@C
1682     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1683     only) for the additive Schwarz preconditioner.
1684 
1685     Not Collective
1686 
1687     Input Parameter:
1688 .   pc - the preconditioner context
1689 
1690     Output Parameters:
1691 +   n - the number of subdomains for this processor (default value = 1)
1692 .   is - the index sets that define the subdomains for this processor
1693 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1694 
1695 
1696     Notes:
1697     The IS numbering is in the parallel, global numbering of the vector.
1698 
1699     Level: advanced
1700 
1701 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1702           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1703 @*/
1704 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1705 {
1706   PC_ASM         *osm = (PC_ASM*)pc->data;
1707   PetscErrorCode ierr;
1708   PetscBool      match;
1709 
1710   PetscFunctionBegin;
1711   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1712   PetscValidIntPointer(n,2);
1713   if (is) PetscValidPointer(is,3);
1714   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1715   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1716   if (n) *n = osm->n_local_true;
1717   if (is) *is = osm->is;
1718   if (is_local) *is_local = osm->is_local;
1719   PetscFunctionReturn(0);
1720 }
1721 
1722 /*@C
1723     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1724     only) for the additive Schwarz preconditioner.
1725 
1726     Not Collective
1727 
1728     Input Parameter:
1729 .   pc - the preconditioner context
1730 
1731     Output Parameters:
1732 +   n - the number of matrices for this processor (default value = 1)
1733 -   mat - the matrices
1734 
1735     Level: advanced
1736 
1737     Notes:
1738     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks())
1739 
1740            Usually one would use PCSetModifySubMatrices() to change the submatrices in building the preconditioner.
1741 
1742 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1743           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1744 @*/
1745 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1746 {
1747   PC_ASM         *osm;
1748   PetscErrorCode ierr;
1749   PetscBool      match;
1750 
1751   PetscFunctionBegin;
1752   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1753   PetscValidIntPointer(n,2);
1754   if (mat) PetscValidPointer(mat,3);
1755   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1756   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1757   if (!match) {
1758     if (n) *n = 0;
1759     if (mat) *mat = NULL;
1760   } else {
1761     osm = (PC_ASM*)pc->data;
1762     if (n) *n = osm->n_local_true;
1763     if (mat) *mat = osm->pmat;
1764   }
1765   PetscFunctionReturn(0);
1766 }
1767 
1768 /*@
1769     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1770 
1771     Logically Collective
1772 
    Input Parameters:
1774 +   pc  - the preconditioner
1775 -   flg - boolean indicating whether to use subdomains defined by the DM
1776 
1777     Options Database Key:
1778 .   -pc_asm_dm_subdomains
1779 
1780     Level: intermediate
1781 
1782     Notes:
1783     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1784     so setting either of the first two effectively turns the latter off.
1785 
1786 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1787           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1788 @*/
1789 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1790 {
1791   PC_ASM         *osm = (PC_ASM*)pc->data;
1792   PetscErrorCode ierr;
1793   PetscBool      match;
1794 
1795   PetscFunctionBegin;
1796   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1797   PetscValidLogicalCollectiveBool(pc,flg,2);
1798   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1799   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1800   if (match) {
1801     osm->dm_subdomains = flg;
1802   }
1803   PetscFunctionReturn(0);
1804 }
1805 
1806 /*@
1807     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1808     Not Collective
1809 
1810     Input Parameter:
1811 .   pc  - the preconditioner
1812 
1813     Output Parameter:
1814 .   flg - boolean indicating whether to use subdomains defined by the DM
1815 
1816     Level: intermediate
1817 
1818 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1819           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1820 @*/
1821 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1822 {
1823   PC_ASM         *osm = (PC_ASM*)pc->data;
1824   PetscErrorCode ierr;
1825   PetscBool      match;
1826 
1827   PetscFunctionBegin;
1828   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1829   PetscValidBoolPointer(flg,2);
1830   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1831   if (match) *flg = osm->dm_subdomains;
1832   else *flg = PETSC_FALSE;
1833   PetscFunctionReturn(0);
1834 }
1835 
1836 /*@
1837      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1838 
1839    Not Collective
1840 
1841    Input Parameter:
1842 .  pc - the PC
1843 
1844    Output Parameter:
.  sub_mat_type - name of matrix type
1846 
1847    Level: advanced
1848 
1849 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1850 @*/
1851 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1852 {
1853   PetscErrorCode ierr;
1854 
1855   PetscFunctionBegin;
1856   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1857   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1858   PetscFunctionReturn(0);
1859 }
1860 
1861 /*@
1862      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1863 
   Collective on PC
1865 
1866    Input Parameters:
1867 +  pc             - the PC object
1868 -  sub_mat_type   - matrix type
1869 
1870    Options Database Key:
1871 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1872 
1873    Notes:
1874    See "${PETSC_DIR}/include/petscmat.h" for available types
1875 
1876   Level: advanced
1877 
1878 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1879 @*/
PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* forward to the composed PCASMSetSubMatType_C implementation; silently a no-op when pc is not a PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1889