xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision ffa8c5705e8ab2cf85ee1d14dbe507a6e2eb5283)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 
/*
  PCView_ASM - Viewer routine for the additive Schwarz preconditioner.

  Supports ASCII and string viewers.  For ASCII viewers:
    - default format: prints a summary (block counts, overlap, type) and, on
      rank 0 only, the KSP/PC of the first block;
    - PETSC_VIEWER_ASCII_INFO_DETAIL: every rank prints all of its local
      blocks using synchronized printing so the output is ordered by rank.

  Collective on pc; all ranks must call the subviewer get/restore pairs even
  when only rank 0 prints, since those calls are collective on the viewer.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM            *osm = (PC_ASM*)pc->data;
  PetscMPIInt       rank;
  PetscInt          i,bsz;
  PetscBool         iascii,isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char        *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  if (iascii) {
    /* default texts are used when overlap/block counts have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n"));
    /* only mention the local composition type when it differs from the default (additive) */
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
    PetscCall(PetscViewerGetFormat(viewer,&format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n",prefix?prefix:""));
        /* collective: every rank gets/restores the subviewer, but only rank 0 views its first block */
        PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(viewer));
          PetscCall(KSPView(osm->ksp[0],sviewer));
          PetscCall(PetscViewerASCIIPopTab(viewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      }
    } else {
      /* detailed view: every rank prints each of its local blocks, ordered by rank */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i],&bsz));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz));
        PetscCall(KSPView(osm->ksp[i],sviewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    /* compact one-line summary plus (rank-local) view of the first block's KSP */
    PetscCall(PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0],sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  }
  PetscFunctionReturn(0);
}
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM*)pc->data;
82   const char     *prefix;
83   char           fname[PETSC_MAX_PATH_LEN+1];
84   PetscViewer    viewer, sviewer;
85   char           *s;
86   PetscInt       i,j,nidx;
87   const PetscInt *idx;
88   PetscMPIInt    rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc,&prefix));
94   PetscCall(PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,sizeof(fname),NULL));
95   if (fname[0] == 0) PetscCall(PetscStrcpy(fname,"stdout"));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer));
97   for (i=0; i<osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i],&nidx));
100       PetscCall(ISGetIndices(osm->is[i],&idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
102 #define len  16*(nidx+1)+512
103       PetscCall(PetscMalloc1(len,&s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i));
107       for (j=0; j<nidx; j++) {
108         PetscCall(PetscViewerStringSPrintf(sviewer,"%D ",idx[j]));
109       }
110       PetscCall(ISRestoreIndices(osm->is[i],&idx));
111       PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
112       PetscCall(PetscViewerDestroy(&sviewer));
113       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
114       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, s));
115       PetscCall(PetscViewerFlush(viewer));
116       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
117       PetscCall(PetscFree(s));
118       if (osm->is_local) {
119         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
120 #define len  16*(nidx+1)+512
121         PetscCall(PetscMalloc1(len, &s));
122         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
123 #undef len
124         PetscCall(PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i));
125         PetscCall(ISGetLocalSize(osm->is_local[i],&nidx));
126         PetscCall(ISGetIndices(osm->is_local[i],&idx));
127         for (j=0; j<nidx; j++) {
128           PetscCall(PetscViewerStringSPrintf(sviewer,"%D ",idx[j]));
129         }
130         PetscCall(ISRestoreIndices(osm->is_local[i],&idx));
131         PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
132         PetscCall(PetscViewerDestroy(&sviewer));
133         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
134         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, s));
135         PetscCall(PetscViewerFlush(viewer));
136         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
137         PetscCall(PetscFree(s));
138       }
139     } else {
140       /* Participate in collective viewer calls. */
141       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
142       PetscCall(PetscViewerFlush(viewer));
143       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
144       /* Assume either all ranks have is_local or none do. */
145       if (osm->is_local) {
146         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
147         PetscCall(PetscViewerFlush(viewer));
148         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
149       }
150     }
151   }
152   PetscCall(PetscViewerFlush(viewer));
153   PetscCall(PetscViewerDestroy(&viewer));
154   PetscFunctionReturn(0);
155 }
156 
/*
  PCSetUp_ASM - Build (or rebuild) everything the ASM preconditioner needs:
  the subdomain index sets, the overlapping submatrices, the per-block local
  KSP solvers, and the scatters between global, processor-local, and per-block
  vectors.

  First call (pc->setupcalled == 0):
    1. determine the local subdomains (from a DM decomposition if allowed,
       otherwise one subdomain per process unless the user set them);
    2. compute the global (osm->n) and per-process maximum (osm->n_local)
       subdomain counts with a combined max/sum reduction;
    3. create the index sets, extend them by osm->overlap, optionally sort;
    4. create one sequential KSP per local block with options prefix
       "<pc prefix>sub_";
    5. build osm->lis, the concatenation of all local ISs with duplicates
       removed, and the scatters/work vectors based on it.

  Subsequent calls reuse the submatrices unless the nonzero pattern changed
  or a sub-matrix type conversion forces a fresh extraction.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscBool      flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    /* NOTE(review): this inner 'm' shadows the outer 'm'; the size obtained from
       osm->lis at the end of this branch is stored here and never read */
    PetscInt m;

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        }
        /* PCASMSetLocalSubdomains references the ISs it keeps, so release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      /* single reduction computes both max (-> n_local) and sum (-> n) */
      PetscCallMPI(MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* keep a pre-overlap copy of each IS so the non-overlapping subdomains remain available */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->is_local));
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i],&osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i],osm->is_local[i]));
        } else {
          /* no overlap: the two ISs stay identical, so share one object */
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc,&prefix));
    flg  = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL,prefix,"-pc_asm_print_subdomains",&flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap));
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) {
          PetscCall(ISSort(osm->is_local[i]));
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->ksp));
      if (domain_dm) {
        PetscCall(PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n"));
      }
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF,&ksp));
        PetscCall(KSPSetErrorIfNotConverged(ksp,pc->erroriffailure));
        PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1));
        /* default: apply the block preconditioner once per outer iteration */
        PetscCall(KSPSetType(ksp,KSPPREONLY));
        PetscCall(KSPGetPC(ksp,&subpc));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(KSPSetOptionsPrefix(ksp,prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp,"sub_"));
        if (domain_dm) {
          /* hand each block its DM from the decomposition; ownership passes to the KSP */
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        PetscCall(PetscFree(domain_dm));
      }
    }

    /* lis = union of all local (overlapping) subdomains: the index set for the
       processor-local work vectors lx/ly */
    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatDestroyMatrices(osm->n_local_true,&osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if ((scall == MAT_REUSE_MATRIX) && osm->sub_mat_type) {
    if (osm->n_local_true > 0) {
      PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
    }
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix));
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]));
      PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix));
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i])));
    }
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat,&vec,NULL));

    PetscCheckFalse(osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE),PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    /* lprolongation is only needed for restricted additive Schwarz with user/implicit is_local */
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->lprolongation));
    }
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->y));

    PetscCall(ISGetLocalSize(osm->lis,&m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
    /* give lx/ly the same vector type as the submatrices (e.g. GPU vectors) */
    PetscCall(MatGetVecType(osm->pmat[0],&vtype));
    PetscCall(VecCreate(PETSC_COMM_SELF,&osm->lx));
    PetscCall(VecSetSizes(osm->lx,m,m));
    PetscCall(VecSetType(osm->lx,vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    /* restriction: global vec entries in lis -> processor-local vector lx */
    PetscCall(VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(MatCreateVecs(osm->pmat[i],&osm->x[i],NULL));
      PetscCall(VecDuplicate(osm->x[i],&osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m,&idx_lis));
      /* map the global indices of is[i] to their positions within lis */
      PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis));
      PetscCheckFalse(nout != m,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
      PetscCall(VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i],&m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        /* idx1: positions of the is_local[i] entries within is[i] (source side in y[i]) */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog));
        PetscCall(PetscMalloc1(m_local,&idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheckFalse(nout != m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll));

        /* idx2: positions of the same entries within lis (destination side in ly) */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
        PetscCall(PetscMalloc1(m_local,&idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheckFalse(nout != m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] = rows is[i] by columns lis of pmat, used to update later
       block RHSs with earlier block solutions in the multiplicative sweep */
    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0],&prefix));
  for (i=0; i<osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i],prefix));
    if (!pc->setupcalled) {
      PetscCall(KSPSetFromOptions(osm->ksp[i]));
    }
  }
  PetscFunctionReturn(0);
}
431 
432 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
433 {
434   PC_ASM             *osm = (PC_ASM*)pc->data;
435   PetscInt           i;
436   KSPConvergedReason reason;
437 
438   PetscFunctionBegin;
439   for (i=0; i<osm->n_local_true; i++) {
440     PetscCall(KSPSetUp(osm->ksp[i]));
441     PetscCall(KSPGetConvergedReason(osm->ksp[i],&reason));
442     if (reason == KSP_DIVERGED_PC_FAILED) {
443       pc->failedreason = PC_SUBPC_ERROR;
444     }
445   }
446   PetscFunctionReturn(0);
447 }
448 
/*
  PCApply_ASM - Apply the additive (or locally multiplicative) Schwarz
  preconditioner: y = sum_i R_i^T A_i^{-1} R_i x (additive), with the
  restriction/interpolation optionally limited to local values depending on
  osm->type.

  Data flow per application:
    x --restriction--> lx --lrestriction[i]--> x[i] --KSPSolve--> y[i]
      --(lprolongation[i] or lrestriction[i]^T, ADD_VALUES)--> ly
      --restriction^T (ADD_VALUES)--> y

  For PC_COMPOSITE_MULTIPLICATIVE the RHS of block i+1 is corrected with the
  accumulated local solution before its solve, so the statement order in the
  block loop is significant.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(VecSet(y, 0.0));
    PetscCall(VecSet(osm->ly, 0.0));

    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i],0));
      PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
      PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
      PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping i-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      }

      if (i < n_local_true-1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly (Gauss-Seidel-like sweep over the blocks) */
          PetscCall(MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]));
          PetscCall(VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]));
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
517 
/*
  PCMatApply_ASM - Apply the ASM preconditioner to a block of vectors at once,
  Y = M^{-1} X, using KSPMatSolve() on the (single) local block.

  Only supported for one local subdomain (osm->n_local_true <= 1); the
  multi-block case errors out.  Columns of X are scattered one at a time into
  a dense work matrix Z, all columns are solved together, then scattered back
  into Y, reusing the same lx/ly work vectors as PCApply_ASM().
*/
static PetscErrorCode PCMatApply_ASM(PC pc,Mat X,Mat Y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  Mat            Z,W;
  Vec            x;
  PetscInt       i,m,N;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheckFalse(osm->n_local_true > 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }
  /* Z is (block size) x (number of columns of X): one column per RHS */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(MatZeroEntries(Y));
    PetscCall(VecSet(osm->ly, 0.0));

    /* gather every RHS column into Z */
    for (i = 0; i < N; ++i) {
      PetscCall(MatDenseGetColumnVecRead(X, i, &x));
      /* copy the global RHS to local RHS including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
      /* restrict local RHS to the overlapping 0-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
    }
    PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
    /* solve the overlapping 0-block for all columns at once */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W,0));
    PetscCall(MatDestroy(&Z));

    /* scatter every solution column of W back into Y */
    for (i = 0; i < N; ++i) {
      /* ly is reused per column, so it must be re-zeroed each iteration */
      PetscCall(VecSet(osm->ly, 0.0));
      PetscCall(MatDenseGetColumnVecRead(W, i, &x));
      if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping 0-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      }
      PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
      /* add the local solution to the global solution including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
    }
    PetscCall(MatDestroy(&W));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
591 
/*
  PCApplyTranspose_ASM - Apply the transpose of the ASM preconditioner,
  y = M^{-T} x, by using KSPSolveTranspose() on each block and swapping the
  roles the PC_ASM_RESTRICT / PC_ASM_INTERPOLATE flags play in limiting the
  forward and reverse scatters (since the transpose reverses the order of the
  three factors R_i^T A_i^{-1} R_i).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block (transpose solve) */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i],pc,osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));

  PetscFunctionReturn(0);

}
656 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  /* Reset (do not destroy) each subdomain solver so the KSP objects and their options survive a re-setup */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(KSPReset(osm->ksp[i]));
    }
  }
  /* Destroy the array of overlapping subdomain (sub)matrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
    }
  }
  /* osm->lrestriction doubles as the flag that the scatter/work-vector machinery was built */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      /* lprolongation is only allocated for some ASM variants, so it is guarded separately */
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));

  }
  /* Release the subdomain index sets and the merged local index set / work vectors */
  PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* lmats is only created for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
  }

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}
701 
702 static PetscErrorCode PCDestroy_ASM(PC pc)
703 {
704   PC_ASM         *osm = (PC_ASM*)pc->data;
705   PetscInt       i;
706 
707   PetscFunctionBegin;
708   PetscCall(PCReset_ASM(pc));
709   if (osm->ksp) {
710     for (i=0; i<osm->n_local_true; i++) {
711       PetscCall(KSPDestroy(&osm->ksp[i]));
712     }
713     PetscCall(PetscFree(osm->ksp));
714   }
715   PetscCall(PetscFree(pc->data));
716 
717   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",NULL));
718   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",NULL));
719   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",NULL));
720   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",NULL));
721   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",NULL));
722   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",NULL));
723   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",NULL));
724   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",NULL));
725   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",NULL));
726   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",NULL));
727   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",NULL));
728   PetscFunctionReturn(0);
729 }
730 
731 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
732 {
733   PC_ASM         *osm = (PC_ASM*)pc->data;
734   PetscInt       blocks,ovl;
735   PetscBool      flg;
736   PCASMType      asmtype;
737   PCCompositeType loctype;
738   char           sub_mat_type[256];
739 
740   PetscFunctionBegin;
741   PetscCall(PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options"));
742   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg));
743   PetscCall(PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg));
744   if (flg) {
745     PetscCall(PCASMSetTotalSubdomains(pc,blocks,NULL,NULL));
746     osm->dm_subdomains = PETSC_FALSE;
747   }
748   PetscCall(PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg));
749   if (flg) {
750     PetscCall(PCASMSetLocalSubdomains(pc,blocks,NULL,NULL));
751     osm->dm_subdomains = PETSC_FALSE;
752   }
753   PetscCall(PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg));
754   if (flg) {
755     PetscCall(PCASMSetOverlap(pc,ovl));
756     osm->dm_subdomains = PETSC_FALSE;
757   }
758   flg  = PETSC_FALSE;
759   PetscCall(PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg));
760   if (flg) PetscCall(PCASMSetType(pc,asmtype));
761   flg  = PETSC_FALSE;
762   PetscCall(PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg));
763   if (flg) PetscCall(PCASMSetLocalType(pc,loctype));
764   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg));
765   if (flg) {
766     PetscCall(PCASMSetSubMatType(pc,sub_mat_type));
767   }
768   PetscCall(PetscOptionsTail());
769   PetscFunctionReturn(0);
770 }
771 
772 /*------------------------------------------------------------------------------------*/
773 
/* Implementation behind PCASMSetLocalSubdomains(): record the user-supplied per-rank subdomain index sets */
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  PetscCheckFalse(n < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  PetscCheckFalse(pc->setupcalled && (n != osm->n_local_true || is),PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Take references on the incoming index sets BEFORE destroying the stored ones,
       so passing back the currently-stored sets does not free them out from under us */
    if (is) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n,&osm->is));
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n,&osm->is_local));
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local was given: derive the overlapping sets osm->is from it */
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true,&osm->is));
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i],&osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i],osm->is[i]));
          } else {
            /* No overlap extension: is[i] can safely alias is_local[i] via a shared reference */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
820 
821 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
822 {
823   PC_ASM         *osm = (PC_ASM*)pc->data;
824   PetscMPIInt    rank,size;
825   PetscInt       n;
826 
827   PetscFunctionBegin;
828   PetscCheckFalse(N < 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
829   PetscCheckFalse(is || is_local,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
830 
831   /*
832      Split the subdomains equally among all processors
833   */
834   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
835   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
836   n    = N/size + ((N % size) > rank);
837   PetscCheck(n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
838   PetscCheckFalse(pc->setupcalled && n != osm->n_local_true,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
839   if (!pc->setupcalled) {
840     PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
841 
842     osm->n_local_true = n;
843     osm->is           = NULL;
844     osm->is_local     = NULL;
845   }
846   PetscFunctionReturn(0);
847 }
848 
849 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
850 {
851   PC_ASM *osm = (PC_ASM*)pc->data;
852 
853   PetscFunctionBegin;
854   PetscCheckFalse(ovl < 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
855   PetscCheckFalse(pc->setupcalled && ovl != osm->overlap,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
856   if (!pc->setupcalled) osm->overlap = ovl;
857   PetscFunctionReturn(0);
858 }
859 
860 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
861 {
862   PC_ASM *osm = (PC_ASM*)pc->data;
863 
864   PetscFunctionBegin;
865   osm->type     = type;
866   osm->type_set = PETSC_TRUE;
867   PetscFunctionReturn(0);
868 }
869 
870 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
871 {
872   PC_ASM *osm = (PC_ASM*)pc->data;
873 
874   PetscFunctionBegin;
875   *type = osm->type;
876   PetscFunctionReturn(0);
877 }
878 
879 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
880 {
881   PC_ASM *osm = (PC_ASM *) pc->data;
882 
883   PetscFunctionBegin;
884   PetscCheckFalse(type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
885   osm->loctype = type;
886   PetscFunctionReturn(0);
887 }
888 
889 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
890 {
891   PC_ASM *osm = (PC_ASM *) pc->data;
892 
893   PetscFunctionBegin;
894   *type = osm->loctype;
895   PetscFunctionReturn(0);
896 }
897 
898 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
899 {
900   PC_ASM *osm = (PC_ASM*)pc->data;
901 
902   PetscFunctionBegin;
903   osm->sort_indices = doSort;
904   PetscFunctionReturn(0);
905 }
906 
/* Implementation behind PCASMGetSubKSP(): hand out this rank's block solvers.
   NOTE: when first_local is requested this is collective (MPI_Scan) on the PC's communicator. */
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  /* n_local_true is only set to a meaningful count during setup */
  PetscCheckFalse(osm->n_local_true < 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* inclusive prefix sum of the block counts, then subtract our own count
       to obtain the global index of this rank's first block */
    PetscCallMPI(MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  /* callers get a borrowed pointer to the internal KSP array; they must not free it */
  if (ksp) *ksp   = osm->ksp;
  PetscFunctionReturn(0);
}
922 
923 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
924 {
925   PC_ASM         *osm = (PC_ASM*)pc->data;
926 
927   PetscFunctionBegin;
928   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
929   PetscValidPointer(sub_mat_type,2);
930   *sub_mat_type = osm->sub_mat_type;
931   PetscFunctionReturn(0);
932 }
933 
934 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
935 {
936   PC_ASM            *osm = (PC_ASM*)pc->data;
937 
938   PetscFunctionBegin;
939   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
940   PetscCall(PetscFree(osm->sub_mat_type));
941   PetscCall(PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type));
942   PetscFunctionReturn(0);
943 }
944 
945 /*@C
946     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
947 
948     Collective on pc
949 
950     Input Parameters:
951 +   pc - the preconditioner context
952 .   n - the number of subdomains for this processor (default value = 1)
953 .   is - the index set that defines the subdomains for this processor
954          (or NULL for PETSc to determine subdomains)
955 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
956          (or NULL to not provide these)
957 
958     Options Database Key:
959     To set the total number of subdomain blocks rather than specify the
960     index sets, use the option
961 .    -pc_asm_local_blocks <blks> - Sets local blocks
962 
963     Notes:
964     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
965 
966     By default the ASM preconditioner uses 1 block per processor.
967 
968     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
969 
970     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
971     back to form the global solution (this is the standard restricted additive Schwarz method)
972 
973     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
974     no code to handle that case.
975 
976     Level: advanced
977 
978 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
979           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
980 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the PCASM implementation if composed; a no-op for other PC types */
  PetscCall(PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local)));
  PetscFunctionReturn(0);
}
988 
989 /*@C
990     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
991     additive Schwarz preconditioner.  Either all or no processors in the
992     PC communicator must call this routine, with the same index sets.
993 
994     Collective on pc
995 
996     Input Parameters:
997 +   pc - the preconditioner context
998 .   N  - the number of subdomains for all processors
999 .   is - the index sets that define the subdomains for all processors
1000          (or NULL to ask PETSc to determine the subdomains)
1001 -   is_local - the index sets that define the local part of the subdomains for this processor
1002          (or NULL to not provide this information)
1003 
1004     Options Database Key:
1005     To set the total number of subdomain blocks rather than specify the
1006     index sets, use the option
1007 .    -pc_asm_blocks <blks> - Sets total blocks
1008 
1009     Notes:
1010     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
1011 
1012     By default the ASM preconditioner uses 1 block per processor.
1013 
1014     These index sets cannot be destroyed until after completion of the
1015     linear solves for which the ASM preconditioner is being used.
1016 
1017     Use PCASMSetLocalSubdomains() to set local subdomains.
1018 
1019     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
1020 
1021     Level: advanced
1022 
1023 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1024           PCASMCreateSubdomains2D()
1025 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the PCASM implementation if composed; a no-op for other PC types */
  PetscCall(PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local)));
  PetscFunctionReturn(0);
}
1033 
1034 /*@
1035     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1036     additive Schwarz preconditioner.  Either all or no processors in the
1037     PC communicator must call this routine.
1038 
1039     Logically Collective on pc
1040 
1041     Input Parameters:
1042 +   pc  - the preconditioner context
1043 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1044 
1045     Options Database Key:
1046 .   -pc_asm_overlap <ovl> - Sets overlap
1047 
1048     Notes:
1049     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
1051     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1052 
1053     The overlap defaults to 1, so if one desires that no additional
1054     overlap be computed beyond what may have been set with a call to
1055     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
1058     internally by PETSc, and using an overlap of 0 would result in an ASM
1059     variant that is equivalent to the block Jacobi preconditioner.
1060 
1061     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1062     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1063 
1064     Note that one can define initial index sets with any overlap via
1065     PCASMSetLocalSubdomains(); the routine
1066     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1067     if desired.
1068 
1069     Level: intermediate
1070 
1071 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1072           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1073 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatch to the PCASM implementation if composed; a no-op for other PC types */
  PetscCall(PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl)));
  PetscFunctionReturn(0);
}
1082 
1083 /*@
1084     PCASMSetType - Sets the type of restriction and interpolation used
1085     for local problems in the additive Schwarz method.
1086 
1087     Logically Collective on pc
1088 
1089     Input Parameters:
1090 +   pc  - the preconditioner context
1091 -   type - variant of ASM, one of
1092 .vb
1093       PC_ASM_BASIC       - full interpolation and restriction
1094       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1095       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1096       PC_ASM_NONE        - local processor restriction and interpolation
1097 .ve
1098 
1099     Options Database Key:
1100 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1101 
1102     Notes:
1103     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1104     to limit the local processor interpolation
1105 
1106     Level: intermediate
1107 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1109           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1110 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatch to the PCASM implementation if composed; a no-op for other PC types */
  PetscCall(PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type)));
  PetscFunctionReturn(0);
}
1119 
1120 /*@
1121     PCASMGetType - Gets the type of restriction and interpolation used
1122     for local problems in the additive Schwarz method.
1123 
1124     Logically Collective on pc
1125 
1126     Input Parameter:
1127 .   pc  - the preconditioner context
1128 
1129     Output Parameter:
1130 .   type - variant of ASM, one of
1131 
1132 .vb
1133       PC_ASM_BASIC       - full interpolation and restriction
1134       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1135       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1136       PC_ASM_NONE        - local processor restriction and interpolation
1137 .ve
1138 
1139     Options Database Key:
1140 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1141 
1142     Level: intermediate
1143 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1145           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1146 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod errors (rather than silently returning) when the PC type does not provide this query */
  PetscCall(PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type)));
  PetscFunctionReturn(0);
}
1154 
1155 /*@
1156   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1157 
1158   Logically Collective on pc
1159 
1160   Input Parameters:
1161 + pc  - the preconditioner context
1162 - type - type of composition, one of
1163 .vb
1164   PC_COMPOSITE_ADDITIVE       - local additive combination
1165   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1166 .ve
1167 
1168   Options Database Key:
1169 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1170 
1171   Level: intermediate
1172 
1173 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1174 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the PCASM implementation if composed; a no-op for other PC types */
  PetscCall(PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type)));
  PetscFunctionReturn(0);
}
1183 
1184 /*@
1185   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1186 
1187   Logically Collective on pc
1188 
1189   Input Parameter:
1190 . pc  - the preconditioner context
1191 
1192   Output Parameter:
1193 . type - type of composition, one of
1194 .vb
1195   PC_COMPOSITE_ADDITIVE       - local additive combination
1196   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1197 .ve
1198 
1199   Options Database Key:
1200 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1201 
1202   Level: intermediate
1203 
1204 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1205 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod errors (rather than silently returning) when the PC type does not provide this query */
  PetscCall(PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type)));
  PetscFunctionReturn(0);
}
1214 
1215 /*@
1216     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1217 
1218     Logically Collective on pc
1219 
1220     Input Parameters:
1221 +   pc  - the preconditioner context
1222 -   doSort - sort the subdomain indices
1223 
1224     Level: intermediate
1225 
1226 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1227           PCASMCreateSubdomains2D()
1228 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatch to the PCASM implementation if composed; a no-op for other PC types */
  PetscCall(PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort)));
  PetscFunctionReturn(0);
}
1237 
1238 /*@C
1239    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1240    this processor.
1241 
1242    Collective on pc iff first_local is requested
1243 
1244    Input Parameter:
1245 .  pc - the preconditioner context
1246 
1247    Output Parameters:
1248 +  n_local - the number of blocks on this processor or NULL
1249 .  first_local - the global number of the first block on this processor or NULL,
1250                  all processors must request or all must pass NULL
1251 -  ksp - the array of KSP contexts
1252 
1253    Note:
1254    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1255 
1256    You must call KSPSetUp() before calling PCASMGetSubKSP().
1257 
1258    Fortran note:
1259    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1260 
1261    Level: advanced
1262 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
1265 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* collective when first_local is requested (implementation performs an MPI_Scan) */
  PetscCall(PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp)));
  PetscFunctionReturn(0);
}
1273 
1274 /* -------------------------------------------------------------------------------------*/
1275 /*MC
1276    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1277            its own KSP object.
1278 
1279    Options Database Keys:
1280 +  -pc_asm_blocks <blks> - Sets total blocks
1281 .  -pc_asm_overlap <ovl> - Sets overlap
1282 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
-  -pc_asm_local_type [additive, multiplicative] - Sets the local solver composition type, default is additive
1284 
1285      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1286       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1287       -pc_asm_type basic to use the standard ASM.
1288 
1289    Notes:
1290     Each processor can have one or more blocks, but a block cannot be shared by more
1291      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1292 
1293      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1294         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1295 
1296      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1297          and set the options directly on the resulting KSP object (you can access its PC
1298          with KSPGetPC())
1299 
1300    Level: beginner
1301 
1302     References:
1303 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1304      Courant Institute, New York University Technical report
1305 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1306     Cambridge University Press.
1307 
1308 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1309            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1310            PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1311 
1312 M*/
1313 
/* Constructor for the PCASM preconditioner: allocate the data structure, set defaults,
   install the function table, and compose the PCASM-specific methods */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM         *osm;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc,&osm));

  /* defaults: subdomain counts decided later, overlap of 1, restricted ASM with additive local composition */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = NULL;
  osm->restriction       = NULL;
  osm->lprolongation     = NULL;
  osm->lrestriction      = NULL;
  osm->x                 = NULL;
  osm->y                 = NULL;
  osm->is                = NULL;
  osm->is_local          = NULL;
  osm->mat               = NULL;
  osm->pmat              = NULL;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  /* wire up the generic PC operations to their ASM implementations */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* compose the PCASM-specific methods so the public PCASMSet*/PCASMGet* wrappers can find them */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM));
  PetscFunctionReturn(0);
}
1366 
1367 /*@C
1368    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1369    preconditioner for any problem on a general grid.
1370 
1371    Collective
1372 
1373    Input Parameters:
1374 +  A - The global matrix operator
1375 -  n - the number of local blocks
1376 
1377    Output Parameters:
1378 .  outis - the array of index sets defining the subdomains
1379 
1380    Level: advanced
1381 
1382    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1383     from these if you use PCASMSetLocalSubdomains()
1384 
1385     In the Fortran version you must provide the array outis[] already allocated of length n.
1386 
1387 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1388 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  PetscCheckFalse(n < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A,&prefix));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(MatGetBlockSize(A,&bs));
  /* the owned row range [rstart,rend) must be aligned with the block size */
  PetscCheckFalse(rstart/bs*bs != rstart || rend/bs*bs != rend,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop));
  if (hasop) {
    PetscCall(MatGetDiagonalBlock(A,&Ad));
  }
  if (Ad) {
    /* detect (S)BAIJ storage so the partitioning below can work per block row */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF,&mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* "current" and "square" are trivial partitioners; only use a real one */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match));
    if (!match) {
      PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match));
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      PetscCall(MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=NULL,*jja=NULL;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na+1,&iia));
        PetscCall(PetscMalloc1(nnz,&jja));
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE(review): iia/jja are intentionally not freed here; the MPIAdj
           matrix takes ownership and releases them when adj is destroyed */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj));
        PetscCall(MatPartitioningSetAdjacency(mpart,adj));
        PetscCall(MatPartitioningSetNParts(mpart,n));
        PetscCall(MatPartitioningApply(mpart,&ispart));
        PetscCall(ISPartitioningToNumbering(ispart,&isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* the IS array is handed to the caller, who frees it with PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(n,&is));
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* spread the remainder (mbs % n) one block row at a time over the first subdomains */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]));
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n,&count));
    PetscCall(ISPartitioningCount(ispart,n,count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb,&nidx));
    PetscCall(PetscMalloc1(nidx,&indices));
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb,&numbering));
    /* after this sort, indices[] is the permutation that groups rows by partition */
    PetscCall(PetscSortIntWithPermutation(nidx,numbering,indices));
    PetscCall(ISRestoreIndices(isnumb,&numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block-row index into its bs point-row indices */
      PetscCall(PetscMalloc1(nidx*bs,&newidx));
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));

  }
  PetscFunctionReturn(0);
}
1541 
1542 /*@C
1543    PCASMDestroySubdomains - Destroys the index sets created with
1544    PCASMCreateSubdomains(). Should be called after setting subdomains
1545    with PCASMSetLocalSubdomains().
1546 
1547    Collective
1548 
1549    Input Parameters:
1550 +  n - the number of index sets
1551 .  is - the array of index sets
1552 -  is_local - the array of local index sets, can be NULL
1553 
1554    Level: advanced
1555 
1556 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1557 @*/
1558 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1559 {
1560   PetscInt       i;
1561 
1562   PetscFunctionBegin;
1563   if (n <= 0) PetscFunctionReturn(0);
1564   if (is) {
1565     PetscValidPointer(is,2);
1566     for (i=0; i<n; i++) PetscCall(ISDestroy(&is[i]));
1567     PetscCall(PetscFree(is));
1568   }
1569   if (is_local) {
1570     PetscValidPointer(is_local,3);
1571     for (i=0; i<n; i++) PetscCall(ISDestroy(&is_local[i]));
1572     PetscCall(PetscFree(is_local));
1573   }
1574   PetscFunctionReturn(0);
1575 }
1576 
1577 /*@
1578    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1579    preconditioner for a two-dimensional problem on a regular grid.
1580 
1581    Not Collective
1582 
1583    Input Parameters:
1584 +  m   - the number of mesh points in the x direction
1585 .  n   - the number of mesh points in the y direction
1586 .  M   - the number of subdomains in the x direction
1587 .  N   - the number of subdomains in the y direction
1588 .  dof - degrees of freedom per node
1589 -  overlap - overlap in mesh lines
1590 
1591    Output Parameters:
1592 +  Nsub - the number of subdomains created
1593 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1594 -  is_local - array of index sets defining non-overlapping subdomains
1595 
1596    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1598    preconditioners.  More general related routines are
1599    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1600 
1601    Level: advanced
1602 
1603 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1604           PCASMSetOverlap()
1605 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* only one degree of freedom per node is supported here */
  PetscCheckFalse(dof != 1,PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  PetscCall(PetscMalloc1(*Nsub,is));
  PetscCall(PetscMalloc1(*Nsub,is_local));
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    PetscCheckFalse(height < 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* grow the subdomain by "overlap" mesh lines, clamped to the grid boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      PetscCheckFalse(width < 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      /* number of grid points in the (overlapping) subdomain rectangle */
      nidx   = (xright - xleft)*(yright - yleft);
      PetscCall(PetscMalloc1(nidx,&idx));
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft; /* row-major (natural) index of first point in mesh row ii */
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer));
      if (overlap == 0) {
        /* without overlap the local subdomain is identical: share the IS via a reference */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* refill idx (reusing the buffer, which is at least height*width long)
           with only the non-overlapping interior rectangle for is_local */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* sort each overlapping index set into increasing order */
  for (i=0; i<*Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(0);
}
1659 
1660 /*@C
1661     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1662     only) for the additive Schwarz preconditioner.
1663 
1664     Not Collective
1665 
1666     Input Parameter:
1667 .   pc - the preconditioner context
1668 
1669     Output Parameters:
1670 +   n - if requested, the number of subdomains for this processor (default value = 1)
1671 .   is - if requested, the index sets that define the subdomains for this processor
1672 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1673 
1674     Notes:
1675     The IS numbering is in the parallel, global numbering of the vector.
1676 
1677     Level: advanced
1678 
1679 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1680           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1681 @*/
1682 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1683 {
1684   PC_ASM         *osm = (PC_ASM*)pc->data;
1685   PetscBool      match;
1686 
1687   PetscFunctionBegin;
1688   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1689   if (n) PetscValidIntPointer(n,2);
1690   if (is) PetscValidPointer(is,3);
1691   if (is_local) PetscValidPointer(is_local,4);
1692   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1693   PetscCheck(match,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1694   if (n) *n = osm->n_local_true;
1695   if (is) *is = osm->is;
1696   if (is_local) *is_local = osm->is_local;
1697   PetscFunctionReturn(0);
1698 }
1699 
1700 /*@C
1701     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1702     only) for the additive Schwarz preconditioner.
1703 
1704     Not Collective
1705 
1706     Input Parameter:
1707 .   pc - the preconditioner context
1708 
1709     Output Parameters:
1710 +   n - if requested, the number of matrices for this processor (default value = 1)
1711 -   mat - if requested, the matrices
1712 
1713     Level: advanced
1714 
1715     Notes:
    Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks()
1717 
1718            Usually one would use PCSetModifySubMatrices() to change the submatrices in building the preconditioner.
1719 
1720 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1721           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1722 @*/
1723 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1724 {
1725   PC_ASM         *osm;
1726   PetscBool      match;
1727 
1728   PetscFunctionBegin;
1729   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1730   if (n) PetscValidIntPointer(n,2);
1731   if (mat) PetscValidPointer(mat,3);
1732   PetscCheck(pc->setupcalled,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1733   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1734   if (!match) {
1735     if (n) *n = 0;
1736     if (mat) *mat = NULL;
1737   } else {
1738     osm = (PC_ASM*)pc->data;
1739     if (n) *n = osm->n_local_true;
1740     if (mat) *mat = osm->pmat;
1741   }
1742   PetscFunctionReturn(0);
1743 }
1744 
1745 /*@
1746     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1747 
1748     Logically Collective
1749 
1750     Input Parameters:
1751 +   pc  - the preconditioner
1752 -   flg - boolean indicating whether to use subdomains defined by the DM
1753 
1754     Options Database Key:
1755 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1756 
1757     Level: intermediate
1758 
1759     Notes:
1760     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1761     so setting either of the first two effectively turns the latter off.
1762 
1763 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1764           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1765 @*/
1766 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1767 {
1768   PC_ASM         *osm = (PC_ASM*)pc->data;
1769   PetscBool      match;
1770 
1771   PetscFunctionBegin;
1772   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1773   PetscValidLogicalCollectiveBool(pc,flg,2);
1774   PetscCheck(!pc->setupcalled,((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1775   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1776   if (match) {
1777     osm->dm_subdomains = flg;
1778   }
1779   PetscFunctionReturn(0);
1780 }
1781 
1782 /*@
1783     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Not Collective
1785 
1786     Input Parameter:
1787 .   pc  - the preconditioner
1788 
1789     Output Parameter:
1790 .   flg - boolean indicating whether to use subdomains defined by the DM
1791 
1792     Level: intermediate
1793 
1794 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1795           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1796 @*/
1797 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1798 {
1799   PC_ASM         *osm = (PC_ASM*)pc->data;
1800   PetscBool      match;
1801 
1802   PetscFunctionBegin;
1803   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1804   PetscValidBoolPointer(flg,2);
1805   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1806   if (match) *flg = osm->dm_subdomains;
1807   else *flg = PETSC_FALSE;
1808   PetscFunctionReturn(0);
1809 }
1810 
1811 /*@
1812      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1813 
1814    Not Collective
1815 
1816    Input Parameter:
1817 .  pc - the PC
1818 
1819    Output Parameter:
.  sub_mat_type - name of matrix type
1821 
1822    Level: advanced
1823 
1824 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1825 @*/
1826 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1827 {
1828   PetscFunctionBegin;
1829   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1830   PetscCall(PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type)));
1831   PetscFunctionReturn(0);
1832 }
1833 
1834 /*@
1835      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1836 
1837    Collective on Mat
1838 
1839    Input Parameters:
1840 +  pc             - the PC object
1841 -  sub_mat_type   - matrix type
1842 
1843    Options Database Key:
1844 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1845 
1846    Notes:
1847    See "${PETSC_DIR}/include/petscmat.h" for available types
1848 
1849   Level: advanced
1850 
1851 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1852 @*/
PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the type-specific implementation, if this PC provides one;
     PetscTryMethod is a no-op when the PC is not a PCASM */
  PetscCall(PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type)));
  PetscFunctionReturn(0);
}
1860