xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision 2b3cbbda7ef6bfb59aeed26278d0c91b4282b9fd)
1 
2 #include <../src/ksp/pc/impls/is/pcis.h> /*I "petscpc.h" I*/
3 
/* Implementation of PCISSetUseStiffnessScaling(); composed on the PC in PCISCreate() */
static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  PetscFunctionBegin;
  /* flag is consumed in PCISSetUp() when the scaling vector D is filled */
  pcis->use_stiffness_scaling = use;
  PetscFunctionReturn(0);
}
12 
13 /*@
14  PCISSetUseStiffnessScaling - Tells PCIS to construct partition of unity using
15                               local matrices' diagonal.
16 
17    Not collective
18 
19    Input Parameters:
20 +  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity.
22 
23    Level: intermediate
24 
   Notes:
   When activated, the partition of unity is built from the diagonal of the local subdomain matrix (see PCISSetUp()).
27 .seealso: `PCBDDC`
28 @*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,use,2);
  /* dispatch to the method composed on the PC; silently a no-op for non-IS-based PCs */
  PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));
  PetscFunctionReturn(0);
}
37 
/* Implementation of PCISSetSubdomainDiagonalScaling(); composed on the PC in PCISCreate() */
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS          *pcis = (PC_IS*)pc->data;

  PetscFunctionBegin;
  /* take a reference before destroying the old vector, so passing pcis->D itself is safe */
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D,&sn));
    if (sn == pcis->n) {
      /* a full local-size vector was supplied: restrict it to the interface dofs via N_to_B */
      PetscCall(VecScatterBegin(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B,&pcis->D));
      PetscCall(VecCopy(pcis->vec1_B,pcis->D));
    } else PetscCheck(sn == pcis->n_B,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT,pcis->n_B,pcis->n,sn);
  }
  PetscFunctionReturn(0);
}
60 
61 /*@
62  PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.
63 
64    Not collective
65 
66    Input Parameters:
67 +  pc - the preconditioning context
68 -  scaling_factors - scaling factors for the subdomain
69 
70    Level: intermediate
71 
72    Notes:
   Intended for use in problems with jumping coefficients.
74 
75 .seealso: `PCBDDC`
76 @*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(scaling_factors,VEC_CLASSID,2);
  /* dispatch to the method composed on the PC; silently a no-op for non-IS-based PCs */
  PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));
  PetscFunctionReturn(0);
}
85 
86 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
87 {
88   PC_IS *pcis = (PC_IS*)pc->data;
89 
90   PetscFunctionBegin;
91   pcis->scaling_factor = scal;
92   if (pcis->D) {
93 
94     PetscCall(VecSet(pcis->D,pcis->scaling_factor));
95   }
96   PetscFunctionReturn(0);
97 }
98 
99 /*@
100  PCISSetSubdomainScalingFactor - Set scaling factor for PCIS.
101 
102    Not collective
103 
104    Input Parameters:
105 +  pc - the preconditioning context
106 -  scal - scaling factor for the subdomain
107 
108    Level: intermediate
109 
110    Notes:
   Intended for use in problems with jumping coefficients.
112 
113 .seealso: `PCBDDC`
114 @*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed on the PC; silently a no-op for non-IS-based PCs */
  PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));
  PetscFunctionReturn(0);
}
122 
123 /* -------------------------------------------------------------------------- */
124 /*
125    PCISSetUp -
126 */
127 PetscErrorCode  PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
128 {
129   PC_IS          *pcis  = (PC_IS*)(pc->data);
130   Mat_IS         *matis;
131   MatReuse       reuse;
132   PetscBool      flg,issbaij;
133 
134   PetscFunctionBegin;
135   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg));
136   PetscCheck(flg,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Requires preconditioning matrix of type MATIS");
137   matis = (Mat_IS*)pc->pmat->data;
138   if (pc->useAmat) {
139     PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat,MATIS,&flg));
140     PetscCheck(flg,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Requires linear system matrix of type MATIS");
141   }
142 
143   /* first time creation, get info on substructuring */
144   if (!pc->setupcalled) {
145     PetscInt    n_I;
146     PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
147     PetscBT     bt;
148     PetscInt    i,j;
149 
150     /* get info on mapping */
151     PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
152     PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
153     pcis->mapping = matis->rmapping;
154     PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping,&pcis->n));
155     PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared)));
156 
157     /* Identifying interior and interface nodes, in local numbering */
158     PetscCall(PetscBTCreate(pcis->n,&bt));
159     for (i=0;i<pcis->n_neigh;i++)
160       for (j=0;j<pcis->n_shared[i];j++) {
161         PetscCall(PetscBTSet(bt,pcis->shared[i][j]));
162       }
163 
164     /* Creating local and global index sets for interior and inteface nodes. */
165     PetscCall(PetscMalloc1(pcis->n,&idx_I_local));
166     PetscCall(PetscMalloc1(pcis->n,&idx_B_local));
167     for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
168       if (!PetscBTLookup(bt,i)) {
169         idx_I_local[n_I] = i;
170         n_I++;
171       } else {
172         idx_B_local[pcis->n_B] = i;
173         pcis->n_B++;
174       }
175     }
176 
177     /* Getting the global numbering */
178     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
179     idx_I_global = idx_B_local + pcis->n_B;
180     PetscCall(ISLocalToGlobalMappingApply(pcis->mapping,pcis->n_B,idx_B_local,idx_B_global));
181     PetscCall(ISLocalToGlobalMappingApply(pcis->mapping,n_I,idx_I_local,idx_I_global));
182 
183     /* Creating the index sets */
184     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local));
185     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc),pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global));
186     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local));
187     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc),n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global));
188 
189     /* Freeing memory */
190     PetscCall(PetscFree(idx_B_local));
191     PetscCall(PetscFree(idx_I_local));
192     PetscCall(PetscBTDestroy(&bt));
193 
194     /* Creating work vectors and arrays */
195     PetscCall(VecDuplicate(matis->x,&pcis->vec1_N));
196     PetscCall(VecDuplicate(pcis->vec1_N,&pcis->vec2_N));
197     PetscCall(VecCreate(PETSC_COMM_SELF,&pcis->vec1_D));
198     PetscCall(VecSetSizes(pcis->vec1_D,pcis->n-pcis->n_B,PETSC_DECIDE));
199     PetscCall(VecSetType(pcis->vec1_D,((PetscObject)pcis->vec1_N)->type_name));
200     PetscCall(VecDuplicate(pcis->vec1_D,&pcis->vec2_D));
201     PetscCall(VecDuplicate(pcis->vec1_D,&pcis->vec3_D));
202     PetscCall(VecDuplicate(pcis->vec1_D,&pcis->vec4_D));
203     PetscCall(VecCreate(PETSC_COMM_SELF,&pcis->vec1_B));
204     PetscCall(VecSetSizes(pcis->vec1_B,pcis->n_B,PETSC_DECIDE));
205     PetscCall(VecSetType(pcis->vec1_B,((PetscObject)pcis->vec1_N)->type_name));
206     PetscCall(VecDuplicate(pcis->vec1_B,&pcis->vec2_B));
207     PetscCall(VecDuplicate(pcis->vec1_B,&pcis->vec3_B));
208     PetscCall(MatCreateVecs(pc->pmat,&pcis->vec1_global,NULL));
209     PetscCall(PetscMalloc1(pcis->n,&pcis->work_N));
210     /* scaling vector */
211     if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
212       PetscCall(VecDuplicate(pcis->vec1_B,&pcis->D));
213       PetscCall(VecSet(pcis->D,pcis->scaling_factor));
214     }
215 
216     /* Creating the scatter contexts */
217     PetscCall(VecScatterCreate(pcis->vec1_N,pcis->is_I_local,pcis->vec1_D,(IS)0,&pcis->N_to_D));
218     PetscCall(VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D));
219     PetscCall(VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B));
220     PetscCall(VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B));
221 
222     /* map from boundary to local */
223     PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&pcis->BtoNmap));
224   }
225 
226   {
227     PetscInt sn;
228 
229     PetscCall(VecGetSize(pcis->D,&sn));
230     if (sn == pcis->n) {
231       PetscCall(VecScatterBegin(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
232       PetscCall(VecScatterEnd(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
233       PetscCall(VecDestroy(&pcis->D));
234       PetscCall(VecDuplicate(pcis->vec1_B,&pcis->D));
235       PetscCall(VecCopy(pcis->vec1_B,pcis->D));
236     } else PetscCheck(sn == pcis->n_B,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT,pcis->n_B,pcis->n,sn);
237   }
238 
239   /*
240     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
241     is such that interior nodes come first than the interface ones, we have
242 
243         [ A_II | A_IB ]
244     A = [------+------]
245         [ A_BI | A_BB ]
246   */
247   if (computematrices) {
248     PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
249     PetscInt  bs,ibs;
250 
251     reuse = MAT_INITIAL_MATRIX;
252     if (pcis->reusesubmatrices && pc->setupcalled) {
253       if (pc->flag == SAME_NONZERO_PATTERN) {
254         reuse = MAT_REUSE_MATRIX;
255       } else {
256         reuse = MAT_INITIAL_MATRIX;
257       }
258     }
259     if (reuse == MAT_INITIAL_MATRIX) {
260       PetscCall(MatDestroy(&pcis->A_II));
261       PetscCall(MatDestroy(&pcis->pA_II));
262       PetscCall(MatDestroy(&pcis->A_IB));
263       PetscCall(MatDestroy(&pcis->A_BI));
264       PetscCall(MatDestroy(&pcis->A_BB));
265     }
266 
267     PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping,&ibs));
268     PetscCall(MatGetBlockSize(matis->A,&bs));
269     PetscCall(MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->pA_II));
270     if (amat) {
271       Mat_IS *amatis = (Mat_IS*)pc->mat->data;
272       PetscCall(MatCreateSubMatrix(amatis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->A_II));
273     } else {
274       PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
275       PetscCall(MatDestroy(&pcis->A_II));
276       pcis->A_II = pcis->pA_II;
277     }
278     PetscCall(MatSetBlockSize(pcis->A_II,bs == ibs ? bs : 1));
279     PetscCall(MatSetBlockSize(pcis->pA_II,bs == ibs ? bs : 1));
280     PetscCall(MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,reuse,&pcis->A_BB));
281     PetscCall(PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij));
282     if (!issbaij) {
283       PetscCall(MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB));
284       PetscCall(MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI));
285     } else {
286       Mat newmat;
287 
288       PetscCall(MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat));
289       PetscCall(MatCreateSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB));
290       PetscCall(MatCreateSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI));
291       PetscCall(MatDestroy(&newmat));
292     }
293     PetscCall(MatSetBlockSize(pcis->A_BB,bs == ibs ? bs : 1));
294   }
295 
296   /* Creating scaling vector D */
297   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options,((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL));
298   if (pcis->use_stiffness_scaling) {
299     PetscScalar *a;
300     PetscInt    i,n;
301 
302     if (pcis->A_BB) {
303       PetscCall(MatGetDiagonal(pcis->A_BB,pcis->D));
304     } else {
305       PetscCall(MatGetDiagonal(matis->A,pcis->vec1_N));
306       PetscCall(VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD));
307       PetscCall(VecScatterEnd(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD));
308     }
309     PetscCall(VecAbs(pcis->D));
310     PetscCall(VecGetLocalSize(pcis->D,&n));
311     PetscCall(VecGetArray(pcis->D,&a));
312     for (i=0;i<n;i++) if (PetscAbsScalar(a[i])<PETSC_SMALL) a[i] = 1.0;
313     PetscCall(VecRestoreArray(pcis->D,&a));
314   }
315   PetscCall(VecSet(pcis->vec1_global,0.0));
316   PetscCall(VecScatterBegin(pcis->global_to_B,pcis->D,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE));
317   PetscCall(VecScatterEnd(pcis->global_to_B,pcis->D,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE));
318   PetscCall(VecScatterBegin(pcis->global_to_B,pcis->vec1_global,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
319   PetscCall(VecScatterEnd(pcis->global_to_B,pcis->vec1_global,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
320   PetscCall(VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B));
321   /* See historical note 01, at the bottom of this file. */
322 
323   /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
324   if (computesolvers) {
325     PC pc_ctx;
326 
327     pcis->pure_neumann = matis->pure_neumann;
328     /* Dirichlet */
329     PetscCall(KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D));
330     PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D,pc->erroriffailure));
331     PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1));
332     PetscCall(KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II));
333     PetscCall(KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_"));
334     PetscCall(KSPGetPC(pcis->ksp_D,&pc_ctx));
335     PetscCall(PCSetType(pc_ctx,PCLU));
336     PetscCall(KSPSetType(pcis->ksp_D,KSPPREONLY));
337     PetscCall(KSPSetFromOptions(pcis->ksp_D));
338     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
339     PetscCall(KSPSetUp(pcis->ksp_D));
340     /* Neumann */
341     PetscCall(KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N));
342     PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N,pc->erroriffailure));
343     PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1));
344     PetscCall(KSPSetOperators(pcis->ksp_N,matis->A,matis->A));
345     PetscCall(KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_"));
346     PetscCall(KSPGetPC(pcis->ksp_N,&pc_ctx));
347     PetscCall(PCSetType(pc_ctx,PCLU));
348     PetscCall(KSPSetType(pcis->ksp_N,KSPPREONLY));
349     PetscCall(KSPSetFromOptions(pcis->ksp_N));
350     {
351       PetscBool damp_fixed                    = PETSC_FALSE,
352                 remove_nullspace_fixed        = PETSC_FALSE,
353                 set_damping_factor_floating   = PETSC_FALSE,
354                 not_damp_floating             = PETSC_FALSE,
355                 not_remove_nullspace_floating = PETSC_FALSE;
356       PetscReal fixed_factor,
357                 floating_factor;
358 
359       PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed));
360       if (!damp_fixed) fixed_factor = 0.0;
361       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL));
362 
363       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL));
364 
365       PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&floating_factor,&set_damping_factor_floating));
366       if (!set_damping_factor_floating) floating_factor = 0.0;
367       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL));
368       if (!set_damping_factor_floating) floating_factor = 1.e-12;
369 
370       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL));
371 
372       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL));
373 
374       if (pcis->pure_neumann) {  /* floating subdomain */
375         if (!(not_damp_floating)) {
376           PetscCall(PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO));
377           PetscCall(PCFactorSetShiftAmount(pc_ctx,floating_factor));
378         }
379         if (!(not_remove_nullspace_floating)) {
380           MatNullSpace nullsp;
381           PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp));
382           PetscCall(MatSetNullSpace(matis->A,nullsp));
383           PetscCall(MatNullSpaceDestroy(&nullsp));
384         }
385       } else {  /* fixed subdomain */
386         if (damp_fixed) {
387           PetscCall(PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO));
388           PetscCall(PCFactorSetShiftAmount(pc_ctx,floating_factor));
389         }
390         if (remove_nullspace_fixed) {
391           MatNullSpace nullsp;
392           PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp));
393           PetscCall(MatSetNullSpace(matis->A,nullsp));
394           PetscCall(MatNullSpaceDestroy(&nullsp));
395         }
396       }
397     }
398     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
399     PetscCall(KSPSetUp(pcis->ksp_N));
400   }
401   PetscFunctionReturn(0);
402 }
403 
404 /* -------------------------------------------------------------------------- */
405 /*
406    PCISDestroy -
407 */
PetscErrorCode  PCISDestroy(PC pc)
{
  PC_IS *pcis;

  PetscFunctionBegin;
  if (!pc) PetscFunctionReturn(0);
  pcis = (PC_IS*)(pc->data);
  PetscCall(ISDestroy(&pcis->is_B_local));
  PetscCall(ISDestroy(&pcis->is_I_local));
  PetscCall(ISDestroy(&pcis->is_B_global));
  PetscCall(ISDestroy(&pcis->is_I_global));
  PetscCall(MatDestroy(&pcis->A_II));
  PetscCall(MatDestroy(&pcis->pA_II));
  PetscCall(MatDestroy(&pcis->A_IB));
  PetscCall(MatDestroy(&pcis->A_BI));
  PetscCall(MatDestroy(&pcis->A_BB));
  PetscCall(VecDestroy(&pcis->D));
  PetscCall(KSPDestroy(&pcis->ksp_N));
  PetscCall(KSPDestroy(&pcis->ksp_D));
  PetscCall(VecDestroy(&pcis->vec1_N));
  PetscCall(VecDestroy(&pcis->vec2_N));
  PetscCall(VecDestroy(&pcis->vec1_D));
  PetscCall(VecDestroy(&pcis->vec2_D));
  PetscCall(VecDestroy(&pcis->vec3_D));
  PetscCall(VecDestroy(&pcis->vec4_D));
  PetscCall(VecDestroy(&pcis->vec1_B));
  PetscCall(VecDestroy(&pcis->vec2_B));
  PetscCall(VecDestroy(&pcis->vec3_B));
  PetscCall(VecDestroy(&pcis->vec1_global));
  PetscCall(VecScatterDestroy(&pcis->global_to_D));
  PetscCall(VecScatterDestroy(&pcis->N_to_B));
  PetscCall(VecScatterDestroy(&pcis->N_to_D));
  PetscCall(VecScatterDestroy(&pcis->global_to_B));
  PetscCall(PetscFree(pcis->work_N));
  /* n_neigh > -1 means ISLocalToGlobalMappingGetInfo() was called in PCISSetUp();
     the info must be restored before the mapping itself is destroyed */
  if (pcis->n_neigh > -1) {
    PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared)));
  }
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
  /* remove the methods composed in PCISCreate() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL));
  PetscFunctionReturn(0);
}
452 
453 /* -------------------------------------------------------------------------- */
454 /*
455    PCISCreate -
456 */
PetscErrorCode  PCISCreate(PC pc)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /* allocate the PC_IS context unless the caller already attached one */
  if (!pcis) {
    PetscCall(PetscNewLog(pc,&pcis));
    pc->data = pcis;
  }
  /* defaults: n_neigh = -1 flags that ISLocalToGlobalMappingGetInfo() has not
     been called yet (checked in PCISDestroy) */
  pcis->n_neigh          = -1;
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* composing functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",PCISSetUseStiffnessScaling_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",PCISSetSubdomainScalingFactor_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",PCISSetSubdomainDiagonalScaling_IS));
  PetscFunctionReturn(0);
}
475 
476 /* -------------------------------------------------------------------------- */
477 /*
478    PCISApplySchur -
479 
480    Input parameters:
481 .  pc - preconditioner context
482 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
483 
484    Output parameters:
485 .  vec1_B - result of Schur complement applied to chunk
486 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
487 .  vec1_D - garbage (used as work space)
488 .  vec2_D - garbage (used as work space)
489 
490 */
PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /* if no second interface work vector was supplied, use v itself (v is then overwritten) */
  if (!vec2_B) vec2_B = v;

  /* vec1_B = S v = (A_BB - A_BI inv(A_II) A_IB) v */
  PetscCall(MatMult(pcis->A_BB,v,vec1_B));
  PetscCall(MatMult(pcis->A_IB,v,vec1_D));
  PetscCall(KSPSolve(pcis->ksp_D,vec1_D,vec2_D));
  PetscCall(KSPCheckSolve(pcis->ksp_D,pc,vec2_D));
  PetscCall(MatMult(pcis->A_BI,vec2_D,vec2_B));
  PetscCall(VecAXPY(vec1_B,-1.0,vec2_B));
  PetscFunctionReturn(0);
}
506 
507 /* -------------------------------------------------------------------------- */
508 /*
509    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
510    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
511    mode.
512 
513    Input parameters:
514 .  pc - preconditioner context
515 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
516 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
517 
518    Output parameter:
519 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
520 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
521 
522    Notes:
523    The entries in the array that do not correspond to interface nodes remain unaltered.
524 */
525 PetscErrorCode  PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
526 {
527   PetscInt       i;
528   const PetscInt *idex;
529   PetscScalar    *array_B;
530   PC_IS          *pcis = (PC_IS*)(pc->data);
531 
532   PetscFunctionBegin;
533   PetscCall(VecGetArray(v_B,&array_B));
534   PetscCall(ISGetIndices(pcis->is_B_local,&idex));
535 
536   if (smode == SCATTER_FORWARD) {
537     if (imode == INSERT_VALUES) {
538       for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
539     } else {  /* ADD_VALUES */
540       for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
541     }
542   } else {  /* SCATTER_REVERSE */
543     if (imode == INSERT_VALUES) {
544       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
545     } else {  /* ADD_VALUES */
546       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
547     }
548   }
549   PetscCall(ISRestoreIndices(pcis->is_B_local,&idex));
550   PetscCall(VecRestoreArray(v_B,&array_B));
551   PetscFunctionReturn(0);
552 }
553 
554 /* -------------------------------------------------------------------------- */
555 /*
556    PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
557    More precisely, solves the problem:
558                                         [ A_II  A_IB ] [ . ]   [ 0 ]
559                                         [            ] [   ] = [   ]
560                                         [ A_BI  A_BB ] [ x ]   [ b ]
561 
562    Input parameters:
563 .  pc - preconditioner context
564 .  b - vector of local interface nodes (including ghosts)
565 
566    Output parameters:
567 .  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
568        complement to b
569 .  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
570 .  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
571 
572 */
PetscErrorCode  PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior values, b on the interface dofs */
  PetscCall(VecSet(vec1_N,0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE));
  /* Checking for consistency of the RHS (debug aid, enabled by -pc_is_check_consistency):
     for a floating (pure Neumann) subdomain the RHS should be orthogonal to the
     constant null space, i.e. its average should be (near) zero */
  {
    PetscBool flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL,NULL,"-pc_is_check_consistency",&flg,NULL));
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer));

      PetscCall(VecSum(vec1_N,&average));
      average = average / ((PetscReal)pcis->n);
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,(double)PetscAbsScalar(average)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",PetscGlobalRank,(double)PetscAbsScalar(average)));
      }
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  }
  /* Solving the system for vec2_N */
  PetscCall(KSPSolve(pcis->ksp_N,vec1_N,vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N,pc,vec2_N));
  /* Extracting the local interface vector out of the solution */
  PetscCall(VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
618