xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision d49ee5fea16a8ec2d6bb1ebf5638f00c84fde60f)
1 
2 #include <petsc/private/pcisimpl.h> /*I "petscpc.h" I*/
3 
4 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
5 {
6   PC_IS *pcis = (PC_IS *)pc->data;
7 
8   PetscFunctionBegin;
9   pcis->use_stiffness_scaling = use;
10   PetscFunctionReturn(PETSC_SUCCESS);
11 }
12 
/*@
  PCISSetUseStiffnessScaling - Tells `PCIS` to construct partition of unity using
  the local matrices' diagonal entries

  Logically Collective

  Input Parameters:
+ pc  - the preconditioning context
- use - whether or not it should use matrix diagonal to build partition of unity.

  Level: intermediate

  Developer Notes:
  There is no manual page for `PCIS` nor some of its methods

.seealso: `PCIS`, `PCBDDC`
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, use, 2);
  /* dispatch to the implementation composed by PCISCreate(); silently a no-op for non-PCIS types */
  PetscTryMethod(pc, "PCISSetUseStiffnessScaling_C", (PC, PetscBool), (pc, use));
  PetscFunctionReturn(PETSC_SUCCESS);
}
38 
/* Type-specific implementation of PCISSetSubdomainDiagonalScaling(): stores a
   reference to the user-provided scaling vector in pcis->D.
   The reference is taken BEFORE destroying the current pcis->D, so the call is
   safe even if the caller passes the vector already stored there. */
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt sn;

    /* Setup already ran, so the N_to_B scatter exists: accept either a vector of
       full local size n (restricted below to the interface dofs) or one already
       of interface size n_B; any other size is an error. */
    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      /* restrict the full local vector to the interface (B) dofs */
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
61 
/*@
  PCISSetSubdomainDiagonalScaling - Set diagonal scaling for `PCIS`.

  Logically Collective

  Input Parameters:
+ pc              - the preconditioning context
- scaling_factors - scaling factors for the subdomain

  Level: intermediate

  Note:
  Intended for use with jumping coefficients cases.

  Developer Notes:
  There is no manual page for `PCIS` nor some of its methods

.seealso: `PCIS`, `PCBDDC`
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(scaling_factors, VEC_CLASSID, 2);
  /* dispatch to the implementation composed by PCISCreate(); silently a no-op for non-PCIS types */
  PetscTryMethod(pc, "PCISSetSubdomainDiagonalScaling_C", (PC, Vec), (pc, scaling_factors));
  PetscFunctionReturn(PETSC_SUCCESS);
}
89 
90 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
91 {
92   PC_IS *pcis = (PC_IS *)pc->data;
93 
94   PetscFunctionBegin;
95   pcis->scaling_factor = scal;
96   if (pcis->D) PetscCall(VecSet(pcis->D, pcis->scaling_factor));
97   PetscFunctionReturn(PETSC_SUCCESS);
98 }
99 
/*@
  PCISSetSubdomainScalingFactor - Set scaling factor for `PCIS`.

  Not Collective

  Input Parameters:
+ pc   - the preconditioning context
- scal - scaling factor for the subdomain

  Level: intermediate

  Note:
  Intended for use with the jumping coefficients cases.

  Developer Notes:
  There is no manual page for `PCIS` nor some of its methods

.seealso: `PCIS`, `PCBDDC`
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed by PCISCreate(); silently a no-op for non-PCIS types */
  PetscTryMethod(pc, "PCISSetSubdomainScalingFactor_C", (PC, PetscScalar), (pc, scal));
  PetscFunctionReturn(PETSC_SUCCESS);
}
126 
/*
   PCISSetUp - Common setup for PCIS-based preconditioners (iterative substructuring).

   Input Parameters:
+  pc              - the preconditioning context; pc->pmat must be of type MATIS
.  computematrices - if true, extract/refresh the local blocks A_II, A_IB, A_BI, A_BB
-  computesolvers  - if true, create and set up the local Dirichlet (ksp_D) and
                     Neumann (ksp_N) solvers

   On the first call this builds the interior/interface splitting of the local dofs,
   the associated local/global index sets, work vectors, scatters, and the
   partition-of-unity scaling vector D.
*/
PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
{
  PC_IS    *pcis = (PC_IS *)(pc->data);
  Mat_IS   *matis;
  MatReuse  reuse;
  PetscBool flg, issbaij;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires preconditioning matrix of type MATIS");
  matis = (Mat_IS *)pc->pmat->data;
  if (pc->useAmat) {
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATIS, &flg));
    PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires linear system matrix of type MATIS");
  }

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt  n_I;
    PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global;
    PetscBT   bt;
    PetscInt  i, j;

    /* get info on mapping (take a reference before destroying any previous one) */
    PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
    pcis->mapping = matis->rmapping;
    PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping, &pcis->n));
    /* neigh/shared arrays obtained here are returned in PCISDestroy via RestoreInfo */
    PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));

    /* Identifying interior and interface nodes, in local numbering:
       a node is on the interface iff it is shared with at least one neighbor */
    PetscCall(PetscBTCreate(pcis->n, &bt));
    for (i = 0; i < pcis->n_neigh; i++)
      for (j = 0; j < pcis->n_shared[i]; j++) PetscCall(PetscBTSet(bt, pcis->shared[i][j]));

    /* Creating local and global index sets for interior and interface nodes. */
    PetscCall(PetscMalloc1(pcis->n, &idx_I_local));
    PetscCall(PetscMalloc1(pcis->n, &idx_B_local));
    for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) {
      if (!PetscBTLookup(bt, i)) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }

    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcis->n_B, idx_B_local, idx_B_global));
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, n_I, idx_I_local, idx_I_global));

    /* Creating the index sets (local ones on PETSC_COMM_SELF, global ones on the PC's communicator) */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pcis->n_B, idx_B_local, PETSC_COPY_VALUES, &pcis->is_B_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), pcis->n_B, idx_B_global, PETSC_COPY_VALUES, &pcis->is_B_global));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_I, idx_I_local, PETSC_COPY_VALUES, &pcis->is_I_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), n_I, idx_I_global, PETSC_COPY_VALUES, &pcis->is_I_global));

    /* Freeing memory */
    PetscCall(PetscFree(idx_B_local));
    PetscCall(PetscFree(idx_I_local));
    PetscCall(PetscBTDestroy(&bt));

    /* Creating work vectors and arrays: _N local (interior+interface), _D interior only, _B interface only */
    PetscCall(VecDuplicate(matis->x, &pcis->vec1_N));
    PetscCall(VecDuplicate(pcis->vec1_N, &pcis->vec2_N));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_D));
    PetscCall(VecSetSizes(pcis->vec1_D, pcis->n - pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_D, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec2_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec3_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec4_D));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_B));
    PetscCall(VecSetSizes(pcis->vec1_B, pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_B, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec2_B));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec3_B));
    PetscCall(MatCreateVecs(pc->pmat, &pcis->vec1_global, NULL));
    PetscCall(PetscMalloc1(pcis->n, &pcis->work_N));
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecSet(pcis->D, pcis->scaling_factor));
    }

    /* Creating the scatter contexts */
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_I_local, pcis->vec1_D, (IS)0, &pcis->N_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_I_global, pcis->vec1_D, (IS)0, &pcis->global_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_B_local, pcis->vec1_B, (IS)0, &pcis->N_to_B));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_B_global, pcis->vec1_B, (IS)0, &pcis->global_to_B));

    /* map from boundary to local */
    PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local, &pcis->BtoNmap));
  }

  {
    /* If D was supplied with full local size n (e.g. via PCISSetSubdomainDiagonalScaling
       before setup), restrict it to the interface dofs; otherwise it must be of size n_B.
       Mirrors the logic in PCISSetSubdomainDiagonalScaling_IS for the post-setup case. */
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come first than the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
  if (computematrices) {
    PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
    PetscInt  bs, ibs;

    /* reuse the existing submatrices only when the nonzero pattern is unchanged */
    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDestroy(&pcis->A_II));
      PetscCall(MatDestroy(&pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_IB));
      PetscCall(MatDestroy(&pcis->A_BI));
      PetscCall(MatDestroy(&pcis->A_BB));
    }

    PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping, &ibs));
    PetscCall(MatGetBlockSize(matis->A, &bs));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->pA_II));
    if (amat) { /* operator and preconditioning matrix differ: extract A_II from the operator */
      Mat_IS *amatis = (Mat_IS *)pc->mat->data;
      PetscCall(MatCreateSubMatrix(amatis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->A_II));
    } else { /* share the same interior block (extra reference, no copy) */
      PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_II));
      pcis->A_II = pcis->pA_II;
    }
    /* propagate the block size only when it matches the mapping's block size */
    PetscCall(MatSetBlockSize(pcis->A_II, bs == ibs ? bs : 1));
    PetscCall(MatSetBlockSize(pcis->pA_II, bs == ibs ? bs : 1));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_B_local, reuse, &pcis->A_BB));
    PetscCall(PetscObjectTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
    } else {
      /* SBAIJ stores only the upper triangle, so off-diagonal blocks cannot be
         extracted directly; convert to BAIJ first */
      Mat newmat;

      PetscCall(MatConvert(matis->A, MATSEQBAIJ, MAT_INITIAL_MATRIX, &newmat));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
      PetscCall(MatDestroy(&newmat));
    }
    PetscCall(MatSetBlockSize(pcis->A_BB, bs == ibs ? bs : 1));
  }

  /* Creating scaling vector D */
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_is_use_stiffness_scaling", &pcis->use_stiffness_scaling, NULL));
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt     i, n;

    if (pcis->A_BB) {
      PetscCall(MatGetDiagonal(pcis->A_BB, pcis->D));
    } else {
      PetscCall(MatGetDiagonal(matis->A, pcis->vec1_N));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
    }
    PetscCall(VecAbs(pcis->D));
    /* guard against (near-)zero diagonal entries, which would break the division below */
    PetscCall(VecGetLocalSize(pcis->D, &n));
    PetscCall(VecGetArray(pcis->D, &a));
    for (i = 0; i < n; i++)
      if (PetscAbsScalar(a[i]) < PETSC_SMALL) a[i] = 1.0;
    PetscCall(VecRestoreArray(pcis->D, &a));
  }
  /* Normalize D into a partition of unity: divide each interface entry by the sum
     of the corresponding entries over all subdomains sharing that node */
  PetscCall(VecSet(pcis->vec1_global, 0.0));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecPointwiseDivide(pcis->D, pcis->D, pcis->vec1_B));
  /* See historical note 01, at the bottom of this file. */

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_D));
    PetscCall(KSPSetNestLevel(pcis->ksp_D, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_D, pcis->A_II, pcis->A_II));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_D, "is_localD_"));
    PetscCall(KSPGetPC(pcis->ksp_D, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_D, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_D));
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    PetscCall(KSPSetUp(pcis->ksp_D));
    /* Neumann */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_N));
    PetscCall(KSPSetNestLevel(pcis->ksp_N, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_N, matis->A, matis->A));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_N, "is_localN_"));
    PetscCall(KSPGetPC(pcis->ksp_N, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_N, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_N));
    {
      PetscBool damp_fixed = PETSC_FALSE, remove_nullspace_fixed = PETSC_FALSE, set_damping_factor_floating = PETSC_FALSE, not_damp_floating = PETSC_FALSE, not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor, floating_factor;

      /* NOTE(review): -pc_is_damp_fixed is queried twice, first as a Real (for the
         factor) then as a Bool (for the flag); same pattern below for
         -pc_is_set_damping_factor_floating. Confirm this double query is intended. */
      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &fixed_factor, &damp_fixed));
      if (!damp_fixed) fixed_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &damp_fixed, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_remove_nullspace_fixed", &remove_nullspace_fixed, NULL));

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &floating_factor, &set_damping_factor_floating));
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &set_damping_factor_floating, NULL));
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_damp_floating", &not_damp_floating, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_remove_nullspace_floating", &not_remove_nullspace_floating, NULL));

      if (pcis->pure_neumann) { /* floating subdomain: singular local problem */
        if (!(not_damp_floating)) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          /* NOTE(review): this uses floating_factor although this is the fixed branch
             and fixed_factor (read above) is otherwise unused — confirm whether
             fixed_factor was intended here */
          PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      }
    }
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    PetscCall(KSPSetUp(pcis->ksp_N));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
399 
/*
   PCISDestroy - Frees everything allocated by PCISSetUp()/PCISCreate() and removes
   the composed PCISSet* methods. Safe to call with a NULL pc or before setup
   (all destroy routines accept NULL objects).
*/
PetscErrorCode PCISDestroy(PC pc)
{
  PC_IS *pcis;

  PetscFunctionBegin;
  if (!pc) PetscFunctionReturn(PETSC_SUCCESS);
  pcis = (PC_IS *)(pc->data);
  PetscCall(ISDestroy(&pcis->is_B_local));
  PetscCall(ISDestroy(&pcis->is_I_local));
  PetscCall(ISDestroy(&pcis->is_B_global));
  PetscCall(ISDestroy(&pcis->is_I_global));
  PetscCall(MatDestroy(&pcis->A_II));
  PetscCall(MatDestroy(&pcis->pA_II));
  PetscCall(MatDestroy(&pcis->A_IB));
  PetscCall(MatDestroy(&pcis->A_BI));
  PetscCall(MatDestroy(&pcis->A_BB));
  PetscCall(VecDestroy(&pcis->D));
  PetscCall(KSPDestroy(&pcis->ksp_N));
  PetscCall(KSPDestroy(&pcis->ksp_D));
  PetscCall(VecDestroy(&pcis->vec1_N));
  PetscCall(VecDestroy(&pcis->vec2_N));
  PetscCall(VecDestroy(&pcis->vec1_D));
  PetscCall(VecDestroy(&pcis->vec2_D));
  PetscCall(VecDestroy(&pcis->vec3_D));
  PetscCall(VecDestroy(&pcis->vec4_D));
  PetscCall(VecDestroy(&pcis->vec1_B));
  PetscCall(VecDestroy(&pcis->vec2_B));
  PetscCall(VecDestroy(&pcis->vec3_B));
  PetscCall(VecDestroy(&pcis->vec1_global));
  PetscCall(VecScatterDestroy(&pcis->global_to_D));
  PetscCall(VecScatterDestroy(&pcis->N_to_B));
  PetscCall(VecScatterDestroy(&pcis->N_to_D));
  PetscCall(VecScatterDestroy(&pcis->global_to_B));
  PetscCall(PetscFree(pcis->work_N));
  /* n_neigh > -1 means PCISSetUp obtained the neighbor info; it must be returned
     to the mapping BEFORE the mapping itself is destroyed */
  if (pcis->n_neigh > -1) PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
  /* remove the methods composed in PCISCreate() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
445 
/*
   PCISCreate - Initializes the PC_IS part of the preconditioner context and
   composes the PCISSet* methods. Called at creation time by PC types built on
   top of PCIS (e.g. PCNN, PCBDDC).
*/
PetscErrorCode PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  if (!pcis) { /* allocate the context only if the calling PC type has not already done so */
    PetscCall(PetscNew(&pcis));
    pc->data = pcis;
  }
  pcis->n_neigh          = -1; /* sentinel: neighbor info not yet obtained (checked in PCISDestroy) */
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* composing functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", PCISSetUseStiffnessScaling_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", PCISSetSubdomainScalingFactor_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", PCISSetSubdomainDiagonalScaling_IS));
  PetscFunctionReturn(PETSC_SUCCESS);
}
467 
/*
   PCISApplySchur - Applies the local Schur complement to an interface vector:

       vec1_B = S v = (A_BB - A_BI * inv(A_II) * A_IB) v

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of Schur complement applied to chunk
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)

*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  /* caller allows v to be overwritten as workspace when no vec2_B is supplied */
  if (!vec2_B) vec2_B = v;

  PetscCall(MatMult(pcis->A_BB, v, vec1_B));           /* vec1_B = A_BB v */
  PetscCall(MatMult(pcis->A_IB, v, vec1_D));           /* vec1_D = A_IB v */
  PetscCall(KSPSolve(pcis->ksp_D, vec1_D, vec2_D));    /* vec2_D = inv(A_II) A_IB v (Dirichlet solve) */
  PetscCall(KSPCheckSolve(pcis->ksp_D, pc, vec2_D));
  PetscCall(MatMult(pcis->A_BI, vec2_D, vec2_B));      /* vec2_B = A_BI inv(A_II) A_IB v */
  PetscCall(VecAXPY(vec1_B, -1.0, vec2_B));            /* vec1_B = S v */
  PetscFunctionReturn(PETSC_SUCCESS);
}
497 
498 /*
499   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
500   including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
501   mode.
502 
503   Input Parameters:
504 + pc      - preconditioner context
505 . array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
506 - v_B     - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
507 
508   Output Parameter:
509 + array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
510 - v_B     - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
511 
512   Note:
513   The entries in the array that do not correspond to interface nodes remain unaltered.
514 */
515 PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
516 {
517   PetscInt        i;
518   const PetscInt *idex;
519   PetscScalar    *array_B;
520   PC_IS          *pcis = (PC_IS *)(pc->data);
521 
522   PetscFunctionBegin;
523   PetscCall(VecGetArray(v_B, &array_B));
524   PetscCall(ISGetIndices(pcis->is_B_local, &idex));
525 
526   if (smode == SCATTER_FORWARD) {
527     if (imode == INSERT_VALUES) {
528       for (i = 0; i < pcis->n_B; i++) array_B[i] = array_N[idex[i]];
529     } else { /* ADD_VALUES */
530       for (i = 0; i < pcis->n_B; i++) array_B[i] += array_N[idex[i]];
531     }
532   } else { /* SCATTER_REVERSE */
533     if (imode == INSERT_VALUES) {
534       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] = array_B[i];
535     } else { /* ADD_VALUES */
536       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] += array_B[i];
537     }
538   }
539   PetscCall(ISRestoreIndices(pcis->is_B_local, &idex));
540   PetscCall(VecRestoreArray(v_B, &array_B));
541   PetscFunctionReturn(PETSC_SUCCESS);
542 }
543 
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)

*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior entries, b on the interface */
  PetscCall(VecSet(vec1_N, 0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  /* Checking for consistency of the RHS (debug aid, enabled by -pc_is_check_consistency):
     for a floating (pure Neumann) subdomain the RHS must be orthogonal to the constant
     null space, i.e. its average should be (near) zero */
  {
    PetscBool flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_is_check_consistency", &flg, NULL));
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));

      PetscCall(VecSum(vec1_N, &average));
      average = average / ((PetscReal)pcis->n);
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is floating. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is fixed.    Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      }
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  }
  /* Solving the system for vec2_N */
  PetscCall(KSPSolve(pcis->ksp_N, vec1_N, vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N, pc, vec2_N));
  /* Extracting the local interface vector out of the solution */
  PetscCall(VecScatterBegin(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}
607