xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision a4e35b1925eceef64945ea472b84f2bf06a67b5e)
1 
2 #include <petsc/private/pcisimpl.h> /*I "petscpc.h" I*/
3 
4 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
5 {
6   PC_IS *pcis = (PC_IS *)pc->data;
7 
8   PetscFunctionBegin;
9   pcis->use_stiffness_scaling = use;
10   PetscFunctionReturn(PETSC_SUCCESS);
11 }
12 
/*@
  PCISSetUseStiffnessScaling - Tells `PCIS` to construct partition of unity using
  the local matrices' diagonal entries

  Logically Collective

  Input Parameters:
+ pc  - the preconditioning context
- use - whether or not it should use matrix diagonal to build partition of unity.

  Level: intermediate

.seealso: `PCBDDC`, `PCNN`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, use, 2);
  /* dispatch to the method composed by PCISInitialize(); silently a no-op for other PC types */
  PetscTryMethod(pc, "PCISSetUseStiffnessScaling_C", (PC, PetscBool), (pc, use));
  PetscFunctionReturn(PETSC_SUCCESS);
}
37 
/* Implementation of PCISSetSubdomainDiagonalScaling(); stores the user's scaling vector in pcis->D */
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  /* take a reference before destroying the old D, so passing the currently-stored vector is safe */
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      /* vector is sized for all local nodes: restrict it to the interface (B) nodes via N_to_B,
         then replace D with the interface-sized copy */
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
60 
/*@
  PCISSetSubdomainDiagonalScaling - Set diagonal scaling for `PCIS`.

  Logically Collective

  Input Parameters:
+ pc              - the preconditioning context
- scaling_factors - scaling factors for the subdomain

  Level: intermediate

  Note:
  Intended for use with jumping coefficients cases.

.seealso: `PCBDDC`, `PCNN`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`, `PCISSetUseStiffnessScaling()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(scaling_factors, VEC_CLASSID, 2);
  /* dispatch to the method composed by PCISInitialize(); silently a no-op for other PC types */
  PetscTryMethod(pc, "PCISSetSubdomainDiagonalScaling_C", (PC, Vec), (pc, scaling_factors));
  PetscFunctionReturn(PETSC_SUCCESS);
}
87 
88 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
89 {
90   PC_IS *pcis = (PC_IS *)pc->data;
91 
92   PetscFunctionBegin;
93   pcis->scaling_factor = scal;
94   if (pcis->D) PetscCall(VecSet(pcis->D, pcis->scaling_factor));
95   PetscFunctionReturn(PETSC_SUCCESS);
96 }
97 
/*@
  PCISSetSubdomainScalingFactor - Set scaling factor for `PCIS`.

  Not Collective

  Input Parameters:
+ pc   - the preconditioning context
- scal - scaling factor for the subdomain

  Level: intermediate

  Note:
  Intended for use with the jumping coefficients cases.

.seealso: `PCBDDC`, `PCNN`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainDiagonalScaling()`, `PCISSetUseStiffnessScaling()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the method composed by PCISInitialize(); silently a no-op for other PC types */
  PetscTryMethod(pc, "PCISSetSubdomainScalingFactor_C", (PC, PetscScalar), (pc, scal));
  PetscFunctionReturn(PETSC_SUCCESS);
}
123 
124 /*@
125   PCISSetUp - sets up the `PC_IS` portion of `PCNN` and `PCBDDC` preconditioner context as part of their setup process
126 
127   Input Parameters:
128 + pc              - the `PC` object, must be of type `PCNN` or `PCBDDC`
129 . computematrices - Extract the blocks `A_II`, `A_BI`, `A_IB` and `A_BB` from the matrix
130 - computesolvers  - Create the `KSP` for the local Dirichlet and Neumann problems
131 
132   Level: advanced
133 
134 .seealso: `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
135           `PCISSetSubdomainScalingFactor()`,
136           `PCISReset()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
137 @*/
138 PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
139 {
140   PC_IS    *pcis = (PC_IS *)(pc->data);
141   Mat_IS   *matis;
142   MatReuse  reuse;
143   PetscBool flg, issbaij;
144 
145   PetscFunctionBegin;
146   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &flg));
147   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires preconditioning matrix of type MATIS");
148   matis = (Mat_IS *)pc->pmat->data;
149   if (pc->useAmat) {
150     PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATIS, &flg));
151     PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires linear system matrix of type MATIS");
152   }
153 
154   /* first time creation, get info on substructuring */
155   if (!pc->setupcalled) {
156     PetscInt  n_I;
157     PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global;
158     PetscBT   bt;
159     PetscInt  i, j;
160 
161     /* get info on mapping */
162     PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
163     PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
164     pcis->mapping = matis->rmapping;
165     PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping, &pcis->n));
166     PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));
167 
168     /* Identifying interior and interface nodes, in local numbering */
169     PetscCall(PetscBTCreate(pcis->n, &bt));
170     for (i = 0; i < pcis->n_neigh; i++)
171       for (j = 0; j < pcis->n_shared[i]; j++) PetscCall(PetscBTSet(bt, pcis->shared[i][j]));
172 
173     /* Creating local and global index sets for interior and interface nodes. */
174     PetscCall(PetscMalloc1(pcis->n, &idx_I_local));
175     PetscCall(PetscMalloc1(pcis->n, &idx_B_local));
176     for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) {
177       if (!PetscBTLookup(bt, i)) {
178         idx_I_local[n_I] = i;
179         n_I++;
180       } else {
181         idx_B_local[pcis->n_B] = i;
182         pcis->n_B++;
183       }
184     }
185 
186     /* Getting the global numbering */
187     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
188     idx_I_global = idx_B_local + pcis->n_B;
189     PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcis->n_B, idx_B_local, idx_B_global));
190     PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, n_I, idx_I_local, idx_I_global));
191 
192     /* Creating the index sets */
193     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pcis->n_B, idx_B_local, PETSC_COPY_VALUES, &pcis->is_B_local));
194     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), pcis->n_B, idx_B_global, PETSC_COPY_VALUES, &pcis->is_B_global));
195     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_I, idx_I_local, PETSC_COPY_VALUES, &pcis->is_I_local));
196     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), n_I, idx_I_global, PETSC_COPY_VALUES, &pcis->is_I_global));
197 
198     /* Freeing memory */
199     PetscCall(PetscFree(idx_B_local));
200     PetscCall(PetscFree(idx_I_local));
201     PetscCall(PetscBTDestroy(&bt));
202 
203     /* Creating work vectors and arrays */
204     PetscCall(VecDuplicate(matis->x, &pcis->vec1_N));
205     PetscCall(VecDuplicate(pcis->vec1_N, &pcis->vec2_N));
206     PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_D));
207     PetscCall(VecSetSizes(pcis->vec1_D, pcis->n - pcis->n_B, PETSC_DECIDE));
208     PetscCall(VecSetType(pcis->vec1_D, ((PetscObject)pcis->vec1_N)->type_name));
209     PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec2_D));
210     PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec3_D));
211     PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec4_D));
212     PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_B));
213     PetscCall(VecSetSizes(pcis->vec1_B, pcis->n_B, PETSC_DECIDE));
214     PetscCall(VecSetType(pcis->vec1_B, ((PetscObject)pcis->vec1_N)->type_name));
215     PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec2_B));
216     PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec3_B));
217     PetscCall(MatCreateVecs(pc->pmat, &pcis->vec1_global, NULL));
218     PetscCall(PetscMalloc1(pcis->n, &pcis->work_N));
219     /* scaling vector */
220     if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
221       PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
222       PetscCall(VecSet(pcis->D, pcis->scaling_factor));
223     }
224 
225     /* Creating the scatter contexts */
226     PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_I_local, pcis->vec1_D, (IS)0, &pcis->N_to_D));
227     PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_I_global, pcis->vec1_D, (IS)0, &pcis->global_to_D));
228     PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_B_local, pcis->vec1_B, (IS)0, &pcis->N_to_B));
229     PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_B_global, pcis->vec1_B, (IS)0, &pcis->global_to_B));
230 
231     /* map from boundary to local */
232     PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local, &pcis->BtoNmap));
233   }
234 
235   {
236     PetscInt sn;
237 
238     PetscCall(VecGetSize(pcis->D, &sn));
239     if (sn == pcis->n) {
240       PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
241       PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
242       PetscCall(VecDestroy(&pcis->D));
243       PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
244       PetscCall(VecCopy(pcis->vec1_B, pcis->D));
245     } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
246   }
247 
248   /*
249     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
250     is such that interior nodes come first than the interface ones, we have
251 
252         [ A_II | A_IB ]
253     A = [------+------]
254         [ A_BI | A_BB ]
255   */
256   if (computematrices) {
257     PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
258     PetscInt  bs, ibs;
259 
260     reuse = MAT_INITIAL_MATRIX;
261     if (pcis->reusesubmatrices && pc->setupcalled) {
262       if (pc->flag == SAME_NONZERO_PATTERN) {
263         reuse = MAT_REUSE_MATRIX;
264       } else {
265         reuse = MAT_INITIAL_MATRIX;
266       }
267     }
268     if (reuse == MAT_INITIAL_MATRIX) {
269       PetscCall(MatDestroy(&pcis->A_II));
270       PetscCall(MatDestroy(&pcis->pA_II));
271       PetscCall(MatDestroy(&pcis->A_IB));
272       PetscCall(MatDestroy(&pcis->A_BI));
273       PetscCall(MatDestroy(&pcis->A_BB));
274     }
275 
276     PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping, &ibs));
277     PetscCall(MatGetBlockSize(matis->A, &bs));
278     PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->pA_II));
279     if (amat) {
280       Mat_IS *amatis = (Mat_IS *)pc->mat->data;
281       PetscCall(MatCreateSubMatrix(amatis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->A_II));
282     } else {
283       PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
284       PetscCall(MatDestroy(&pcis->A_II));
285       pcis->A_II = pcis->pA_II;
286     }
287     PetscCall(MatSetBlockSize(pcis->A_II, bs == ibs ? bs : 1));
288     PetscCall(MatSetBlockSize(pcis->pA_II, bs == ibs ? bs : 1));
289     PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_B_local, reuse, &pcis->A_BB));
290     PetscCall(PetscObjectTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
291     if (!issbaij) {
292       PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
293       PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
294     } else {
295       Mat newmat;
296 
297       PetscCall(MatConvert(matis->A, MATSEQBAIJ, MAT_INITIAL_MATRIX, &newmat));
298       PetscCall(MatCreateSubMatrix(newmat, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
299       PetscCall(MatCreateSubMatrix(newmat, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
300       PetscCall(MatDestroy(&newmat));
301     }
302     PetscCall(MatSetBlockSize(pcis->A_BB, bs == ibs ? bs : 1));
303   }
304 
305   /* Creating scaling vector D */
306   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_is_use_stiffness_scaling", &pcis->use_stiffness_scaling, NULL));
307   if (pcis->use_stiffness_scaling) {
308     PetscScalar *a;
309     PetscInt     i, n;
310 
311     if (pcis->A_BB) {
312       PetscCall(MatGetDiagonal(pcis->A_BB, pcis->D));
313     } else {
314       PetscCall(MatGetDiagonal(matis->A, pcis->vec1_N));
315       PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
316       PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
317     }
318     PetscCall(VecAbs(pcis->D));
319     PetscCall(VecGetLocalSize(pcis->D, &n));
320     PetscCall(VecGetArray(pcis->D, &a));
321     for (i = 0; i < n; i++)
322       if (PetscAbsScalar(a[i]) < PETSC_SMALL) a[i] = 1.0;
323     PetscCall(VecRestoreArray(pcis->D, &a));
324   }
325   PetscCall(VecSet(pcis->vec1_global, 0.0));
326   PetscCall(VecScatterBegin(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
327   PetscCall(VecScatterEnd(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
328   PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
329   PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
330   PetscCall(VecPointwiseDivide(pcis->D, pcis->D, pcis->vec1_B));
331   /* See historical note 01, at the bottom of this file. */
332 
333   /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
334   if (computesolvers) {
335     PC pc_ctx;
336 
337     pcis->pure_neumann = matis->pure_neumann;
338     /* Dirichlet */
339     PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_D));
340     PetscCall(KSPSetNestLevel(pcis->ksp_D, pc->kspnestlevel));
341     PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D, pc->erroriffailure));
342     PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D, (PetscObject)pc, 1));
343     PetscCall(KSPSetOperators(pcis->ksp_D, pcis->A_II, pcis->A_II));
344     PetscCall(KSPSetOptionsPrefix(pcis->ksp_D, "is_localD_"));
345     PetscCall(KSPGetPC(pcis->ksp_D, &pc_ctx));
346     PetscCall(PCSetType(pc_ctx, PCLU));
347     PetscCall(KSPSetType(pcis->ksp_D, KSPPREONLY));
348     PetscCall(KSPSetFromOptions(pcis->ksp_D));
349     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
350     PetscCall(KSPSetUp(pcis->ksp_D));
351     /* Neumann */
352     PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_N));
353     PetscCall(KSPSetNestLevel(pcis->ksp_N, pc->kspnestlevel));
354     PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N, pc->erroriffailure));
355     PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N, (PetscObject)pc, 1));
356     PetscCall(KSPSetOperators(pcis->ksp_N, matis->A, matis->A));
357     PetscCall(KSPSetOptionsPrefix(pcis->ksp_N, "is_localN_"));
358     PetscCall(KSPGetPC(pcis->ksp_N, &pc_ctx));
359     PetscCall(PCSetType(pc_ctx, PCLU));
360     PetscCall(KSPSetType(pcis->ksp_N, KSPPREONLY));
361     PetscCall(KSPSetFromOptions(pcis->ksp_N));
362     {
363       PetscBool damp_fixed = PETSC_FALSE, remove_nullspace_fixed = PETSC_FALSE, set_damping_factor_floating = PETSC_FALSE, not_damp_floating = PETSC_FALSE, not_remove_nullspace_floating = PETSC_FALSE;
364       PetscReal fixed_factor, floating_factor;
365 
366       PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &fixed_factor, &damp_fixed));
367       if (!damp_fixed) fixed_factor = 0.0;
368       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &damp_fixed, NULL));
369 
370       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_remove_nullspace_fixed", &remove_nullspace_fixed, NULL));
371 
372       PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &floating_factor, &set_damping_factor_floating));
373       if (!set_damping_factor_floating) floating_factor = 0.0;
374       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &set_damping_factor_floating, NULL));
375       if (!set_damping_factor_floating) floating_factor = 1.e-12;
376 
377       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_damp_floating", &not_damp_floating, NULL));
378 
379       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_remove_nullspace_floating", &not_remove_nullspace_floating, NULL));
380 
381       if (pcis->pure_neumann) { /* floating subdomain */
382         if (!(not_damp_floating)) {
383           PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
384           PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
385         }
386         if (!(not_remove_nullspace_floating)) {
387           MatNullSpace nullsp;
388           PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
389           PetscCall(MatSetNullSpace(matis->A, nullsp));
390           PetscCall(MatNullSpaceDestroy(&nullsp));
391         }
392       } else { /* fixed subdomain */
393         if (damp_fixed) {
394           PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
395           PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
396         }
397         if (remove_nullspace_fixed) {
398           MatNullSpace nullsp;
399           PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
400           PetscCall(MatSetNullSpace(matis->A, nullsp));
401           PetscCall(MatNullSpaceDestroy(&nullsp));
402         }
403       }
404     }
405     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
406     PetscCall(KSPSetUp(pcis->ksp_N));
407   }
408   PetscFunctionReturn(PETSC_SUCCESS);
409 }
410 
411 /*@
412   PCISReset - Removes all the `PC_IS` parts of the `PC` implementation data structure
413 
414   Input Parameter:
415 . pc - the `PC` object, must be of type `PCNN` or `PCBDDC`
416 
417   Level: advanced
418 
419 .seealso: `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`, `PCISSetSubdomainScalingFactor()`,
420           `PCISInitialize()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
421 @*/
422 PetscErrorCode PCISReset(PC pc)
423 {
424   PC_IS    *pcis = (PC_IS *)(pc->data);
425   PetscBool correcttype;
426 
427   PetscFunctionBegin;
428   if (!pc) PetscFunctionReturn(PETSC_SUCCESS);
429   PetscCall(PetscObjectTypeCompareAny((PetscObject)pc, &correcttype, PCBDDC, PCNN, ""));
430   PetscCheck(correcttype, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC must be of type PCNN or PCBDDC");
431   PetscCall(ISDestroy(&pcis->is_B_local));
432   PetscCall(ISDestroy(&pcis->is_I_local));
433   PetscCall(ISDestroy(&pcis->is_B_global));
434   PetscCall(ISDestroy(&pcis->is_I_global));
435   PetscCall(MatDestroy(&pcis->A_II));
436   PetscCall(MatDestroy(&pcis->pA_II));
437   PetscCall(MatDestroy(&pcis->A_IB));
438   PetscCall(MatDestroy(&pcis->A_BI));
439   PetscCall(MatDestroy(&pcis->A_BB));
440   PetscCall(VecDestroy(&pcis->D));
441   PetscCall(KSPDestroy(&pcis->ksp_N));
442   PetscCall(KSPDestroy(&pcis->ksp_D));
443   PetscCall(VecDestroy(&pcis->vec1_N));
444   PetscCall(VecDestroy(&pcis->vec2_N));
445   PetscCall(VecDestroy(&pcis->vec1_D));
446   PetscCall(VecDestroy(&pcis->vec2_D));
447   PetscCall(VecDestroy(&pcis->vec3_D));
448   PetscCall(VecDestroy(&pcis->vec4_D));
449   PetscCall(VecDestroy(&pcis->vec1_B));
450   PetscCall(VecDestroy(&pcis->vec2_B));
451   PetscCall(VecDestroy(&pcis->vec3_B));
452   PetscCall(VecDestroy(&pcis->vec1_global));
453   PetscCall(VecScatterDestroy(&pcis->global_to_D));
454   PetscCall(VecScatterDestroy(&pcis->N_to_B));
455   PetscCall(VecScatterDestroy(&pcis->N_to_D));
456   PetscCall(VecScatterDestroy(&pcis->global_to_B));
457   PetscCall(PetscFree(pcis->work_N));
458   if (pcis->n_neigh > -1) PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));
459   PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
460   PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
461   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", NULL));
462   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", NULL));
463   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", NULL));
464   PetscFunctionReturn(PETSC_SUCCESS);
465 }
466 
/*@
  PCISInitialize - initializes the `PC_IS` portion of `PCNN` and `PCBDDC` preconditioner context

  Input Parameter:
. pc - the `PC` object, must be of type `PCNN` or `PCBDDC`

  Level: advanced

  Note:
  There is no `PCIS` preconditioner; the `PCIS`-prefixed routines provide functionality needed by `PCNN` or `PCBDDC`

.seealso: `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
@*/
PetscErrorCode PCISInitialize(PC pc)
{
  PC_IS    *pcis = (PC_IS *)(pc->data);
  PetscBool correcttype;

  PetscFunctionBegin;
  PetscCheck(pcis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC_IS context must be created by caller");
  PetscCall(PetscObjectTypeCompareAny((PetscObject)pc, &correcttype, PCBDDC, PCNN, ""));
  PetscCheck(correcttype, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC must be of type PCNN or PCBDDC");
  /* n_neigh == -1 flags that ISLocalToGlobalMappingGetInfo() has not been called yet (see PCISReset()) */
  pcis->n_neigh          = -1;
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* compose the setter implementations so the public PCISSet* wrappers can dispatch to them */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", PCISSetUseStiffnessScaling_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", PCISSetSubdomainScalingFactor_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", PCISSetSubdomainDiagonalScaling_IS));
  PetscFunctionReturn(PETSC_SUCCESS);
}
499 
/*@
  PCISApplySchur - applies the Schur complement arising from the `MATIS` inside the `PCNN` preconditioner

  Input Parameters:
+ pc     - preconditioner context
. v      - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
. vec1_B - location to store the result of Schur complement applied to chunk
. vec2_B - workspace or `NULL`, `v` is used as workspace in that case
. vec1_D - work space
- vec2_D - work space

  Level: advanced

.seealso: `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`, `PCISApplyInvSchur()`,
          `PCISReset()`, `PCISInitialize()`
@*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  if (!vec2_B) vec2_B = v;

  /* computes vec1_B = S v = (A_BB - A_BI A_II^{-1} A_IB) v */
  PetscCall(MatMult(pcis->A_BB, v, vec1_B));
  PetscCall(MatMult(pcis->A_IB, v, vec1_D));
  /* local Dirichlet solve: vec2_D = A_II^{-1} vec1_D */
  PetscCall(KSPSolve(pcis->ksp_D, vec1_D, vec2_D));
  PetscCall(KSPCheckSolve(pcis->ksp_D, pc, vec2_D));
  PetscCall(MatMult(pcis->A_BI, vec2_D, vec2_B));
  PetscCall(VecAXPY(vec1_B, -1.0, vec2_B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
532 
533 /*@
534   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
535   including ghosts) into an interface vector, when in `SCATTER_FORWARD` mode, or vice-versa, when in `SCATTER_REVERSE`
536   mode.
537 
538   Input Parameters:
539 + pc      - preconditioner context
540 . array_N - [when in `SCATTER_FORWARD` mode] Array to be scattered into the vector otherwise output array
541 . imode   - insert mode, `ADD_VALUES` or `INSERT_VALUES`
542 . smode   - scatter mode, `SCATTER_FORWARD` or `SCATTER_REVERSE` mode]
543 - v_B     - [when in `SCATTER_REVERSE` mode] Vector to be scattered into the array, otherwise output vector
544 
545   Level: advanced
546 
547   Note:
548   The entries in the array that do not correspond to interface nodes remain unaltered.
549 
550 .seealso: `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`,
551           `PCISSetSubdomainScalingFactor()`, `PCISApplySchur()`, `PCISApplyInvSchur()`,
552           `PCISReset()`, `PCISInitialize()`, `InsertMode`
553 @*/
554 PetscErrorCode PCISScatterArrayNToVecB(PC pc, PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode)
555 {
556   PetscInt        i;
557   const PetscInt *idex;
558   PetscScalar    *array_B;
559   PC_IS          *pcis = (PC_IS *)(pc->data);
560 
561   PetscFunctionBegin;
562   PetscCall(VecGetArray(v_B, &array_B));
563   PetscCall(ISGetIndices(pcis->is_B_local, &idex));
564 
565   if (smode == SCATTER_FORWARD) {
566     if (imode == INSERT_VALUES) {
567       for (i = 0; i < pcis->n_B; i++) array_B[i] = array_N[idex[i]];
568     } else { /* ADD_VALUES */
569       for (i = 0; i < pcis->n_B; i++) array_B[i] += array_N[idex[i]];
570     }
571   } else { /* SCATTER_REVERSE */
572     if (imode == INSERT_VALUES) {
573       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] = array_B[i];
574     } else { /* ADD_VALUES */
575       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] += array_B[i];
576     }
577   }
578   PetscCall(ISRestoreIndices(pcis->is_B_local, &idex));
579   PetscCall(VecRestoreArray(v_B, &array_B));
580   PetscFunctionReturn(PETSC_SUCCESS);
581 }
582 
/*@
  PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.

  Input Parameters:
+ pc     - preconditioner context
. b      - vector of local interface nodes (including ghosts)
. x      - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur complement to `b`
. vec1_N - vector of local nodes (interior and interface, including ghosts); used as work space
- vec2_N - vector of local nodes (interior and interface, including ghosts); used as work space

  Level: advanced

  Note:
  Solves the problem
.vb
  [ A_II  A_IB ] [ . ]   [ 0 ]
  [            ] [   ] = [   ]
  [ A_BI  A_BB ] [ x ]   [ b ]
.ve

.seealso: `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISInitialize()`
@*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero at interior nodes, b at interface nodes (reverse N_to_B scatter) */
  PetscCall(VecSet(vec1_N, 0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  /* Checking for consistency of the RHS (diagnostic only, enabled by -pc_is_check_consistency):
     for a floating (pure Neumann) subdomain the RHS average should be near zero */
  {
    PetscBool flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_is_check_consistency", &flg, NULL));
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));

      PetscCall(VecSum(vec1_N, &average));
      average = average / ((PetscReal)pcis->n);
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is floating. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is fixed.    Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      }
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  }
  /* Solving the system for vec2_N */
  PetscCall(KSPSolve(pcis->ksp_N, vec1_N, vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N, pc, vec2_N));
  /* Extracting the local interface vector out of the solution */
  PetscCall(VecScatterBegin(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}
652