xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision e0f5bfbec699682fa3e8b8532b1176849ea4e12a)
1 
2 #include <petsc/private/pcisimpl.h> /*I "petscpc.h" I*/
3 
4 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use) {
5   PC_IS *pcis = (PC_IS *)pc->data;
6 
7   PetscFunctionBegin;
8   pcis->use_stiffness_scaling = use;
9   PetscFunctionReturn(0);
10 }
11 
12 /*@
13    PCISSetUseStiffnessScaling - Tells `PCIS` to construct partition of unity using
14                               the local matrices' diagonal entries
15 
16    Logically Collective
17 
18    Input Parameters:
19 +  pc - the preconditioning context
20 -  use - whether or not `PCIS` uses the matrix diagonal to build the partition of unity.
21 
22    Level: intermediate
23 
24    Developer Note:
25    There is no manual page for `PCIS` nor some of its methods
26 
27 .seealso: `PCIS`, `PCBDDC`
28 @*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, use, 2);
  /* dispatch to the implementation composed in PCISCreate(); silently a no-op for non-PCIS-based PCs */
  PetscTryMethod(pc, "PCISSetUseStiffnessScaling_C", (PC, PetscBool), (pc, use));
  PetscFunctionReturn(0);
}
36 
37 static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors) {
38   PC_IS *pcis = (PC_IS *)pc->data;
39 
40   PetscFunctionBegin;
41   PetscCall(PetscObjectReference((PetscObject)scaling_factors));
42   PetscCall(VecDestroy(&pcis->D));
43   pcis->D = scaling_factors;
44   if (pc->setupcalled) {
45     PetscInt sn;
46 
47     PetscCall(VecGetSize(pcis->D, &sn));
48     if (sn == pcis->n) {
49       PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
50       PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
51       PetscCall(VecDestroy(&pcis->D));
52       PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
53       PetscCall(VecCopy(pcis->vec1_B, pcis->D));
54     } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
55   }
56   PetscFunctionReturn(0);
57 }
58 
59 /*@
60    PCISSetSubdomainDiagonalScaling - Set diagonal scaling for `PCIS`.
61 
62    Logically Collective
63 
64    Input Parameters:
65 +  pc - the preconditioning context
66 -  scaling_factors - scaling factors for the subdomain
67 
68    Level: intermediate
69 
70    Note:
71    Intended for use with jumping coefficients cases.
72 
73    Developer Note:
74    There is no manual page for `PCIS` nor some of its methods
75 
76 .seealso: `PCIS`, `PCBDDC`
77 @*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(scaling_factors, VEC_CLASSID, 2);
  /* dispatch to the implementation composed in PCISCreate(); silently a no-op for non-PCIS-based PCs */
  PetscTryMethod(pc, "PCISSetSubdomainDiagonalScaling_C", (PC, Vec), (pc, scaling_factors));
  PetscFunctionReturn(0);
}
85 
86 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal) {
87   PC_IS *pcis = (PC_IS *)pc->data;
88 
89   PetscFunctionBegin;
90   pcis->scaling_factor = scal;
91   if (pcis->D) PetscCall(VecSet(pcis->D, pcis->scaling_factor));
92   PetscFunctionReturn(0);
93 }
94 
95 /*@
96    PCISSetSubdomainScalingFactor - Set scaling factor for `PCIS`.
97 
98    Not collective
99 
100    Input Parameters:
101 +  pc - the preconditioning context
102 -  scal - scaling factor for the subdomain
103 
104    Level: intermediate
105 
106    Note:
107    Intended for use with the jumping coefficients cases.
108 
109    Developer Note:
110    There is no manual page for `PCIS` nor some of its methods
111 
112 .seealso: `PCIS`, `PCBDDC`
113 @*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed in PCISCreate(); silently a no-op for non-PCIS-based PCs */
  PetscTryMethod(pc, "PCISSetSubdomainScalingFactor_C", (PC, PetscScalar), (pc, scal));
  PetscFunctionReturn(0);
}
120 
121 PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers) {
122   PC_IS    *pcis = (PC_IS *)(pc->data);
123   Mat_IS   *matis;
124   MatReuse  reuse;
125   PetscBool flg, issbaij;
126 
127   PetscFunctionBegin;
128   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &flg));
129   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires preconditioning matrix of type MATIS");
130   matis = (Mat_IS *)pc->pmat->data;
131   if (pc->useAmat) {
132     PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATIS, &flg));
133     PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires linear system matrix of type MATIS");
134   }
135 
136   /* first time creation, get info on substructuring */
137   if (!pc->setupcalled) {
138     PetscInt  n_I;
139     PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global;
140     PetscBT   bt;
141     PetscInt  i, j;
142 
143     /* get info on mapping */
144     PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
145     PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
146     pcis->mapping = matis->rmapping;
147     PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping, &pcis->n));
148     PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));
149 
150     /* Identifying interior and interface nodes, in local numbering */
151     PetscCall(PetscBTCreate(pcis->n, &bt));
152     for (i = 0; i < pcis->n_neigh; i++)
153       for (j = 0; j < pcis->n_shared[i]; j++) PetscCall(PetscBTSet(bt, pcis->shared[i][j]));
154 
155     /* Creating local and global index sets for interior and inteface nodes. */
156     PetscCall(PetscMalloc1(pcis->n, &idx_I_local));
157     PetscCall(PetscMalloc1(pcis->n, &idx_B_local));
158     for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) {
159       if (!PetscBTLookup(bt, i)) {
160         idx_I_local[n_I] = i;
161         n_I++;
162       } else {
163         idx_B_local[pcis->n_B] = i;
164         pcis->n_B++;
165       }
166     }
167 
168     /* Getting the global numbering */
169     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
170     idx_I_global = idx_B_local + pcis->n_B;
171     PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcis->n_B, idx_B_local, idx_B_global));
172     PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, n_I, idx_I_local, idx_I_global));
173 
174     /* Creating the index sets */
175     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pcis->n_B, idx_B_local, PETSC_COPY_VALUES, &pcis->is_B_local));
176     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), pcis->n_B, idx_B_global, PETSC_COPY_VALUES, &pcis->is_B_global));
177     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_I, idx_I_local, PETSC_COPY_VALUES, &pcis->is_I_local));
178     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), n_I, idx_I_global, PETSC_COPY_VALUES, &pcis->is_I_global));
179 
180     /* Freeing memory */
181     PetscCall(PetscFree(idx_B_local));
182     PetscCall(PetscFree(idx_I_local));
183     PetscCall(PetscBTDestroy(&bt));
184 
185     /* Creating work vectors and arrays */
186     PetscCall(VecDuplicate(matis->x, &pcis->vec1_N));
187     PetscCall(VecDuplicate(pcis->vec1_N, &pcis->vec2_N));
188     PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_D));
189     PetscCall(VecSetSizes(pcis->vec1_D, pcis->n - pcis->n_B, PETSC_DECIDE));
190     PetscCall(VecSetType(pcis->vec1_D, ((PetscObject)pcis->vec1_N)->type_name));
191     PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec2_D));
192     PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec3_D));
193     PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec4_D));
194     PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_B));
195     PetscCall(VecSetSizes(pcis->vec1_B, pcis->n_B, PETSC_DECIDE));
196     PetscCall(VecSetType(pcis->vec1_B, ((PetscObject)pcis->vec1_N)->type_name));
197     PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec2_B));
198     PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec3_B));
199     PetscCall(MatCreateVecs(pc->pmat, &pcis->vec1_global, NULL));
200     PetscCall(PetscMalloc1(pcis->n, &pcis->work_N));
201     /* scaling vector */
202     if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
203       PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
204       PetscCall(VecSet(pcis->D, pcis->scaling_factor));
205     }
206 
207     /* Creating the scatter contexts */
208     PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_I_local, pcis->vec1_D, (IS)0, &pcis->N_to_D));
209     PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_I_global, pcis->vec1_D, (IS)0, &pcis->global_to_D));
210     PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_B_local, pcis->vec1_B, (IS)0, &pcis->N_to_B));
211     PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_B_global, pcis->vec1_B, (IS)0, &pcis->global_to_B));
212 
213     /* map from boundary to local */
214     PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local, &pcis->BtoNmap));
215   }
216 
217   {
218     PetscInt sn;
219 
220     PetscCall(VecGetSize(pcis->D, &sn));
221     if (sn == pcis->n) {
222       PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
223       PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
224       PetscCall(VecDestroy(&pcis->D));
225       PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
226       PetscCall(VecCopy(pcis->vec1_B, pcis->D));
227     } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
228   }
229 
230   /*
231     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
232     is such that interior nodes come first than the interface ones, we have
233 
234         [ A_II | A_IB ]
235     A = [------+------]
236         [ A_BI | A_BB ]
237   */
238   if (computematrices) {
239     PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
240     PetscInt  bs, ibs;
241 
242     reuse = MAT_INITIAL_MATRIX;
243     if (pcis->reusesubmatrices && pc->setupcalled) {
244       if (pc->flag == SAME_NONZERO_PATTERN) {
245         reuse = MAT_REUSE_MATRIX;
246       } else {
247         reuse = MAT_INITIAL_MATRIX;
248       }
249     }
250     if (reuse == MAT_INITIAL_MATRIX) {
251       PetscCall(MatDestroy(&pcis->A_II));
252       PetscCall(MatDestroy(&pcis->pA_II));
253       PetscCall(MatDestroy(&pcis->A_IB));
254       PetscCall(MatDestroy(&pcis->A_BI));
255       PetscCall(MatDestroy(&pcis->A_BB));
256     }
257 
258     PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping, &ibs));
259     PetscCall(MatGetBlockSize(matis->A, &bs));
260     PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->pA_II));
261     if (amat) {
262       Mat_IS *amatis = (Mat_IS *)pc->mat->data;
263       PetscCall(MatCreateSubMatrix(amatis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->A_II));
264     } else {
265       PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
266       PetscCall(MatDestroy(&pcis->A_II));
267       pcis->A_II = pcis->pA_II;
268     }
269     PetscCall(MatSetBlockSize(pcis->A_II, bs == ibs ? bs : 1));
270     PetscCall(MatSetBlockSize(pcis->pA_II, bs == ibs ? bs : 1));
271     PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_B_local, reuse, &pcis->A_BB));
272     PetscCall(PetscObjectTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
273     if (!issbaij) {
274       PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
275       PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
276     } else {
277       Mat newmat;
278 
279       PetscCall(MatConvert(matis->A, MATSEQBAIJ, MAT_INITIAL_MATRIX, &newmat));
280       PetscCall(MatCreateSubMatrix(newmat, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
281       PetscCall(MatCreateSubMatrix(newmat, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
282       PetscCall(MatDestroy(&newmat));
283     }
284     PetscCall(MatSetBlockSize(pcis->A_BB, bs == ibs ? bs : 1));
285   }
286 
287   /* Creating scaling vector D */
288   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_is_use_stiffness_scaling", &pcis->use_stiffness_scaling, NULL));
289   if (pcis->use_stiffness_scaling) {
290     PetscScalar *a;
291     PetscInt     i, n;
292 
293     if (pcis->A_BB) {
294       PetscCall(MatGetDiagonal(pcis->A_BB, pcis->D));
295     } else {
296       PetscCall(MatGetDiagonal(matis->A, pcis->vec1_N));
297       PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
298       PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
299     }
300     PetscCall(VecAbs(pcis->D));
301     PetscCall(VecGetLocalSize(pcis->D, &n));
302     PetscCall(VecGetArray(pcis->D, &a));
303     for (i = 0; i < n; i++)
304       if (PetscAbsScalar(a[i]) < PETSC_SMALL) a[i] = 1.0;
305     PetscCall(VecRestoreArray(pcis->D, &a));
306   }
307   PetscCall(VecSet(pcis->vec1_global, 0.0));
308   PetscCall(VecScatterBegin(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
309   PetscCall(VecScatterEnd(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
310   PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
311   PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
312   PetscCall(VecPointwiseDivide(pcis->D, pcis->D, pcis->vec1_B));
313   /* See historical note 01, at the bottom of this file. */
314 
315   /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
316   if (computesolvers) {
317     PC pc_ctx;
318 
319     pcis->pure_neumann = matis->pure_neumann;
320     /* Dirichlet */
321     PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_D));
322     PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D, pc->erroriffailure));
323     PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D, (PetscObject)pc, 1));
324     PetscCall(KSPSetOperators(pcis->ksp_D, pcis->A_II, pcis->A_II));
325     PetscCall(KSPSetOptionsPrefix(pcis->ksp_D, "is_localD_"));
326     PetscCall(KSPGetPC(pcis->ksp_D, &pc_ctx));
327     PetscCall(PCSetType(pc_ctx, PCLU));
328     PetscCall(KSPSetType(pcis->ksp_D, KSPPREONLY));
329     PetscCall(KSPSetFromOptions(pcis->ksp_D));
330     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
331     PetscCall(KSPSetUp(pcis->ksp_D));
332     /* Neumann */
333     PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_N));
334     PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N, pc->erroriffailure));
335     PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N, (PetscObject)pc, 1));
336     PetscCall(KSPSetOperators(pcis->ksp_N, matis->A, matis->A));
337     PetscCall(KSPSetOptionsPrefix(pcis->ksp_N, "is_localN_"));
338     PetscCall(KSPGetPC(pcis->ksp_N, &pc_ctx));
339     PetscCall(PCSetType(pc_ctx, PCLU));
340     PetscCall(KSPSetType(pcis->ksp_N, KSPPREONLY));
341     PetscCall(KSPSetFromOptions(pcis->ksp_N));
342     {
343       PetscBool damp_fixed = PETSC_FALSE, remove_nullspace_fixed = PETSC_FALSE, set_damping_factor_floating = PETSC_FALSE, not_damp_floating = PETSC_FALSE, not_remove_nullspace_floating = PETSC_FALSE;
344       PetscReal fixed_factor, floating_factor;
345 
346       PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &fixed_factor, &damp_fixed));
347       if (!damp_fixed) fixed_factor = 0.0;
348       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &damp_fixed, NULL));
349 
350       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_remove_nullspace_fixed", &remove_nullspace_fixed, NULL));
351 
352       PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &floating_factor, &set_damping_factor_floating));
353       if (!set_damping_factor_floating) floating_factor = 0.0;
354       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &set_damping_factor_floating, NULL));
355       if (!set_damping_factor_floating) floating_factor = 1.e-12;
356 
357       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_damp_floating", &not_damp_floating, NULL));
358 
359       PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_remove_nullspace_floating", &not_remove_nullspace_floating, NULL));
360 
361       if (pcis->pure_neumann) { /* floating subdomain */
362         if (!(not_damp_floating)) {
363           PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
364           PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
365         }
366         if (!(not_remove_nullspace_floating)) {
367           MatNullSpace nullsp;
368           PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
369           PetscCall(MatSetNullSpace(matis->A, nullsp));
370           PetscCall(MatNullSpaceDestroy(&nullsp));
371         }
372       } else { /* fixed subdomain */
373         if (damp_fixed) {
374           PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
375           PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
376         }
377         if (remove_nullspace_fixed) {
378           MatNullSpace nullsp;
379           PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
380           PetscCall(MatSetNullSpace(matis->A, nullsp));
381           PetscCall(MatNullSpaceDestroy(&nullsp));
382         }
383       }
384     }
385     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
386     PetscCall(KSPSetUp(pcis->ksp_N));
387   }
388   PetscFunctionReturn(0);
389 }
390 
391 /*
392    PCISDestroy - Destroys all the data structures and composed methods created by the PCIS routines
393 */
394 PetscErrorCode PCISDestroy(PC pc) {
395   PC_IS *pcis;
396 
397   PetscFunctionBegin;
398   if (!pc) PetscFunctionReturn(0);
399   pcis = (PC_IS *)(pc->data);
400   PetscCall(ISDestroy(&pcis->is_B_local));
401   PetscCall(ISDestroy(&pcis->is_I_local));
402   PetscCall(ISDestroy(&pcis->is_B_global));
403   PetscCall(ISDestroy(&pcis->is_I_global));
404   PetscCall(MatDestroy(&pcis->A_II));
405   PetscCall(MatDestroy(&pcis->pA_II));
406   PetscCall(MatDestroy(&pcis->A_IB));
407   PetscCall(MatDestroy(&pcis->A_BI));
408   PetscCall(MatDestroy(&pcis->A_BB));
409   PetscCall(VecDestroy(&pcis->D));
410   PetscCall(KSPDestroy(&pcis->ksp_N));
411   PetscCall(KSPDestroy(&pcis->ksp_D));
412   PetscCall(VecDestroy(&pcis->vec1_N));
413   PetscCall(VecDestroy(&pcis->vec2_N));
414   PetscCall(VecDestroy(&pcis->vec1_D));
415   PetscCall(VecDestroy(&pcis->vec2_D));
416   PetscCall(VecDestroy(&pcis->vec3_D));
417   PetscCall(VecDestroy(&pcis->vec4_D));
418   PetscCall(VecDestroy(&pcis->vec1_B));
419   PetscCall(VecDestroy(&pcis->vec2_B));
420   PetscCall(VecDestroy(&pcis->vec3_B));
421   PetscCall(VecDestroy(&pcis->vec1_global));
422   PetscCall(VecScatterDestroy(&pcis->global_to_D));
423   PetscCall(VecScatterDestroy(&pcis->N_to_B));
424   PetscCall(VecScatterDestroy(&pcis->N_to_D));
425   PetscCall(VecScatterDestroy(&pcis->global_to_B));
426   PetscCall(PetscFree(pcis->work_N));
427   if (pcis->n_neigh > -1) PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));
428   PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
429   PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
430   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", NULL));
431   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", NULL));
432   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", NULL));
433   PetscFunctionReturn(0);
434 }
435 
436 /*
437    PCISCreate - Allocates (if needed) the common PCIS context, sets its defaults and composes the PCIS setter methods
438 */
439 PetscErrorCode PCISCreate(PC pc) {
440   PC_IS *pcis = (PC_IS *)(pc->data);
441 
442   PetscFunctionBegin;
443   if (!pcis) {
444     PetscCall(PetscNew(&pcis));
445     pc->data = pcis;
446   }
447   pcis->n_neigh          = -1;
448   pcis->scaling_factor   = 1.0;
449   pcis->reusesubmatrices = PETSC_TRUE;
450   /* composing functions */
451   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", PCISSetUseStiffnessScaling_IS));
452   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", PCISSetSubdomainScalingFactor_IS));
453   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", PCISSetSubdomainDiagonalScaling_IS));
454   PetscFunctionReturn(0);
455 }
456 
457 /*
458    PCISApplySchur -
459 
460    Input parameters:
461 .  pc - preconditioner context
462 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
463 
464    Output parameters:
465 .  vec1_B - result of Schur complement applied to chunk
466 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
467 .  vec1_D - garbage (used as work space)
468 .  vec2_D - garbage (used as work space)
469 
470 */
471 PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D) {
472   PC_IS *pcis = (PC_IS *)(pc->data);
473 
474   PetscFunctionBegin;
475   if (!vec2_B) vec2_B = v;
476 
477   PetscCall(MatMult(pcis->A_BB, v, vec1_B));
478   PetscCall(MatMult(pcis->A_IB, v, vec1_D));
479   PetscCall(KSPSolve(pcis->ksp_D, vec1_D, vec2_D));
480   PetscCall(KSPCheckSolve(pcis->ksp_D, pc, vec2_D));
481   PetscCall(MatMult(pcis->A_BI, vec2_D, vec2_B));
482   PetscCall(VecAXPY(vec1_B, -1.0, vec2_B));
483   PetscFunctionReturn(0);
484 }
485 
486 /*
487    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
488    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
489    mode.
490 
491    Input parameters:
492 .  pc - preconditioner context
493 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
494 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
495 
496    Output parameter:
497 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
498 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
499 
500    Note:
501    The entries in the array that do not correspond to interface nodes remain unaltered.
502 */
503 PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc) {
504   PetscInt        i;
505   const PetscInt *idex;
506   PetscScalar    *array_B;
507   PC_IS          *pcis = (PC_IS *)(pc->data);
508 
509   PetscFunctionBegin;
510   PetscCall(VecGetArray(v_B, &array_B));
511   PetscCall(ISGetIndices(pcis->is_B_local, &idex));
512 
513   if (smode == SCATTER_FORWARD) {
514     if (imode == INSERT_VALUES) {
515       for (i = 0; i < pcis->n_B; i++) array_B[i] = array_N[idex[i]];
516     } else { /* ADD_VALUES */
517       for (i = 0; i < pcis->n_B; i++) array_B[i] += array_N[idex[i]];
518     }
519   } else { /* SCATTER_REVERSE */
520     if (imode == INSERT_VALUES) {
521       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] = array_B[i];
522     } else { /* ADD_VALUES */
523       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] += array_B[i];
524     }
525   }
526   PetscCall(ISRestoreIndices(pcis->is_B_local, &idex));
527   PetscCall(VecRestoreArray(v_B, &array_B));
528   PetscFunctionReturn(0);
529 }
530 
531 /*
532    PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
533    More precisely, solves the problem:
534                                         [ A_II  A_IB ] [ . ]   [ 0 ]
535                                         [            ] [   ] = [   ]
536                                         [ A_BI  A_BB ] [ x ]   [ b ]
537 
538    Input parameters:
539 .  pc - preconditioner context
540 .  b - vector of local interface nodes (including ghosts)
541 
542    Output parameters:
543 .  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
544        complement to b
545 .  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
546 .  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
547 
548 */
549 PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N) {
550   PC_IS *pcis = (PC_IS *)(pc->data);
551 
552   PetscFunctionBegin;
553   /*
554     Neumann solvers.
555     Applying the inverse of the local Schur complement, i.e, solving a Neumann
556     Problem with zero at the interior nodes of the RHS and extracting the interface
557     part of the solution. inverse Schur complement is applied to b and the result
558     is stored in x.
559   */
560   /* Setting the RHS vec1_N */
561   PetscCall(VecSet(vec1_N, 0.0));
562   PetscCall(VecScatterBegin(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
563   PetscCall(VecScatterEnd(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
564   /* Checking for consistency of the RHS */
565   {
566     PetscBool flg = PETSC_FALSE;
567     PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_is_check_consistency", &flg, NULL));
568     if (flg) {
569       PetscScalar average;
570       PetscViewer viewer;
571       PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));
572 
573       PetscCall(VecSum(vec1_N, &average));
574       average = average / ((PetscReal)pcis->n);
575       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
576       if (pcis->pure_neumann) {
577         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is floating. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
578       } else {
579         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is fixed.    Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
580       }
581       PetscCall(PetscViewerFlush(viewer));
582       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
583     }
584   }
585   /* Solving the system for vec2_N */
586   PetscCall(KSPSolve(pcis->ksp_N, vec1_N, vec2_N));
587   PetscCall(KSPCheckSolve(pcis->ksp_N, pc, vec2_N));
588   /* Extracting the local interface vector out of the solution */
589   PetscCall(VecScatterBegin(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
590   PetscCall(VecScatterEnd(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
591   PetscFunctionReturn(0);
592 }
593