xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision d2522c19e8fa9bca20aaca277941d9a63e71db6a)
1 
2 #include <petsc/private/pcisimpl.h> /*I "petscpc.h" I*/
3 
4 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use) {
5   PC_IS *pcis = (PC_IS *)pc->data;
6 
7   PetscFunctionBegin;
8   pcis->use_stiffness_scaling = use;
9   PetscFunctionReturn(0);
10 }
11 
/*@
 PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity using
                              the diagonal of the local (subdomain) matrices.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity

   Level: intermediate

.seealso: `PCBDDC`
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, use, 2);
  /* dispatch to the implementation composed on the PC (silently a no-op if not composed) */
  PetscTryMethod(pc, "PCISSetUseStiffnessScaling_C", (PC, PetscBool), (pc, use));
  PetscFunctionReturn(0);
}
35 
/* Composed-method backend for PCISSetSubdomainDiagonalScaling().
   Takes a reference to the user-supplied vector and stores it as pcis->D.
   If setup has already been performed, the vector may be given either in the
   full local numbering (size pcis->n) -- in which case it is restricted to the
   interface dofs via the N_to_B scatter -- or directly on the interface dofs
   (size pcis->n_B); any other size is an error. */
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors) {
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  /* reference BEFORE destroying the old D, so the call is safe even when
     scaling_factors == pcis->D */
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      /* full-size vector: extract its interface part into a boundary-sized copy */
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }
  PetscFunctionReturn(0);
}
57 
/*@
 PCISSetSubdomainDiagonalScaling - Set a custom diagonal scaling vector for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Notes:
   Intended for use with jumping coefficients cases.

.seealso: `PCBDDC`
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(scaling_factors, VEC_CLASSID, 2);
  /* dispatch to the implementation composed on the PC (silently a no-op if not composed) */
  PetscTryMethod(pc, "PCISSetSubdomainDiagonalScaling_C", (PC, Vec), (pc, scaling_factors));
  PetscFunctionReturn(0);
}
81 
82 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal) {
83   PC_IS *pcis = (PC_IS *)pc->data;
84 
85   PetscFunctionBegin;
86   pcis->scaling_factor = scal;
87   if (pcis->D) { PetscCall(VecSet(pcis->D, pcis->scaling_factor)); }
88   PetscFunctionReturn(0);
89 }
90 
/*@
 PCISSetSubdomainScalingFactor - Set a constant scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended for use with jumping coefficients cases.

.seealso: `PCBDDC`
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed on the PC (silently a no-op if not composed) */
  PetscTryMethod(pc, "PCISSetSubdomainScalingFactor_C", (PC, PetscScalar), (pc, scal));
  PetscFunctionReturn(0);
}
113 
/* -------------------------------------------------------------------------- */
/*
   PCISSetUp - Common setup for substructuring ("IS") preconditioners built on a
   MATIS operator: identifies interior (I) / interface (B) dofs, creates work
   vectors and scatters, builds the partition-of-unity scaling vector D, and
   optionally extracts the local blocks and creates the local solvers.

   Input parameters:
.  pc              - the preconditioner context; pc->pmat must be of type MATIS
.  computematrices - if true, extract the local blocks A_II, A_IB, A_BI, A_BB
.  computesolvers  - if true, create the local Dirichlet (ksp_D) and Neumann (ksp_N) KSPs
*/
PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers) {
  PC_IS    *pcis = (PC_IS *)(pc->data);
  Mat_IS   *matis;
  MatReuse  reuse;
  PetscBool flg, issbaij;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires preconditioning matrix of type MATIS");
  matis = (Mat_IS *)pc->pmat->data;
  if (pc->useAmat) {
    /* when the operator matrix is used as well, it must also be MATIS */
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATIS, &flg));
    PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires linear system matrix of type MATIS");
  }

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt  n_I;
    PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global;
    PetscBT   bt;
    PetscInt  i, j;

    /* get info on mapping (take a reference to the MATIS row l2g mapping) */
    PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
    pcis->mapping = matis->rmapping;
    PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping, &pcis->n));
    PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));

    /* Identifying interior and interface nodes, in local numbering:
       a node is an interface node iff it is shared with some neighbor */
    PetscCall(PetscBTCreate(pcis->n, &bt));
    for (i = 0; i < pcis->n_neigh; i++)
      for (j = 0; j < pcis->n_shared[i]; j++) { PetscCall(PetscBTSet(bt, pcis->shared[i][j])); }

    /* Creating local and global index sets for interior and inteface nodes. */
    PetscCall(PetscMalloc1(pcis->n, &idx_I_local));
    PetscCall(PetscMalloc1(pcis->n, &idx_B_local));
    for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) {
      if (!PetscBTLookup(bt, i)) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }

    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcis->n_B, idx_B_local, idx_B_global));
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, n_I, idx_I_local, idx_I_global));

    /* Creating the index sets (local ones are sequential, global ones live on the PC's communicator) */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pcis->n_B, idx_B_local, PETSC_COPY_VALUES, &pcis->is_B_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), pcis->n_B, idx_B_global, PETSC_COPY_VALUES, &pcis->is_B_global));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_I, idx_I_local, PETSC_COPY_VALUES, &pcis->is_I_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), n_I, idx_I_global, PETSC_COPY_VALUES, &pcis->is_I_global));

    /* Freeing memory */
    PetscCall(PetscFree(idx_B_local));
    PetscCall(PetscFree(idx_I_local));
    PetscCall(PetscBTDestroy(&bt));

    /* Creating work vectors and arrays:
       *_N are local vectors over all local dofs, *_D over interior dofs,
       *_B over interface dofs; vec1_global is a parallel vector */
    PetscCall(VecDuplicate(matis->x, &pcis->vec1_N));
    PetscCall(VecDuplicate(pcis->vec1_N, &pcis->vec2_N));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_D));
    PetscCall(VecSetSizes(pcis->vec1_D, pcis->n - pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_D, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec2_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec3_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec4_D));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_B));
    PetscCall(VecSetSizes(pcis->vec1_B, pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_B, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec2_B));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec3_B));
    PetscCall(MatCreateVecs(pc->pmat, &pcis->vec1_global, NULL));
    PetscCall(PetscMalloc1(pcis->n, &pcis->work_N));
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecSet(pcis->D, pcis->scaling_factor));
    }

    /* Creating the scatter contexts */
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_I_local, pcis->vec1_D, (IS)0, &pcis->N_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_I_global, pcis->vec1_D, (IS)0, &pcis->global_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_B_local, pcis->vec1_B, (IS)0, &pcis->N_to_B));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_B_global, pcis->vec1_B, (IS)0, &pcis->global_to_B));

    /* map from boundary to local */
    PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local, &pcis->BtoNmap));
  }

  /* Normalize a (possibly user-supplied) scaling vector D to interface size;
     mirrors the logic in PCISSetSubdomainDiagonalScaling_IS */
  {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come first than the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
  if (computematrices) {
    PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
    PetscInt  bs, ibs;

    /* reuse previously extracted submatrices only when allowed and the
       nonzero pattern has not changed; otherwise rebuild from scratch */
    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDestroy(&pcis->A_II));
      PetscCall(MatDestroy(&pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_IB));
      PetscCall(MatDestroy(&pcis->A_BI));
      PetscCall(MatDestroy(&pcis->A_BB));
    }

    PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping, &ibs));
    PetscCall(MatGetBlockSize(matis->A, &bs));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->pA_II));
    if (amat) {
      /* distinct operator matrix: extract its own interior block */
      Mat_IS *amatis = (Mat_IS *)pc->mat->data;
      PetscCall(MatCreateSubMatrix(amatis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->A_II));
    } else {
      /* A_II aliases pA_II (reference counted) */
      PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_II));
      pcis->A_II = pcis->pA_II;
    }
    /* propagate the block size only when it matches the mapping's block size */
    PetscCall(MatSetBlockSize(pcis->A_II, bs == ibs ? bs : 1));
    PetscCall(MatSetBlockSize(pcis->pA_II, bs == ibs ? bs : 1));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_B_local, reuse, &pcis->A_BB));
    PetscCall(PetscObjectTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
    } else {
      /* SBAIJ stores only the upper triangle, so off-diagonal blocks cannot be
         extracted directly; convert to BAIJ first */
      Mat newmat;

      PetscCall(MatConvert(matis->A, MATSEQBAIJ, MAT_INITIAL_MATRIX, &newmat));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
      PetscCall(MatDestroy(&newmat));
    }
    PetscCall(MatSetBlockSize(pcis->A_BB, bs == ibs ? bs : 1));
  }

  /* Creating scaling vector D */
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_is_use_stiffness_scaling", &pcis->use_stiffness_scaling, NULL));
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt     i, n;

    /* use the local matrix diagonal (restricted to the interface) as weights */
    if (pcis->A_BB) {
      PetscCall(MatGetDiagonal(pcis->A_BB, pcis->D));
    } else {
      PetscCall(MatGetDiagonal(matis->A, pcis->vec1_N));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
    }
    PetscCall(VecAbs(pcis->D));
    /* guard against (near-)zero diagonal entries, which would break the division below */
    PetscCall(VecGetLocalSize(pcis->D, &n));
    PetscCall(VecGetArray(pcis->D, &a));
    for (i = 0; i < n; i++)
      if (PetscAbsScalar(a[i]) < PETSC_SMALL) a[i] = 1.0;
    PetscCall(VecRestoreArray(pcis->D, &a));
  }
  /* Turn D into a partition of unity: divide each interface weight by the sum
     of the weights of all subdomains sharing that dof (accumulated via the
     reverse ADD_VALUES scatter, then redistributed forward) */
  PetscCall(VecSet(pcis->vec1_global, 0.0));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecPointwiseDivide(pcis->D, pcis->D, pcis->vec1_B));
  /* See historical note 01, at the bottom of this file. */

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet: direct (LU/preonly) solver for the interior block A_II */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_D));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_D, pcis->A_II, pcis->A_II));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_D, "is_localD_"));
    PetscCall(KSPGetPC(pcis->ksp_D, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_D, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_D));
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    PetscCall(KSPSetUp(pcis->ksp_D));
    /* Neumann: direct (LU/preonly) solver for the whole local matrix */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_N));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_N, matis->A, matis->A));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_N, "is_localN_"));
    PetscCall(KSPGetPC(pcis->ksp_N, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_N, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_N));
    {
      PetscBool damp_fixed = PETSC_FALSE, remove_nullspace_fixed = PETSC_FALSE, set_damping_factor_floating = PETSC_FALSE, not_damp_floating = PETSC_FALSE, not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor, floating_factor;

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &fixed_factor, &damp_fixed));
      if (!damp_fixed) fixed_factor = 0.0;
      /* NOTE(review): -pc_is_damp_fixed is queried a second time as a bool here,
         overwriting the flag obtained above -- presumably to allow giving the
         option with no value; verify this is intentional */
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &damp_fixed, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_remove_nullspace_fixed", &remove_nullspace_fixed, NULL));

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &floating_factor, &set_damping_factor_floating));
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &set_damping_factor_floating, NULL));
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_damp_floating", &not_damp_floating, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_remove_nullspace_floating", &not_remove_nullspace_floating, NULL));

      if (pcis->pure_neumann) { /* floating subdomain */
        if (!(not_damp_floating)) {
          /* damp the singular local matrix by a small diagonal shift */
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
        }
        if (!(not_remove_nullspace_floating)) {
          /* attach the constant null space so the solver can project it out */
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          /* NOTE(review): this uses floating_factor although fixed_factor was read
             from -pc_is_damp_fixed above (and is otherwise unused) -- looks like the
             intended value is fixed_factor; confirm before changing */
          PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      }
    }
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    PetscCall(KSPSetUp(pcis->ksp_N));
  }
  PetscFunctionReturn(0);
}
387 
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy - Free all objects created by PCISCreate()/PCISSetUp() and remove
   the composed PCIS setter methods from the PC. Safe to call on a NULL PC or
   when setup was never performed (all destroy routines accept NULL objects).
*/
PetscErrorCode PCISDestroy(PC pc) {
  PC_IS *pcis;

  PetscFunctionBegin;
  if (!pc) PetscFunctionReturn(0);
  pcis = (PC_IS *)(pc->data);
  /* interior/interface index sets */
  PetscCall(ISDestroy(&pcis->is_B_local));
  PetscCall(ISDestroy(&pcis->is_I_local));
  PetscCall(ISDestroy(&pcis->is_B_global));
  PetscCall(ISDestroy(&pcis->is_I_global));
  /* extracted submatrices */
  PetscCall(MatDestroy(&pcis->A_II));
  PetscCall(MatDestroy(&pcis->pA_II));
  PetscCall(MatDestroy(&pcis->A_IB));
  PetscCall(MatDestroy(&pcis->A_BI));
  PetscCall(MatDestroy(&pcis->A_BB));
  PetscCall(VecDestroy(&pcis->D));
  /* local solvers */
  PetscCall(KSPDestroy(&pcis->ksp_N));
  PetscCall(KSPDestroy(&pcis->ksp_D));
  /* work vectors */
  PetscCall(VecDestroy(&pcis->vec1_N));
  PetscCall(VecDestroy(&pcis->vec2_N));
  PetscCall(VecDestroy(&pcis->vec1_D));
  PetscCall(VecDestroy(&pcis->vec2_D));
  PetscCall(VecDestroy(&pcis->vec3_D));
  PetscCall(VecDestroy(&pcis->vec4_D));
  PetscCall(VecDestroy(&pcis->vec1_B));
  PetscCall(VecDestroy(&pcis->vec2_B));
  PetscCall(VecDestroy(&pcis->vec3_B));
  PetscCall(VecDestroy(&pcis->vec1_global));
  /* scatter contexts */
  PetscCall(VecScatterDestroy(&pcis->global_to_D));
  PetscCall(VecScatterDestroy(&pcis->N_to_B));
  PetscCall(VecScatterDestroy(&pcis->N_to_D));
  PetscCall(VecScatterDestroy(&pcis->global_to_B));
  PetscCall(PetscFree(pcis->work_N));
  /* n_neigh > -1 means PCISSetUp() obtained the mapping info (see PCISCreate's sentinel);
     it must be restored BEFORE the mapping itself is destroyed */
  if (pcis->n_neigh > -1) { PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared))); }
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
  /* detach the composed setter methods */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", NULL));
  PetscFunctionReturn(0);
}
433 
/* -------------------------------------------------------------------------- */
/*
   PCISCreate - Allocate (if not already done by the caller) and initialize the
   PC_IS data structure, and compose the PCIS setter methods on the PC.
*/
PetscErrorCode PCISCreate(PC pc) {
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  if (!pcis) {
    /* allocate a zeroed PC_IS only when the caller has not installed its own data */
    PetscCall(PetscNewLog(pc, &pcis));
    pc->data = pcis;
  }
  pcis->n_neigh          = -1; /* sentinel: mapping info not yet obtained (checked in PCISDestroy) */
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* composing functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", PCISSetUseStiffnessScaling_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", PCISSetSubdomainScalingFactor_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", PCISSetSubdomainDiagonalScaling_IS));
  PetscFunctionReturn(0);
}
455 
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur - Applies the local Schur complement
                    S = A_BB - A_BI * inv(A_II) * A_IB
   (with inv(A_II) applied via the local Dirichlet solver ksp_D) to a vector.

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of Schur complement applied to chunk
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)

*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D) {
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  /* caller may pass vec2_B == NULL, in which case v doubles as workspace (and is overwritten) */
  if (!vec2_B) vec2_B = v;

  PetscCall(MatMult(pcis->A_BB, v, vec1_B));       /* vec1_B = A_BB * v */
  PetscCall(MatMult(pcis->A_IB, v, vec1_D));       /* vec1_D = A_IB * v */
  PetscCall(KSPSolve(pcis->ksp_D, vec1_D, vec2_D)); /* vec2_D = inv(A_II) * A_IB * v */
  PetscCall(KSPCheckSolve(pcis->ksp_D, pc, vec2_D));
  PetscCall(MatMult(pcis->A_BI, vec2_D, vec2_B));  /* vec2_B = A_BI * inv(A_II) * A_IB * v */
  PetscCall(VecAXPY(vec1_B, -1.0, vec2_B));        /* vec1_B = S * v */
  PetscFunctionReturn(0);
}
485 
486 /* -------------------------------------------------------------------------- */
487 /*
488    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
489    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
490    mode.
491 
492    Input parameters:
493 .  pc - preconditioner context
494 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
495 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
496 
497    Output parameter:
498 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
499 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
500 
501    Notes:
502    The entries in the array that do not correspond to interface nodes remain unaltered.
503 */
504 PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc) {
505   PetscInt        i;
506   const PetscInt *idex;
507   PetscScalar    *array_B;
508   PC_IS          *pcis = (PC_IS *)(pc->data);
509 
510   PetscFunctionBegin;
511   PetscCall(VecGetArray(v_B, &array_B));
512   PetscCall(ISGetIndices(pcis->is_B_local, &idex));
513 
514   if (smode == SCATTER_FORWARD) {
515     if (imode == INSERT_VALUES) {
516       for (i = 0; i < pcis->n_B; i++) array_B[i] = array_N[idex[i]];
517     } else { /* ADD_VALUES */
518       for (i = 0; i < pcis->n_B; i++) array_B[i] += array_N[idex[i]];
519     }
520   } else { /* SCATTER_REVERSE */
521     if (imode == INSERT_VALUES) {
522       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] = array_B[i];
523     } else { /* ADD_VALUES */
524       for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] += array_B[i];
525     }
526   }
527   PetscCall(ISRestoreIndices(pcis->is_B_local, &idex));
528   PetscCall(VecRestoreArray(v_B, &array_B));
529   PetscFunctionReturn(0);
530 }
531 
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)

*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N) {
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero everywhere, b injected on the interface nodes */
  PetscCall(VecSet(vec1_N, 0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  /* Checking for consistency of the RHS (debug aid, enabled by -pc_is_check_consistency):
     prints the average of the local RHS per subdomain; for a floating (pure Neumann)
     subdomain the RHS should be consistent with the constant null space */
  {
    PetscBool flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_is_check_consistency", &flg, NULL));
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));

      PetscCall(VecSum(vec1_N, &average));
      average = average / ((PetscReal)pcis->n);
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is floating. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is fixed.    Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      }
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  }
  /* Solving the system for vec2_N with the local Neumann solver */
  PetscCall(KSPSolve(pcis->ksp_N, vec1_N, vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N, pc, vec2_N));
  /* Extracting the local interface vector out of the solution */
  PetscCall(VecScatterBegin(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
595