xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision 2e65eb737b5b07432530db55f6b2a145ebc548b2)
1 
2 #include <../src/ksp/pc/impls/is/pcis.h> /*I "petscpc.h" I*/
3 
/* Implementation of PCISSetUseStiffnessScaling(), composed on the PC in PCISCreate() */
static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  PetscFunctionBegin;
  /* consulted in PCISSetUp() when the scaling vector pcis->D is built */
  pcis->use_stiffness_scaling = use;
  PetscFunctionReturn(0);
}
12 
/*@
 PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity using
                              the diagonal entries of the local matrices.

   Logically collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity

   Level: intermediate

   Notes:
   The same behavior can be requested at setup time with the option
   -pc_is_use_stiffness_scaling.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,use,2);
  /* dispatches to the implementation composed in PCISCreate(); silently a no-op otherwise */
  PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));
  PetscFunctionReturn(0);
}
37 
/* Implementation of PCISSetSubdomainDiagonalScaling(), composed on the PC in PCISCreate() */
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS          *pcis = (PC_IS*)pc->data;

  PetscFunctionBegin;
  /* take a reference before destroying the old vector, so the call is safe
     even if scaling_factors happens to be the current pcis->D */
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt sn;

    /* Once setup has run, the vector may be given either in the full local
       numbering (size pcis->n) or already restricted to the interface (size
       pcis->n_B); a full-size vector is reduced to the interface via the
       N_to_B scatter.  The same normalization is performed in PCISSetUp()
       for vectors supplied before setup. */
    PetscCall(VecGetSize(pcis->D,&sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B,&pcis->D));
      PetscCall(VecCopy(pcis->vec1_B,pcis->D));
    } else PetscCheck(sn == pcis->n_B,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT,pcis->n_B,pcis->n,sn);
  }
  PetscFunctionReturn(0);
}
60 
/*@
 PCISSetSubdomainDiagonalScaling - Set the diagonal scaling vector used by PCIS
                                   to build the partition of unity.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain, given either in the full
                     local numbering or restricted to the local interface

   Level: intermediate

   Notes:
   Intended for use with jumping coefficient cases.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(scaling_factors,VEC_CLASSID,2);
  /* dispatches to the implementation composed in PCISCreate(); silently a no-op otherwise */
  PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));
  PetscFunctionReturn(0);
}
85 
86 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
87 {
88   PC_IS *pcis = (PC_IS*)pc->data;
89 
90   PetscFunctionBegin;
91   pcis->scaling_factor = scal;
92   if (pcis->D) {
93 
94     PetscCall(VecSet(pcis->D,pcis->scaling_factor));
95   }
96   PetscFunctionReturn(0);
97 }
98 
/*@
 PCISSetSubdomainScalingFactor - Set a constant scaling factor for the subdomain
                                 partition-of-unity vector used by PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended for use with jumping coefficient cases.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to the implementation composed in PCISCreate(); silently a no-op otherwise */
  PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));
  PetscFunctionReturn(0);
}
122 
123 /* -------------------------------------------------------------------------- */
/*
   PCISSetUp - Builds the interior/interface index sets, work vectors and
   scatters, extracts the A_II/A_IB/A_BI/A_BB blocks from the MATIS local
   matrix, computes the scaling vector D, and sets up the local Dirichlet
   and Neumann KSP solvers used by PCIS-based preconditioners.
*/
PetscErrorCode  PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
{
  PC_IS          *pcis  = (PC_IS*)(pc->data);
  Mat_IS         *matis;
  MatReuse       reuse;
  PetscBool      flg,issbaij;

  PetscFunctionBegin;
  /* The code below relies on the unassembled local matrix matis->A and the
     local-to-global row mapping, so both pmat and (when used) mat must be MATIS */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg));
  PetscCheck(flg,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Requires preconditioning matrix of type MATIS");
  matis = (Mat_IS*)pc->pmat->data;
  if (pc->useAmat) {
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat,MATIS,&flg));
    PetscCheck(flg,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Requires linear system matrix of type MATIS");
  }

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt    n_I;
    PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscBT     bt;
    PetscInt    i,j;

    /* get info on mapping: share the MATIS row mapping (reference before
       destroying any previous one) */
    PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
    pcis->mapping = matis->rmapping;
    PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping,&pcis->n));
    PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared)));

    /* Identifying interior and interface nodes, in local numbering:
       a node is on the interface iff it is shared with at least one neighbor */
    PetscCall(PetscBTCreate(pcis->n,&bt));
    for (i=0;i<pcis->n_neigh;i++)
      for (j=0;j<pcis->n_shared[i];j++) {
        PetscCall(PetscBTSet(bt,pcis->shared[i][j]));
      }

    /* Creating local and global index sets for interior and interface nodes. */
    PetscCall(PetscMalloc1(pcis->n,&idx_I_local));
    PetscCall(PetscMalloc1(pcis->n,&idx_B_local));
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (!PetscBTLookup(bt,i)) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }

    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping,pcis->n_B,idx_B_local,idx_B_global));
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping,n_I,idx_I_local,idx_I_global));

    /* Creating the index sets (local ones are sequential, global ones live on the PC's communicator) */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc),pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc),n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global));

    /* Freeing memory */
    PetscCall(PetscFree(idx_B_local));
    PetscCall(PetscFree(idx_I_local));
    PetscCall(PetscBTDestroy(&bt));

    /* Creating work vectors and arrays (suffixes: _N full local, _D interior, _B interface) */
    PetscCall(VecDuplicate(matis->x,&pcis->vec1_N));
    PetscCall(VecDuplicate(pcis->vec1_N,&pcis->vec2_N));
    PetscCall(VecCreate(PETSC_COMM_SELF,&pcis->vec1_D));
    PetscCall(VecSetSizes(pcis->vec1_D,pcis->n-pcis->n_B,PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_D,((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_D,&pcis->vec2_D));
    PetscCall(VecDuplicate(pcis->vec1_D,&pcis->vec3_D));
    PetscCall(VecDuplicate(pcis->vec1_D,&pcis->vec4_D));
    PetscCall(VecCreate(PETSC_COMM_SELF,&pcis->vec1_B));
    PetscCall(VecSetSizes(pcis->vec1_B,pcis->n_B,PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_B,((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_B,&pcis->vec2_B));
    PetscCall(VecDuplicate(pcis->vec1_B,&pcis->vec3_B));
    PetscCall(MatCreateVecs(pc->pmat,&pcis->vec1_global,NULL));
    PetscCall(PetscMalloc1(pcis->n,&pcis->work_N));
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
      PetscCall(VecDuplicate(pcis->vec1_B,&pcis->D));
      PetscCall(VecSet(pcis->D,pcis->scaling_factor));
    }

    /* Creating the scatter contexts */
    PetscCall(VecScatterCreate(pcis->vec1_N,pcis->is_I_local,pcis->vec1_D,(IS)0,&pcis->N_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B));
    PetscCall(VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B));

    /* map from boundary to local */
    PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&pcis->BtoNmap));
  }

  /* Normalize the scaling vector to interface size: a vector supplied in the
     full local numbering (size pcis->n) is restricted to the interface through
     the N_to_B scatter.  This mirrors the logic in
     PCISSetSubdomainDiagonalScaling_IS() for vectors set after setup. */
  {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D,&sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B,&pcis->D));
      PetscCall(VecCopy(pcis->vec1_B,pcis->D));
    } else PetscCheck(sn == pcis->n_B,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT,pcis->n_B,pcis->n,sn);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come first than the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
  if (computematrices) {
    PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
    PetscInt  bs,ibs;

    /* submatrices can be reused only if the nonzero pattern did not change */
    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDestroy(&pcis->A_II));
      PetscCall(MatDestroy(&pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_IB));
      PetscCall(MatDestroy(&pcis->A_BI));
      PetscCall(MatDestroy(&pcis->A_BB));
    }

    PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping,&ibs));
    PetscCall(MatGetBlockSize(matis->A,&bs));
    PetscCall(MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->pA_II));
    if (amat) {
      /* a separate system matrix was supplied: extract its own interior block */
      Mat_IS *amatis = (Mat_IS*)pc->mat->data;
      PetscCall(MatCreateSubMatrix(amatis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->A_II));
    } else {
      /* A_II aliases pA_II (reference taken before the destroy, so reuse is safe) */
      PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_II));
      pcis->A_II = pcis->pA_II;
    }
    /* keep the matrix block size only when it matches the mapping block size */
    PetscCall(MatSetBlockSize(pcis->A_II,bs == ibs ? bs : 1));
    PetscCall(MatSetBlockSize(pcis->pA_II,bs == ibs ? bs : 1));
    PetscCall(MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,reuse,&pcis->A_BB));
    PetscCall(PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij));
    if (!issbaij) {
      PetscCall(MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB));
      PetscCall(MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI));
    } else {
      /* SBAIJ stores only the upper triangle; convert to BAIJ so the
         off-diagonal blocks A_IB and A_BI can be extracted */
      Mat newmat;

      PetscCall(MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat));
      PetscCall(MatCreateSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB));
      PetscCall(MatCreateSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI));
      PetscCall(MatDestroy(&newmat));
    }
    PetscCall(MatSetBlockSize(pcis->A_BB,bs == ibs ? bs : 1));
  }

  /* Creating scaling vector D */
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options,((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL));
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt    i,n;

    if (pcis->A_BB) {
      PetscCall(MatGetDiagonal(pcis->A_BB,pcis->D));
    } else {
      /* no interface block available (computematrices false): take the diagonal
         of the full local matrix and restrict it to the interface */
      PetscCall(MatGetDiagonal(matis->A,pcis->vec1_N));
      PetscCall(VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD));
    }
    PetscCall(VecAbs(pcis->D));
    /* guard against (near-)zero diagonal entries before the division below */
    PetscCall(VecGetLocalSize(pcis->D,&n));
    PetscCall(VecGetArray(pcis->D,&a));
    for (i=0;i<n;i++) if (PetscAbsScalar(a[i])<PETSC_SMALL) a[i] = 1.0;
    PetscCall(VecRestoreArray(pcis->D,&a));
  }
  /* divide each local entry of D by the sum of contributions from all
     subdomains sharing that node, yielding a partition of unity on the interface */
  PetscCall(VecSet(pcis->vec1_global,0.0));
  PetscCall(VecScatterBegin(pcis->global_to_B,pcis->D,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B,pcis->D,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterBegin(pcis->global_to_B,pcis->vec1_global,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B,pcis->vec1_global,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B));
  /* See historical note 01, at the bottom of this file. */

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet (interior) solver: A_II, LU + preonly by default, prefix is_localD_ */
    PetscCall(KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D,pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1));
    PetscCall(KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_"));
    PetscCall(KSPGetPC(pcis->ksp_D,&pc_ctx));
    PetscCall(PCSetType(pc_ctx,PCLU));
    PetscCall(KSPSetType(pcis->ksp_D,KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_D));
    /* set up eagerly so factorization errors surface during PCSetUp, not at first solve */
    PetscCall(KSPSetUp(pcis->ksp_D));
    /* Neumann (whole local) solver: matis->A, LU + preonly by default, prefix is_localN_ */
    PetscCall(KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N,pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1));
    PetscCall(KSPSetOperators(pcis->ksp_N,matis->A,matis->A));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_"));
    PetscCall(KSPGetPC(pcis->ksp_N,&pc_ctx));
    PetscCall(PCSetType(pc_ctx,PCLU));
    PetscCall(KSPSetType(pcis->ksp_N,KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_N));
    {
      PetscBool damp_fixed                    = PETSC_FALSE,
                remove_nullspace_fixed        = PETSC_FALSE,
                set_damping_factor_floating   = PETSC_FALSE,
                not_damp_floating             = PETSC_FALSE,
                not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor,
                floating_factor;

      /* NOTE(review): each damping option is queried twice (Real then Bool), so
         the 0.0 defaults assigned after the Real queries are immediately
         superseded by the Bool-query defaults below -- looks redundant; confirm intent */
      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed));
      if (!damp_fixed) fixed_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL));

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&floating_factor,&set_damping_factor_floating));
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL));
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL));

      if (pcis->pure_neumann) {  /* floating subdomain */
        if (!(not_damp_floating)) {
          PetscCall(PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx,floating_factor));
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp));
          PetscCall(MatSetNullSpace(matis->A,nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      } else {  /* fixed subdomain */
        if (damp_fixed) {
          /* NOTE(review): this branch damps with floating_factor even though it is
             the fixed-subdomain case, and fixed_factor (read above) is never used --
             possibly a bug; confirm the intended damping semantics */
          PetscCall(PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx,floating_factor));
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp));
          PetscCall(MatSetNullSpace(matis->A,nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      }
    }
    /* set up eagerly so factorization errors surface during PCSetUp, not at first solve */
    PetscCall(KSPSetUp(pcis->ksp_N));
  }
  PetscFunctionReturn(0);
}
403 
404 /* -------------------------------------------------------------------------- */
/*
   PCISDestroy - Frees every object created by PCISCreate()/PCISSetUp()
   (index sets, submatrices, work vectors, scatters, local solvers) and
   removes the composed setter functions from the PC.
*/
PetscErrorCode  PCISDestroy(PC pc)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  PetscCall(ISDestroy(&pcis->is_B_local));
  PetscCall(ISDestroy(&pcis->is_I_local));
  PetscCall(ISDestroy(&pcis->is_B_global));
  PetscCall(ISDestroy(&pcis->is_I_global));
  PetscCall(MatDestroy(&pcis->A_II));
  PetscCall(MatDestroy(&pcis->pA_II));
  PetscCall(MatDestroy(&pcis->A_IB));
  PetscCall(MatDestroy(&pcis->A_BI));
  PetscCall(MatDestroy(&pcis->A_BB));
  PetscCall(VecDestroy(&pcis->D));
  PetscCall(KSPDestroy(&pcis->ksp_N));
  PetscCall(KSPDestroy(&pcis->ksp_D));
  PetscCall(VecDestroy(&pcis->vec1_N));
  PetscCall(VecDestroy(&pcis->vec2_N));
  PetscCall(VecDestroy(&pcis->vec1_D));
  PetscCall(VecDestroy(&pcis->vec2_D));
  PetscCall(VecDestroy(&pcis->vec3_D));
  PetscCall(VecDestroy(&pcis->vec4_D));
  PetscCall(VecDestroy(&pcis->vec1_B));
  PetscCall(VecDestroy(&pcis->vec2_B));
  PetscCall(VecDestroy(&pcis->vec3_B));
  PetscCall(VecDestroy(&pcis->vec1_global));
  PetscCall(VecScatterDestroy(&pcis->global_to_D));
  PetscCall(VecScatterDestroy(&pcis->N_to_B));
  PetscCall(VecScatterDestroy(&pcis->N_to_D));
  PetscCall(VecScatterDestroy(&pcis->global_to_B));
  PetscCall(PetscFree(pcis->work_N));
  /* n_neigh > -1 means PCISSetUp() obtained neighbor info from the mapping;
     it must be returned before the mapping itself is destroyed */
  if (pcis->n_neigh > -1) {
    PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared)));
  }
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
  /* remove the setters composed in PCISCreate() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL));
  PetscFunctionReturn(0);
}
450 
451 /* -------------------------------------------------------------------------- */
/*
   PCISCreate - Initializes the PC_IS defaults and composes the PCISSet*()
   setter implementations on the PC; PCISSetUp() does the heavy lifting later.
*/
455 PetscErrorCode  PCISCreate(PC pc)
456 {
457   PC_IS          *pcis = (PC_IS*)(pc->data);
458 
459   PetscFunctionBegin;
460   pcis->n_neigh          = -1;
461   pcis->scaling_factor   = 1.0;
462   pcis->reusesubmatrices = PETSC_TRUE;
463   /* composing functions */
464   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",PCISSetUseStiffnessScaling_IS));
465   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",PCISSetSubdomainScalingFactor_IS));
466   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",PCISSetSubdomainDiagonalScaling_IS));
467   PetscFunctionReturn(0);
468 }
469 
470 /* -------------------------------------------------------------------------- */
471 /*
472    PCISApplySchur -
473 
474    Input parameters:
475 .  pc - preconditioner context
476 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
477 
478    Output parameters:
479 .  vec1_B - result of Schur complement applied to chunk
480 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
481 .  vec1_D - garbage (used as work space)
482 .  vec2_D - garbage (used as work space)
483 
484 */
PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /* if no work vector was supplied, reuse v (v is then overwritten) */
  if (!vec2_B) vec2_B = v;

  /* vec1_B = S v = (A_BB - A_BI inv(A_II) A_IB) v, computed as: */
  PetscCall(MatMult(pcis->A_BB,v,vec1_B));        /* vec1_B = A_BB v           */
  PetscCall(MatMult(pcis->A_IB,v,vec1_D));        /* vec1_D = A_IB v           */
  PetscCall(KSPSolve(pcis->ksp_D,vec1_D,vec2_D)); /* vec2_D = inv(A_II) vec1_D */
  PetscCall(KSPCheckSolve(pcis->ksp_D,pc,vec2_D));
  PetscCall(MatMult(pcis->A_BI,vec2_D,vec2_B));   /* vec2_B = A_BI vec2_D      */
  PetscCall(VecAXPY(vec1_B,-1.0,vec2_B));
  PetscFunctionReturn(0);
}
500 
501 /* -------------------------------------------------------------------------- */
502 /*
503    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
504    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
505    mode.
506 
507    Input parameters:
508 .  pc - preconditioner context
509 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
510 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
511 
512    Output parameter:
513 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
514 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
515 
516    Notes:
517    The entries in the array that do not correspond to interface nodes remain unaltered.
518 */
519 PetscErrorCode  PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
520 {
521   PetscInt       i;
522   const PetscInt *idex;
523   PetscScalar    *array_B;
524   PC_IS          *pcis = (PC_IS*)(pc->data);
525 
526   PetscFunctionBegin;
527   PetscCall(VecGetArray(v_B,&array_B));
528   PetscCall(ISGetIndices(pcis->is_B_local,&idex));
529 
530   if (smode == SCATTER_FORWARD) {
531     if (imode == INSERT_VALUES) {
532       for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
533     } else {  /* ADD_VALUES */
534       for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
535     }
536   } else {  /* SCATTER_REVERSE */
537     if (imode == INSERT_VALUES) {
538       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
539     } else {  /* ADD_VALUES */
540       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
541     }
542   }
543   PetscCall(ISRestoreIndices(pcis->is_B_local,&idex));
544   PetscCall(VecRestoreArray(v_B,&array_B));
545   PetscFunctionReturn(0);
546 }
547 
548 /* -------------------------------------------------------------------------- */
549 /*
550    PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
551    More precisely, solves the problem:
552                                         [ A_II  A_IB ] [ . ]   [ 0 ]
553                                         [            ] [   ] = [   ]
554                                         [ A_BI  A_BB ] [ x ]   [ b ]
555 
556    Input parameters:
557 .  pc - preconditioner context
558 .  b - vector of local interface nodes (including ghosts)
559 
560    Output parameters:
561 .  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
562        complement to b
563 .  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
564 .  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
565 
566 */
PetscErrorCode  PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior entries, b on the interface */
  PetscCall(VecSet(vec1_N,0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE));
  /* Checking for consistency of the RHS (diagnostic only, enabled via option):
     reports the mean of the RHS entries per subdomain */
  {
    PetscBool flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL,NULL,"-pc_is_check_consistency",&flg,NULL));
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer));

      PetscCall(VecSum(vec1_N,&average));
      average = average / ((PetscReal)pcis->n);
      /* synchronized printing so each rank's line appears in order */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,(double)PetscAbsScalar(average)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",PetscGlobalRank,(double)PetscAbsScalar(average)));
      }
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  }
  /* Solving the system for vec2_N */
  PetscCall(KSPSolve(pcis->ksp_N,vec1_N,vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N,pc,vec2_N));
  /* Extracting the local interface vector out of the solution */
  PetscCall(VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
612