xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision 047240e14af00aad1ef65e96f6fface8924f7f7e)
1 
2 #include "../src/ksp/pc/impls/is/pcis.h" /*I "petscpc.h" I*/
3 
4 EXTERN_C_BEGIN
5 #undef __FUNCT__
6 #define __FUNCT__ "PCISSetUseStiffnessScaling_IS"
7 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
8 {
9   PC_IS *pcis = (PC_IS*)pc->data;
10 
11   PetscFunctionBegin;
12   pcis->use_stiffness_scaling = use;
13   PetscFunctionReturn(0);
14 }
15 EXTERN_C_END
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "PCISSetUseStiffnessScaling"
19 /*@
20  PCISSetUseStiffnessScaling - Tells PCIS to construct partition of unity using
21                               local matrices' diagonal.
22 
23    Not collective
24 
25    Input Parameters:
26 +  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity.
28 
29    Level: intermediate
30 
31    Notes:
32 
33 .seealso: PCBDDC
34 @*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Forward to the type-specific implementation composed on the PC (see
     PCISCreate()); PetscTryMethod is a no-op if the PC does not provide it. */
  ierr = PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
44 
45 EXTERN_C_BEGIN
46 #undef __FUNCT__
47 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling_IS"
48 static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
49 {
50   PetscErrorCode ierr;
51   PC_IS          *pcis = (PC_IS*)pc->data;
52 
53   PetscFunctionBegin;
54   ierr    = VecDestroy(&pcis->D);CHKERRQ(ierr);
55   ierr    = PetscObjectReference((PetscObject)scaling_factors);CHKERRQ(ierr);
56   pcis->D = scaling_factors;
57   PetscFunctionReturn(0);
58 }
59 EXTERN_C_END
60 
61 #undef __FUNCT__
62 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling"
63 /*@
64  PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.
65 
66    Not collective
67 
68    Input Parameters:
69 +  pc - the preconditioning context
70 -  scaling_factors - scaling factors for the subdomain
71 
72    Level: intermediate
73 
74    Notes:
75    Intended to use with jumping coefficients cases.
76 
77 .seealso: PCBDDC
78 @*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Forward to the type-specific implementation composed on the PC (see
     PCISCreate()); silently does nothing for other PC types. */
  ierr = PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
88 
89 EXTERN_C_BEGIN
90 #undef __FUNCT__
91 #define __FUNCT__ "PCISSetSubdomainScalingFactor_IS"
92 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
93 {
94   PC_IS *pcis = (PC_IS*)pc->data;
95 
96   PetscFunctionBegin;
97   pcis->scaling_factor = scal;
98   PetscFunctionReturn(0);
99 }
100 EXTERN_C_END
101 
102 #undef __FUNCT__
103 #define __FUNCT__ "PCISSetSubdomainScalingFactor"
104 /*@
105  PCISSetSubdomainScalingFactor - Set scaling factor for PCIS.
106 
107    Not collective
108 
109    Input Parameters:
110 +  pc - the preconditioning context
111 -  scal - scaling factor for the subdomain
112 
113    Level: intermediate
114 
115    Notes:
116    Intended to use with jumping coefficients cases.
117 
118 .seealso: PCBDDC
119 @*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Forward to the type-specific implementation composed on the PC (see
     PCISCreate()); silently does nothing for other PC types. */
  ierr = PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
129 
130 
131 /* -------------------------------------------------------------------------- */
132 /*
133    PCISSetUp -
134 */
135 #undef __FUNCT__
136 #define __FUNCT__ "PCISSetUp"
137 PetscErrorCode  PCISSetUp(PC pc)
138 {
139   PC_IS          *pcis  = (PC_IS*)(pc->data);
140   Mat_IS         *matis = (Mat_IS*)pc->mat->data;
141   PetscInt       i;
142   PetscErrorCode ierr;
143   PetscBool      flg;
144   Vec            counter;
145 
146   PetscFunctionBegin;
147   ierr = PetscObjectTypeCompare((PetscObject)pc->mat,MATIS,&flg);CHKERRQ(ierr);
148   if (!flg) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS");
149 
150   pcis->pure_neumann = matis->pure_neumann;
151 
152   /*
153     Creating the local vector vec1_N, containing the inverse of the number
154     of subdomains to which each local node (either owned or ghost)
155     pertains. To accomplish that, we scatter local vectors of 1's to
156     a global vector (adding the values); scatter the result back to
157     local vectors and finally invert the result.
158   */
159   ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
160   ierr = MatGetVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */
161   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
162   ierr = VecSet(pcis->vec1_N,1.0);CHKERRQ(ierr);
163   ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
164   ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
165   ierr = VecScatterBegin(matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
166   ierr = VecScatterEnd  (matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
167   /*
168     Creating local and global index sets for interior and
169     inteface nodes. Notice that interior nodes have D[i]==1.0.
170   */
171   {
172     PetscInt    n_I;
173     PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
174     PetscScalar *array;
175     /* Identifying interior and interface nodes, in local numbering */
176     ierr = VecGetSize(pcis->vec1_N,&pcis->n);CHKERRQ(ierr);
177     ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
178     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);CHKERRQ(ierr);
179     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);CHKERRQ(ierr);
180     for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
181       if (array[i] == 1.0) {
182         idx_I_local[n_I] = i;
183         n_I++;
184       } else {
185         idx_B_local[pcis->n_B] = i;
186         pcis->n_B++;
187       }
188     }
189     /* Getting the global numbering */
190     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
191     idx_I_global = idx_B_local + pcis->n_B;
192     ierr         = ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
193     ierr         = ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);
194 
195     /* Creating the index sets. */
196     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);CHKERRQ(ierr);
197     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);CHKERRQ(ierr);
198     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);CHKERRQ(ierr);
199     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);CHKERRQ(ierr);
200 
201     /* Freeing memory and restoring arrays */
202     ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
203     ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
204     ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
205   }
206 
207   /*
208     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
209     is such that interior nodes come first than the interface ones, we have
210 
211     [           |      ]
212     [    A_II   | A_IB ]
213     A = [           |      ]
214     [-----------+------]
215     [    A_BI   | A_BB ]
216   */
217 
218   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
219   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
220   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
221   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
222 
223   /*
224     Creating work vectors and arrays
225   */
226   /* pcis->vec1_N has already been created */
227   ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
228   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
229   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
230   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
231   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
232   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
233   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
234   ierr = MatGetVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
235   ierr = PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);CHKERRQ(ierr);
236 
237   /* Creating the scatter contexts */
238   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
239   ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
240   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);
241 
242   /* Creating scaling "matrix" D */
243   ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,PETSC_NULL);CHKERRQ(ierr);
244   if (!pcis->D) {
245     ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
246     if (!pcis->use_stiffness_scaling) {
247       ierr = VecSet(pcis->D,pcis->scaling_factor);CHKERRQ(ierr);
248     } else {
249       ierr = MatGetDiagonal(matis->A,pcis->vec1_N);CHKERRQ(ierr);
250       ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
251       ierr = VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
252     }
253   }
254   ierr = VecCopy(pcis->D,pcis->vec1_B);CHKERRQ(ierr);
255   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
256   ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
257   ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
258   ierr = VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
259   ierr = VecScatterEnd  (pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
260   ierr = VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);CHKERRQ(ierr);
261 
262   /* See historical note 01, at the bottom of this file. */
263 
264   /*
265     Creating the KSP contexts for the local Dirichlet and Neumann problems.
266   */
267   {
268     PC pc_ctx;
269     /* Dirichlet */
270     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
271     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr);
272     ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
273     ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
274     ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
275     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
276     ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
277     ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
278     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
279     ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
280     /* Neumann */
281     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
282     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);CHKERRQ(ierr);
283     ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);CHKERRQ(ierr);
284     ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
285     ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
286     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
287     ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
288     ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
289     {
290       PetscBool damp_fixed                    = PETSC_FALSE,
291                 remove_nullspace_fixed        = PETSC_FALSE,
292                 set_damping_factor_floating   = PETSC_FALSE,
293                 not_damp_floating             = PETSC_FALSE,
294                 not_remove_nullspace_floating = PETSC_FALSE;
295       PetscReal fixed_factor,
296                 floating_factor;
297 
298       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
299       if (!damp_fixed) fixed_factor = 0.0;
300       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,PETSC_NULL);CHKERRQ(ierr);
301 
302       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,PETSC_NULL);CHKERRQ(ierr);
303 
304       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
305                               &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
306       if (!set_damping_factor_floating) floating_factor = 0.0;
307       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,PETSC_NULL);CHKERRQ(ierr);
308       if (!set_damping_factor_floating) floating_factor = 1.e-12;
309 
310       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,PETSC_NULL);CHKERRQ(ierr);
311 
312       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,PETSC_NULL);CHKERRQ(ierr);
313 
314       if (pcis->pure_neumann) {  /* floating subdomain */
315         if (!(not_damp_floating)) {
316           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
317           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
318         }
319         if (!(not_remove_nullspace_floating)) {
320           MatNullSpace nullsp;
321           ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
322           ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
323           ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
324         }
325       } else {  /* fixed subdomain */
326         if (damp_fixed) {
327           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
328           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
329         }
330         if (remove_nullspace_fixed) {
331           MatNullSpace nullsp;
332           ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
333           ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
334           ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
335         }
336       }
337     }
338     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
339     ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
340   }
341 
342   ierr = ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
343 
344   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
345 
346   ierr = VecDestroy(&counter);CHKERRQ(ierr);
347   PetscFunctionReturn(0);
348 }
349 
350 /* -------------------------------------------------------------------------- */
351 /*
352    PCISDestroy -
353 */
354 #undef __FUNCT__
355 #define __FUNCT__ "PCISDestroy"
/*
   PCISDestroy - Frees everything created by PCISCreate()/PCISSetUp():
   index sets, local matrix blocks, the scaling vector, the local solvers,
   work vectors, scatters and the composed setter functions.
   All PETSc destroy routines accept NULL, so this is safe even if
   PCISSetUp() was never called.
*/
PetscErrorCode  PCISDestroy(PC pc)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Index sets */
  ierr = ISDestroy(&pcis->is_B_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_B_global);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_global);CHKERRQ(ierr);
  /* Local matrix blocks */
  ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr);
  /* Scaling vector and local solvers */
  ierr = VecDestroy(&pcis->D);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_N);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr);
  /* Work vectors */
  ierr = VecDestroy(&pcis->vec1_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_global);CHKERRQ(ierr);
  /* Scatter contexts */
  ierr = VecScatterDestroy(&pcis->global_to_D);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_B);CHKERRQ(ierr);
  ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);
  /* The mapping argument is unused by RestoreInfo; only the arrays matter here */
  if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
    ierr = ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
  }
  /* Un-compose the setters registered in PCISCreate() */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetUseStiffnessScaling_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
394 
395 /* -------------------------------------------------------------------------- */
396 /*
397    PCISCreate -
398 */
399 #undef __FUNCT__
400 #define __FUNCT__ "PCISCreate"
401 PetscErrorCode  PCISCreate(PC pc)
402 {
403   PC_IS          *pcis = (PC_IS*)(pc->data);
404   PetscErrorCode ierr;
405 
406   PetscFunctionBegin;
407   pcis->is_B_local  = 0;
408   pcis->is_I_local  = 0;
409   pcis->is_B_global = 0;
410   pcis->is_I_global = 0;
411   pcis->A_II        = 0;
412   pcis->A_IB        = 0;
413   pcis->A_BI        = 0;
414   pcis->A_BB        = 0;
415   pcis->D           = 0;
416   pcis->ksp_N       = 0;
417   pcis->ksp_D      = 0;
418   pcis->vec1_N      = 0;
419   pcis->vec2_N      = 0;
420   pcis->vec1_D      = 0;
421   pcis->vec2_D      = 0;
422   pcis->vec3_D      = 0;
423   pcis->vec1_B      = 0;
424   pcis->vec2_B      = 0;
425   pcis->vec3_B      = 0;
426   pcis->vec1_global = 0;
427   pcis->work_N      = 0;
428   pcis->global_to_D = 0;
429   pcis->N_to_B      = 0;
430   pcis->global_to_B = 0;
431 
432   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
433 
434   pcis->scaling_factor = 1.0;
435   /* composing functions */
436   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetUseStiffnessScaling_C","PCISSetUseStiffnessScaling_IS",
437                                            PCISSetUseStiffnessScaling_IS);CHKERRQ(ierr);
438   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","PCISSetSubdomainScalingFactor_IS",
439                                            PCISSetSubdomainScalingFactor_IS);CHKERRQ(ierr);
440   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","PCISSetSubdomainDiagonalScaling_IS",
441                                            PCISSetSubdomainDiagonalScaling_IS);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 
445 /* -------------------------------------------------------------------------- */
446 /*
447    PCISApplySchur -
448 
449    Input parameters:
450 .  pc - preconditioner context
451 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
452 
453    Output parameters:
454 .  vec1_B - result of Schur complement applied to chunk
455 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
456 .  vec1_D - garbage (used as work space)
457 .  vec2_D - garbage (used as work space)
458 
459 */
460 #undef __FUNCT__
461 #define __FUNCT__ "PCISApplySchur"
462 PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
463 {
464   PetscErrorCode ierr;
465   PC_IS          *pcis = (PC_IS*)(pc->data);
466 
467   PetscFunctionBegin;
468   if (!vec2_B) vec2_B = v;
469 
470   ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);
471   ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);
472   ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);
473   ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);
474   ierr = VecAXPY(vec1_B,-1.0,vec2_B);CHKERRQ(ierr);
475   PetscFunctionReturn(0);
476 }
477 
478 /* -------------------------------------------------------------------------- */
479 /*
480    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
481    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
482    mode.
483 
484    Input parameters:
485 .  pc - preconditioner context
486 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
487 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
488 
489    Output parameter:
490 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
491 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
492 
493    Notes:
494    The entries in the array that do not correspond to interface nodes remain unaltered.
495 */
496 #undef __FUNCT__
497 #define __FUNCT__ "PCISScatterArrayNToVecB"
498 PetscErrorCode  PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
499 {
500   PetscInt       i;
501   const PetscInt *idex;
502   PetscErrorCode ierr;
503   PetscScalar    *array_B;
504   PC_IS          *pcis = (PC_IS*)(pc->data);
505 
506   PetscFunctionBegin;
507   ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
508   ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
509 
510   if (smode == SCATTER_FORWARD) {
511     if (imode == INSERT_VALUES) {
512       for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
513     } else {  /* ADD_VALUES */
514       for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
515     }
516   } else {  /* SCATTER_REVERSE */
517     if (imode == INSERT_VALUES) {
518       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
519     } else {  /* ADD_VALUES */
520       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
521     }
522   }
523   ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
524   ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
525   PetscFunctionReturn(0);
526 }
527 
528 /* -------------------------------------------------------------------------- */
529 /*
530    PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
531    More precisely, solves the problem:
532                                         [ A_II  A_IB ] [ . ]   [ 0 ]
533                                         [            ] [   ] = [   ]
534                                         [ A_BI  A_BB ] [ x ]   [ b ]
535 
536    Input parameters:
537 .  pc - preconditioner context
538 .  b - vector of local interface nodes (including ghosts)
539 
540    Output parameters:
541 .  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
542        complement to b
543 .  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
544 .  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
545 
546 */
547 #undef __FUNCT__
548 #define __FUNCT__ "PCISApplyInvSchur"
PetscErrorCode  PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior, b on the interface */
  ierr = VecSet(vec1_N,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* Checking for consistency of the RHS: for a floating (pure Neumann) subdomain
     the RHS must be orthogonal to the null space (constants), so its average
     should be ~0. Enabled with -pc_is_check_consistency; diagnostic only. */
  {
    PetscBool flg = PETSC_FALSE;
    ierr = PetscOptionsGetBool(PETSC_NULL,"-pc_is_check_consistency",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);

      ierr    = VecSum(vec1_N,&average);CHKERRQ(ierr);
      average = average / ((PetscReal)pcis->n);
      /* Synchronized printing so each rank's line appears in order */
      ierr    = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      if (pcis->pure_neumann) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* Solving the system for vec2_N (local Neumann problem; ksp_N was configured
     in PCISSetUp(), possibly with a constant null space attached) */
  ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
  /* Extracting the local interface vector out of the solution */
  ierr = VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
594 
595 
596 
597 
598 
599 
600 
601 
602 
603