xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision a6dfd86ebdf75fa024070af26d51b62fd16650a3)
1 
2 #include "../src/ksp/pc/impls/is/pcis.h" /*I "petscpc.h" I*/
3 
4 EXTERN_C_BEGIN
5 #undef __FUNCT__
6 #define __FUNCT__ "PCISSetUseStiffnessScaling_IS"
7 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
8 {
9   PC_IS  *pcis = (PC_IS*)pc->data;
10 
11   PetscFunctionBegin;
12   pcis->use_stiffness_scaling = use;
13   PetscFunctionReturn(0);
14 }
15 EXTERN_C_END
#undef __FUNCT__
#define __FUNCT__ "PCISSetUseStiffnessScaling"
/*@
 PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity
                              from the diagonal of the local (stiffness) matrices.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - PETSC_TRUE if PCIS should use the matrix diagonal to build the partition of unity

   Level: intermediate

   Notes:
   Can also be set with the option -pc_is_use_stiffness_scaling.
   Silently ignored (via PetscTryMethod) if pc is not of a PCIS-based type.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
44 
45 EXTERN_C_BEGIN
46 #undef __FUNCT__
47 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling_IS"
48 static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
49 {
50   PetscErrorCode ierr;
51   PC_IS  *pcis = (PC_IS*)pc->data;
52 
53   PetscFunctionBegin;
54   ierr = VecDestroy(&pcis->D);CHKERRQ(ierr);
55   ierr = PetscObjectReference((PetscObject)scaling_factors);CHKERRQ(ierr);
56   pcis->D = scaling_factors;
57   PetscFunctionReturn(0);
58 }
59 EXTERN_C_END
60 
61 #undef __FUNCT__
62 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling"
63 /*@
64  PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.
65 
66    Not collective
67 
68    Input Parameters:
69 +  pc - the preconditioning context
70 -  scaling_factors - scaling factors for the subdomain
71 
72    Level: intermediate
73 
74    Notes:
75    Intended to use with jumping coefficients cases.
76 
77 .seealso: PCBDDC
78 @*/
79 PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
80 {
81   PetscErrorCode ierr;
82 
83   PetscFunctionBegin;
84   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
85   ierr = PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));CHKERRQ(ierr);
86   PetscFunctionReturn(0);
87 }
88 
89 EXTERN_C_BEGIN
90 #undef __FUNCT__
91 #define __FUNCT__ "PCISSetSubdomainScalingFactor_IS"
92 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
93 {
94   PC_IS  *pcis = (PC_IS*)pc->data;
95 
96   PetscFunctionBegin;
97   pcis->scaling_factor = scal;
98   PetscFunctionReturn(0);
99 }
100 EXTERN_C_END
#undef __FUNCT__
#define __FUNCT__ "PCISSetSubdomainScalingFactor"
/*@
 PCISSetSubdomainScalingFactor - Set the (constant) scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended for use with jumping-coefficient problems. The default is 1.0
   (set in PCISCreate). Used only when no diagonal scaling vector has been
   supplied and stiffness scaling is not enabled.
   Silently ignored (via PetscTryMethod) if pc is not of a PCIS-based type.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
129 
130 
131 /* -------------------------------------------------------------------------- */
132 /*
133    PCISSetUp -
134 */
135 #undef __FUNCT__
136 #define __FUNCT__ "PCISSetUp"
137 PetscErrorCode  PCISSetUp(PC pc)
138 {
139   PC_IS           *pcis = (PC_IS*)(pc->data);
140   Mat_IS          *matis = (Mat_IS*)pc->mat->data;
141   PetscInt        i;
142   PetscErrorCode  ierr;
143   PetscBool       flg;
144   Vec    counter;
145 
146   PetscFunctionBegin;
147   ierr = PetscObjectTypeCompare((PetscObject)pc->mat,MATIS,&flg);CHKERRQ(ierr);
148   if (!flg) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS");
149 
150   pcis->pure_neumann = matis->pure_neumann;
151 
152   /*
153     Creating the local vector vec1_N, containing the inverse of the number
154     of subdomains to which each local node (either owned or ghost)
155     pertains. To accomplish that, we scatter local vectors of 1's to
156     a global vector (adding the values); scatter the result back to
157     local vectors and finally invert the result.
158   */
159   ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
160   ierr = MatGetVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */
161   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
162   ierr = VecSet(pcis->vec1_N,1.0);CHKERRQ(ierr);
163   ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
164   ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
165   ierr = VecScatterBegin(matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
166   ierr = VecScatterEnd  (matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
167   /*
168     Creating local and global index sets for interior and
169     inteface nodes. Notice that interior nodes have D[i]==1.0.
170   */
171   {
172     PetscInt     n_I;
173     PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
174     PetscScalar *array;
175     /* Identifying interior and interface nodes, in local numbering */
176     ierr = VecGetSize(pcis->vec1_N,&pcis->n);CHKERRQ(ierr);
177     ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
178     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);CHKERRQ(ierr);
179     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);CHKERRQ(ierr);
180     for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
181       if (array[i] == 1.0) { idx_I_local[n_I]       = i; n_I++;       }
182       else                 { idx_B_local[pcis->n_B] = i; pcis->n_B++; }
183     }
184     /* Getting the global numbering */
185     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
186     idx_I_global = idx_B_local + pcis->n_B;
187     ierr = ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
188     ierr = ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);
189     /* Creating the index sets. */
190     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);CHKERRQ(ierr);
191     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);CHKERRQ(ierr);
192     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);CHKERRQ(ierr);
193     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);CHKERRQ(ierr);
194     /* Freeing memory and restoring arrays */
195     ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
196     ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
197     ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
198   }
199 
200   /*
201     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
202     is such that interior nodes come first than the interface ones, we have
203 
204     [           |      ]
205     [    A_II   | A_IB ]
206     A = [           |      ]
207     [-----------+------]
208     [    A_BI   | A_BB ]
209   */
210 
211   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
212   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
213   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
214   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
215 
216   /*
217     Creating work vectors and arrays
218   */
219   /* pcis->vec1_N has already been created */
220   ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
221   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
222   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
223   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
224   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
225   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
226   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
227   ierr = MatGetVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
228   ierr = PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);CHKERRQ(ierr);
229 
230   /* Creating the scatter contexts */
231   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
232   ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
233   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);
234 
235   /* Creating scaling "matrix" D */
236   ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,PETSC_NULL);CHKERRQ(ierr);
237   if ( !pcis->D ) {
238     ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
239     if (!pcis->use_stiffness_scaling) {
240       ierr = VecSet(pcis->D,pcis->scaling_factor);CHKERRQ(ierr);
241     } else {
242       ierr = MatGetDiagonal(matis->A,pcis->vec1_N);CHKERRQ(ierr);
243       ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
244       ierr = VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
245     }
246   }
247   ierr = VecCopy(pcis->D,pcis->vec1_B);CHKERRQ(ierr);
248   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
249   ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
250   ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
251   ierr = VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
252   ierr = VecScatterEnd  (pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
253   ierr = VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);CHKERRQ(ierr);
254 
255   /* See historical note 01, at the bottom of this file. */
256 
257   /*
258     Creating the KSP contexts for the local Dirichlet and Neumann problems.
259   */
260   {
261     PC  pc_ctx;
262     /* Dirichlet */
263     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
264     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr);
265     ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
266     ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
267     ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
268     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
269     ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
270     ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
271     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
272     ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
273     /* Neumann */
274     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
275     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);CHKERRQ(ierr);
276     ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);CHKERRQ(ierr);
277     ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
278     ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
279     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
280     ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
281     ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
282     {
283       PetscBool  damp_fixed = PETSC_FALSE,
284                  remove_nullspace_fixed = PETSC_FALSE,
285                  set_damping_factor_floating = PETSC_FALSE,
286                  not_damp_floating = PETSC_FALSE,
287                  not_remove_nullspace_floating = PETSC_FALSE;
288       PetscReal  fixed_factor,
289                  floating_factor;
290 
291       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
292       if (!damp_fixed) { fixed_factor = 0.0; }
293       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,PETSC_NULL);CHKERRQ(ierr);
294 
295       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,PETSC_NULL);CHKERRQ(ierr);
296 
297       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
298                               &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
299       if (!set_damping_factor_floating) { floating_factor = 0.0; }
300       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,PETSC_NULL);CHKERRQ(ierr);
301       if (!set_damping_factor_floating) { floating_factor = 1.e-12; }
302 
303       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,PETSC_NULL);CHKERRQ(ierr);
304 
305       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,PETSC_NULL);CHKERRQ(ierr);
306 
307       if (pcis->pure_neumann) {  /* floating subdomain */
308         if (!(not_damp_floating)) {
309           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
310           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
311         }
312         if (!(not_remove_nullspace_floating)) {
313           MatNullSpace nullsp;
314           ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
315           ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
316           ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
317         }
318       } else {  /* fixed subdomain */
319         if (damp_fixed) {
320           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
321           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
322         }
323         if (remove_nullspace_fixed) {
324           MatNullSpace nullsp;
325           ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
326           ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
327           ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
328         }
329       }
330     }
331     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
332     ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
333   }
334 
335   ierr = ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
336   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
337   ierr = VecDestroy(&counter);CHKERRQ(ierr);
338   PetscFunctionReturn(0);
339 }
340 
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy - Releases every object created by PCISCreate/PCISSetUp and
   removes the composed setter functions from the PC.
*/
#undef __FUNCT__
#define __FUNCT__ "PCISDestroy"
PetscErrorCode  PCISDestroy(PC pc)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* index sets for interior (I) and interface (B) nodes */
  ierr = ISDestroy(&pcis->is_B_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_B_global);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_global);CHKERRQ(ierr);
  /* the four blocks extracted from the local MATIS matrix */
  ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr);
  /* scaling vector and local solvers */
  ierr = VecDestroy(&pcis->D);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_N);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr);
  /* work vectors (all destroy functions are safe on NULL handles) */
  ierr = VecDestroy(&pcis->vec1_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_D);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_B);CHKERRQ(ierr);
  ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);
  /* give back the neighbor/shared-node arrays obtained in PCISSetUp;
     the mapping argument is 0 here — presumably unused by RestoreInfo for
     freeing the arrays (TODO confirm against the PETSc implementation) */
  if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
    ierr = ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
  }
  /* un-compose the setter methods registered in PCISCreate */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetUseStiffnessScaling_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
385 
386 /* -------------------------------------------------------------------------- */
387 /*
388    PCISCreate -
389 */
390 #undef __FUNCT__
391 #define __FUNCT__ "PCISCreate"
392 PetscErrorCode  PCISCreate(PC pc)
393 {
394   PC_IS *pcis = (PC_IS*)(pc->data);
395   PetscErrorCode ierr;
396 
397   PetscFunctionBegin;
398   pcis->is_B_local  = 0;
399   pcis->is_I_local  = 0;
400   pcis->is_B_global = 0;
401   pcis->is_I_global = 0;
402   pcis->A_II        = 0;
403   pcis->A_IB        = 0;
404   pcis->A_BI        = 0;
405   pcis->A_BB        = 0;
406   pcis->D           = 0;
407   pcis->ksp_N      = 0;
408   pcis->ksp_D      = 0;
409   pcis->vec1_N      = 0;
410   pcis->vec2_N      = 0;
411   pcis->vec1_D      = 0;
412   pcis->vec2_D      = 0;
413   pcis->vec3_D      = 0;
414   pcis->vec1_B      = 0;
415   pcis->vec2_B      = 0;
416   pcis->vec3_B      = 0;
417   pcis->vec1_global = 0;
418   pcis->work_N      = 0;
419   pcis->global_to_D = 0;
420   pcis->N_to_B      = 0;
421   pcis->global_to_B = 0;
422   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
423   pcis->scaling_factor = 1.0;
424   /* composing functions */
425   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetUseStiffnessScaling_C","PCISSetUseStiffnessScaling_IS",
426                     PCISSetUseStiffnessScaling_IS);CHKERRQ(ierr);
427   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","PCISSetSubdomainScalingFactor_IS",
428                     PCISSetSubdomainScalingFactor_IS);CHKERRQ(ierr);
429   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","PCISSetSubdomainDiagonalScaling_IS",
430                     PCISSetSubdomainDiagonalScaling_IS);CHKERRQ(ierr);
431   PetscFunctionReturn(0);
432 }
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur - Applies the Schur complement
                    S = A_BB - A_BI * inv(A_II) * A_IB
   to an interface vector, using the local Dirichlet solver ksp_D.

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of Schur complement applied to chunk
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)

*/
#undef __FUNCT__
#define __FUNCT__ "PCISApplySchur"
PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /* caller may pass vec2_B == NULL to let v double as workspace (v is then overwritten) */
  if (!vec2_B) { vec2_B = v; }

  ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);        /* vec1_B = A_BB * v              */
  ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);        /* vec1_D = A_IB * v              */
  ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr); /* vec2_D = inv(A_II) * vec1_D    */
  ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);   /* vec2_B = A_BI * vec2_D         */
  ierr = VecAXPY(vec1_B,-1.0,vec2_B);CHKERRQ(ierr);         /* vec1_B = (A_BB - A_BI inv(A_II) A_IB) v */
  PetscFunctionReturn(0);
}
466 
467 /* -------------------------------------------------------------------------- */
468 /*
469    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
470    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
471    mode.
472 
473    Input parameters:
474 .  pc - preconditioner context
475 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
476 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
477 
478    Output parameter:
479 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
480 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
481 
482    Notes:
483    The entries in the array that do not correspond to interface nodes remain unaltered.
484 */
485 #undef __FUNCT__
486 #define __FUNCT__ "PCISScatterArrayNToVecB"
487 PetscErrorCode  PCISScatterArrayNToVecB (PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
488 {
489   PetscInt       i;
490   const PetscInt *idex;
491   PetscErrorCode ierr;
492   PetscScalar    *array_B;
493   PC_IS          *pcis = (PC_IS*)(pc->data);
494 
495   PetscFunctionBegin;
496   ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
497   ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
498 
499   if (smode == SCATTER_FORWARD) {
500     if (imode == INSERT_VALUES) {
501       for (i=0; i<pcis->n_B; i++) { array_B[i]  = array_N[idex[i]]; }
502     } else {  /* ADD_VALUES */
503       for (i=0; i<pcis->n_B; i++) { array_B[i] += array_N[idex[i]]; }
504     }
505   } else {  /* SCATTER_REVERSE */
506     if (imode == INSERT_VALUES) {
507       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]]  = array_B[i]; }
508     } else {  /* ADD_VALUES */
509       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]] += array_B[i]; }
510     }
511   }
512   ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
513   ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
514   PetscFunctionReturn(0);
515 }
516 
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)

*/
#undef __FUNCT__
#define __FUNCT__ "PCISApplyInvSchur"
PetscErrorCode  PCISApplyInvSchur (PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior entries, b on the interface entries */
  ierr = VecSet(vec1_N,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* Checking for consistency of the RHS (debug aid, enabled by -pc_is_check_consistency):
     for a floating (pure Neumann) subdomain the RHS should be orthogonal to the
     constant null space, so its average should be ~0 */
  {
    PetscBool  flg = PETSC_FALSE;
    ierr = PetscOptionsGetBool(PETSC_NULL,"-pc_is_check_consistency",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);

      ierr = VecSum(vec1_N,&average);CHKERRQ(ierr);
      average = average / ((PetscReal)pcis->n);
      /* synchronized printing so every rank's line appears in order */
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      if (pcis->pure_neumann) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* Solving the system for vec2_N */
  ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
  /* Extracting the local interface vector out of the solution */
  ierr = VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
583 
584 
585 
586 
587 
588 
589 
590 
591 
592