xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision 8cc058d9cd56c1ccb3be12a47760ddfc446aaffc)
1 
2 #include "../src/ksp/pc/impls/is/pcis.h" /*I "petscpc.h" I*/
3 
#undef __FUNCT__
#define __FUNCT__ "PCISSetUseStiffnessScaling_IS"
/* Implementation behind PCISSetUseStiffnessScaling(): records the flag in the
   PC_IS data; it is consumed later by PCISSetUp() when the scaling vector D is
   built (diagonal-based scaling versus the constant scaling_factor). */
static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  PetscFunctionBegin;
  pcis->use_stiffness_scaling = use;
  PetscFunctionReturn(0);
}
14 
#undef __FUNCT__
#define __FUNCT__ "PCISSetUseStiffnessScaling"
/*@
 PCISSetUseStiffnessScaling - Tells PCIS to construct partition of unity using
                              local matrices' diagonal.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether or not pcis use matrix diagonal to build partition of unity.

   Level: intermediate

   Notes:
   The same behavior can be requested at run time with the option
   -pc_is_use_stiffness_scaling, which is read in PCISSetUp().
   Uses PetscTryMethod, so the call is a no-op if the PC type does not
   compose PCISSetUseStiffnessScaling_C.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
42 
43 #undef __FUNCT__
44 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling_IS"
45 static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
46 {
47   PetscErrorCode ierr;
48   PC_IS          *pcis = (PC_IS*)pc->data;
49 
50   PetscFunctionBegin;
51   ierr    = VecDestroy(&pcis->D);CHKERRQ(ierr);
52   ierr    = PetscObjectReference((PetscObject)scaling_factors);CHKERRQ(ierr);
53   pcis->D = scaling_factors;
54   PetscFunctionReturn(0);
55 }
56 
#undef __FUNCT__
#define __FUNCT__ "PCISSetSubdomainDiagonalScaling"
/*@
 PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Notes:
   Intended to use with jumping coefficients cases.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* No-op if the PC type has not composed PCISSetSubdomainDiagonalScaling_C */
  ierr = PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
84 
#undef __FUNCT__
#define __FUNCT__ "PCISSetSubdomainScalingFactor_IS"
/* Implementation behind PCISSetSubdomainScalingFactor(): records the constant
   scaling factor; PCISSetUp() uses it to fill the scaling vector D when
   stiffness scaling is not requested and no D was supplied by the user. */
static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  PetscFunctionBegin;
  pcis->scaling_factor = scal;
  PetscFunctionReturn(0);
}
95 
#undef __FUNCT__
#define __FUNCT__ "PCISSetSubdomainScalingFactor"
/*@
 PCISSetSubdomainScalingFactor - Set scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended to use with jumping coefficients cases.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* No-op if the PC type has not composed PCISSetSubdomainScalingFactor_C */
  ierr = PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
123 
124 
125 /* -------------------------------------------------------------------------- */
126 /*
127    PCISSetUp -
128 */
129 #undef __FUNCT__
130 #define __FUNCT__ "PCISSetUp"
131 PetscErrorCode  PCISSetUp(PC pc)
132 {
133   PC_IS          *pcis  = (PC_IS*)(pc->data);
134   Mat_IS         *matis = (Mat_IS*)pc->mat->data;
135   PetscInt       i;
136   PetscErrorCode ierr;
137   PetscBool      flg;
138   Vec            counter;
139 
140   PetscFunctionBegin;
141   ierr = PetscObjectTypeCompare((PetscObject)pc->mat,MATIS,&flg);CHKERRQ(ierr);
142   if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS");
143 
144   pcis->pure_neumann = matis->pure_neumann;
145 
146   /*
147     Creating the local vector vec1_N, containing the inverse of the number
148     of subdomains to which each local node (either owned or ghost)
149     pertains. To accomplish that, we scatter local vectors of 1's to
150     a global vector (adding the values); scatter the result back to
151     local vectors and finally invert the result.
152   */
153   ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
154   ierr = MatGetVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */
155   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
156   ierr = VecSet(pcis->vec1_N,1.0);CHKERRQ(ierr);
157   ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
158   ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
159   ierr = VecScatterBegin(matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
160   ierr = VecScatterEnd  (matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
161   /*
162     Creating local and global index sets for interior and
163     inteface nodes. Notice that interior nodes have D[i]==1.0.
164   */
165   {
166     PetscInt    n_I;
167     PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
168     PetscScalar *array;
169     /* Identifying interior and interface nodes, in local numbering */
170     ierr = VecGetSize(pcis->vec1_N,&pcis->n);CHKERRQ(ierr);
171     ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
172     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);CHKERRQ(ierr);
173     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);CHKERRQ(ierr);
174     for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
175       if (array[i] == 1.0) {
176         idx_I_local[n_I] = i;
177         n_I++;
178       } else {
179         idx_B_local[pcis->n_B] = i;
180         pcis->n_B++;
181       }
182     }
183     /* Getting the global numbering */
184     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
185     idx_I_global = idx_B_local + pcis->n_B;
186     ierr         = ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
187     ierr         = ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);
188 
189     /* Creating the index sets. */
190     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);CHKERRQ(ierr);
191     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);CHKERRQ(ierr);
192     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);CHKERRQ(ierr);
193     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);CHKERRQ(ierr);
194 
195     /* Freeing memory and restoring arrays */
196     ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
197     ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
198     ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
199   }
200 
201   /*
202     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
203     is such that interior nodes come first than the interface ones, we have
204 
205     [           |      ]
206     [    A_II   | A_IB ]
207     A = [           |      ]
208     [-----------+------]
209     [    A_BI   | A_BB ]
210   */
211 
212   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
213   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
214   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
215   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
216 
217   /*
218     Creating work vectors and arrays
219   */
220   /* pcis->vec1_N has already been created */
221   ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
222   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
223   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
224   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
225   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
226   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
227   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
228   ierr = MatGetVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
229   ierr = PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);CHKERRQ(ierr);
230 
231   /* Creating the scatter contexts */
232   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
233   ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
234   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);
235 
236   /* Creating scaling "matrix" D */
237   ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL);CHKERRQ(ierr);
238   if (!pcis->D) {
239     ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
240     if (!pcis->use_stiffness_scaling) {
241       ierr = VecSet(pcis->D,pcis->scaling_factor);CHKERRQ(ierr);
242     } else {
243       ierr = MatGetDiagonal(matis->A,pcis->vec1_N);CHKERRQ(ierr);
244       ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
245       ierr = VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
246     }
247   }
248   ierr = VecCopy(pcis->D,pcis->vec1_B);CHKERRQ(ierr);
249   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
250   ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
251   ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
252   ierr = VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
253   ierr = VecScatterEnd  (pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
254   ierr = VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);CHKERRQ(ierr);
255 
256   /* See historical note 01, at the bottom of this file. */
257 
258   /*
259     Creating the KSP contexts for the local Dirichlet and Neumann problems.
260   */
261   {
262     PC pc_ctx;
263     /* Dirichlet */
264     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
265     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr);
266     ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
267     ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
268     ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
269     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
270     ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
271     ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
272     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
273     ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
274     /* Neumann */
275     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
276     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);CHKERRQ(ierr);
277     ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);CHKERRQ(ierr);
278     ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
279     ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
280     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
281     ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
282     ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
283     {
284       PetscBool damp_fixed                    = PETSC_FALSE,
285                 remove_nullspace_fixed        = PETSC_FALSE,
286                 set_damping_factor_floating   = PETSC_FALSE,
287                 not_damp_floating             = PETSC_FALSE,
288                 not_remove_nullspace_floating = PETSC_FALSE;
289       PetscReal fixed_factor,
290                 floating_factor;
291 
292       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
293       if (!damp_fixed) fixed_factor = 0.0;
294       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL);CHKERRQ(ierr);
295 
296       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL);CHKERRQ(ierr);
297 
298       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
299                               &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
300       if (!set_damping_factor_floating) floating_factor = 0.0;
301       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL);CHKERRQ(ierr);
302       if (!set_damping_factor_floating) floating_factor = 1.e-12;
303 
304       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL);CHKERRQ(ierr);
305 
306       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL);CHKERRQ(ierr);
307 
308       if (pcis->pure_neumann) {  /* floating subdomain */
309         if (!(not_damp_floating)) {
310           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
311           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
312         }
313         if (!(not_remove_nullspace_floating)) {
314           MatNullSpace nullsp;
315           ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);CHKERRQ(ierr);
316           ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
317           ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
318         }
319       } else {  /* fixed subdomain */
320         if (damp_fixed) {
321           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
322           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
323         }
324         if (remove_nullspace_fixed) {
325           MatNullSpace nullsp;
326           ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);CHKERRQ(ierr);
327           ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
328           ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
329         }
330       }
331     }
332     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
333     ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
334   }
335 
336   ierr = ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
337 
338   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
339 
340   ierr = VecDestroy(&counter);CHKERRQ(ierr);
341   PetscFunctionReturn(0);
342 }
343 
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy - Frees every object created by PCISSetUp() / PCISCreate():
   index sets, submatrices, local solvers, work vectors, scatters and the
   composed setter functions.
*/
#undef __FUNCT__
#define __FUNCT__ "PCISDestroy"
PetscErrorCode  PCISDestroy(PC pc)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* All Destroy routines below are safe on NULL handles, so no setup check is needed */
  ierr = ISDestroy(&pcis->is_B_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_B_global);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_global);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->D);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_N);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_D);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_B);CHKERRQ(ierr);
  ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);
  if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
    /* NOTE(review): the mapping argument is NULL here — presumably this variant of
       RestoreInfo only frees the arrays obtained in PCISSetUp(); verify against the
       ISLocalToGlobalMappingRestoreInfo implementation of this PETSc version */
    ierr = ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
  }
  /* Composing a NULL function pointer removes the methods installed by PCISCreate() */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C","",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
388 
389 /* -------------------------------------------------------------------------- */
390 /*
391    PCISCreate -
392 */
393 #undef __FUNCT__
394 #define __FUNCT__ "PCISCreate"
395 PetscErrorCode  PCISCreate(PC pc)
396 {
397   PC_IS          *pcis = (PC_IS*)(pc->data);
398   PetscErrorCode ierr;
399 
400   PetscFunctionBegin;
401   pcis->is_B_local  = 0;
402   pcis->is_I_local  = 0;
403   pcis->is_B_global = 0;
404   pcis->is_I_global = 0;
405   pcis->A_II        = 0;
406   pcis->A_IB        = 0;
407   pcis->A_BI        = 0;
408   pcis->A_BB        = 0;
409   pcis->D           = 0;
410   pcis->ksp_N       = 0;
411   pcis->ksp_D      = 0;
412   pcis->vec1_N      = 0;
413   pcis->vec2_N      = 0;
414   pcis->vec1_D      = 0;
415   pcis->vec2_D      = 0;
416   pcis->vec3_D      = 0;
417   pcis->vec1_B      = 0;
418   pcis->vec2_B      = 0;
419   pcis->vec3_B      = 0;
420   pcis->vec1_global = 0;
421   pcis->work_N      = 0;
422   pcis->global_to_D = 0;
423   pcis->N_to_B      = 0;
424   pcis->global_to_B = 0;
425 
426   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
427 
428   pcis->scaling_factor = 1.0;
429   /* composing functions */
430   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C","PCISSetUseStiffnessScaling_IS",PCISSetUseStiffnessScaling_IS);CHKERRQ(ierr);
431   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","PCISSetSubdomainScalingFactor_IS",PCISSetSubdomainScalingFactor_IS);CHKERRQ(ierr);
432   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","PCISSetSubdomainDiagonalScaling_IS",PCISSetSubdomainDiagonalScaling_IS);CHKERRQ(ierr);
433   PetscFunctionReturn(0);
434 }
435 
436 /* -------------------------------------------------------------------------- */
437 /*
438    PCISApplySchur -
439 
440    Input parameters:
441 .  pc - preconditioner context
442 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
443 
444    Output parameters:
445 .  vec1_B - result of Schur complement applied to chunk
446 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
447 .  vec1_D - garbage (used as work space)
448 .  vec2_D - garbage (used as work space)
449 
450 */
451 #undef __FUNCT__
452 #define __FUNCT__ "PCISApplySchur"
453 PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
454 {
455   PetscErrorCode ierr;
456   PC_IS          *pcis = (PC_IS*)(pc->data);
457 
458   PetscFunctionBegin;
459   if (!vec2_B) vec2_B = v;
460 
461   ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);
462   ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);
463   ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);
464   ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);
465   ierr = VecAXPY(vec1_B,-1.0,vec2_B);CHKERRQ(ierr);
466   PetscFunctionReturn(0);
467 }
468 
469 /* -------------------------------------------------------------------------- */
470 /*
471    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
472    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
473    mode.
474 
475    Input parameters:
476 .  pc - preconditioner context
477 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
478 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
479 
480    Output parameter:
481 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
482 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
483 
484    Notes:
485    The entries in the array that do not correspond to interface nodes remain unaltered.
486 */
487 #undef __FUNCT__
488 #define __FUNCT__ "PCISScatterArrayNToVecB"
489 PetscErrorCode  PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
490 {
491   PetscInt       i;
492   const PetscInt *idex;
493   PetscErrorCode ierr;
494   PetscScalar    *array_B;
495   PC_IS          *pcis = (PC_IS*)(pc->data);
496 
497   PetscFunctionBegin;
498   ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
499   ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
500 
501   if (smode == SCATTER_FORWARD) {
502     if (imode == INSERT_VALUES) {
503       for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
504     } else {  /* ADD_VALUES */
505       for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
506     }
507   } else {  /* SCATTER_REVERSE */
508     if (imode == INSERT_VALUES) {
509       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
510     } else {  /* ADD_VALUES */
511       for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
512     }
513   }
514   ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
515   ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
516   PetscFunctionReturn(0);
517 }
518 
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)

*/
#undef __FUNCT__
#define __FUNCT__ "PCISApplyInvSchur"
PetscErrorCode  PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior entries, b on the interface entries */
  ierr = VecSet(vec1_N,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* Checking for consistency of the RHS (diagnostic only, enabled by an option) */
  {
    PetscBool flg = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,"-pc_is_check_consistency",&flg,NULL);CHKERRQ(ierr);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);

      /* Report the mean RHS entry per subdomain; for a floating (pure Neumann)
         subdomain a consistent RHS should have near-zero average */
      ierr    = VecSum(vec1_N,&average);CHKERRQ(ierr);
      average = average / ((PetscReal)pcis->n);
      ierr    = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      if (pcis->pure_neumann) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* Solving the system for vec2_N */
  ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
  /* Extracting the local interface vector out of the solution */
  ierr = VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
585 
586 
587 
588 
589 
590 
591 
592 
593 
594