xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision 7a4fe282d1b349e95b3be72d69d8dd3d3bcd7bc6)
1 #define PETSCKSP_DLL
2 
3 #include "../src/ksp/pc/impls/is/pcis.h"
4 
5 /* -------------------------------------------------------------------------- */
6 /*
7    PCISSetUp -
8 */
9 #undef __FUNCT__
10 #define __FUNCT__ "PCISSetUp"
11 PetscErrorCode PETSCKSP_DLLEXPORT PCISSetUp(PC pc)
12 {
13   PC_IS           *pcis = (PC_IS*)(pc->data);
14   Mat_IS          *matis = (Mat_IS*)pc->mat->data;
15   PetscInt        i;
16   PetscErrorCode  ierr;
17   PetscTruth      flg;
18 
19   PetscFunctionBegin;
20   ierr = PetscTypeCompare((PetscObject)pc->mat,MATIS,&flg);CHKERRQ(ierr);
21   if (!flg){
22     SETERRQ(PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS");
23   }
24 
25   pcis->pure_neumann = matis->pure_neumann;
26 
27   /*
28     Creating the local vector vec1_N, containing the inverse of the number
29     of subdomains to which each local node (either owned or ghost)
30     pertains. To accomplish that, we scatter local vectors of 1's to
31     a global vector (adding the values); scatter the result back to
32     local vectors and finally invert the result.
33   */
34   {
35     Vec    counter;
36     ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
37     ierr = MatGetVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */
38     ierr = VecSet(counter,0.0);CHKERRQ(ierr);
39     ierr = VecSet(pcis->vec1_N,1.0);CHKERRQ(ierr);
40     ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
41     ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
42     ierr = VecScatterBegin(matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
43     ierr = VecScatterEnd  (matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
44     ierr = VecDestroy(counter);CHKERRQ(ierr);
45   }
46   /*
47     Creating local and global index sets for interior and
48     inteface nodes. Notice that interior nodes have D[i]==1.0.
49   */
50   {
51     PetscInt     n_I;
52     PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
53     PetscScalar *array;
54     /* Identifying interior and interface nodes, in local numbering */
55     ierr = VecGetSize(pcis->vec1_N,&pcis->n);CHKERRQ(ierr);
56     ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
57     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);CHKERRQ(ierr);
58     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);CHKERRQ(ierr);
59     for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
60       if (array[i] == 1.0) { idx_I_local[n_I]       = i; n_I++;       }
61       else                 { idx_B_local[pcis->n_B] = i; pcis->n_B++; }
62     }
63     /* Getting the global numbering */
64     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
65     idx_I_global = idx_B_local + pcis->n_B;
66     ierr = ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
67     ierr = ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);
68     /* Creating the index sets. */
69     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local, &pcis->is_B_local);CHKERRQ(ierr);
70     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,&pcis->is_B_global);CHKERRQ(ierr);
71     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_local, &pcis->is_I_local);CHKERRQ(ierr);
72     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_global,&pcis->is_I_global);CHKERRQ(ierr);
73     /* Freeing memory and restoring arrays */
74     ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
75     ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
76     ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
77   }
78 
79   /*
80     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
81     is such that interior nodes come first than the interface ones, we have
82 
83     [           |      ]
84     [    A_II   | A_IB ]
85     A = [           |      ]
86     [-----------+------]
87     [    A_BI   | A_BB ]
88   */
89 
90   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
91   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
92   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
93   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
94 
95   /*
96     Creating work vectors and arrays
97   */
98   /* pcis->vec1_N has already been created */
99   ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
100   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
101   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
102   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
103   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
104   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
105   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
106   ierr = MatGetVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
107   ierr = PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);CHKERRQ(ierr);
108 
109   /* Creating the scatter contexts */
110   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
111   ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
112   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);
113 
114   /* Creating scaling "matrix" D, from information in vec1_N */
115   ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
116   ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
117   ierr = VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
118   ierr = VecReciprocal(pcis->D);CHKERRQ(ierr);
119 
120   /* See historical note 01, at the bottom of this file. */
121 
122   /*
123     Creating the KSP contexts for the local Dirichlet and Neumann problems.
124   */
125   {
126     PC  pc_ctx;
127     /* Dirichlet */
128     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
129     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr);
130     ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
131     ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
132     ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
133     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
134     ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
135     ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
136     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
137     ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
138     /* Neumann */
139     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
140     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);CHKERRQ(ierr);
141     ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);CHKERRQ(ierr);
142     ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
143     ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
144     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
145     ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
146     ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
147     {
148       PetscTruth damp_fixed,
149                  remove_nullspace_fixed,
150                  set_damping_factor_floating,
151                  not_damp_floating,
152                  not_remove_nullspace_floating;
153       PetscReal  fixed_factor,
154                  floating_factor;
155 
156       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
157       if (!damp_fixed) { fixed_factor = 0.0; }
158       ierr = PetscOptionsHasName(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed);CHKERRQ(ierr);
159 
160       ierr = PetscOptionsHasName(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed);CHKERRQ(ierr);
161 
162       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
163 			      &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
164       if (!set_damping_factor_floating) { floating_factor = 0.0; }
165       ierr = PetscOptionsHasName(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating);CHKERRQ(ierr);
166       if (!set_damping_factor_floating) { floating_factor = 1.e-12; }
167 
168       ierr = PetscOptionsHasName(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating);CHKERRQ(ierr);
169 
170       ierr = PetscOptionsHasName(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating);CHKERRQ(ierr);
171 
172       if (pcis->pure_neumann) {  /* floating subdomain */
173 	if (!(not_damp_floating)) {
174           ierr = PCFactorSetShiftNonzero(pc_ctx,floating_factor);CHKERRQ(ierr);
175 	}
176 	if (!(not_remove_nullspace_floating)){
177 	  MatNullSpace nullsp;
178 	  ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
179 	  ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
180 	  ierr = MatNullSpaceDestroy(nullsp);CHKERRQ(ierr);
181 	}
182       } else {  /* fixed subdomain */
183 	if (damp_fixed) {
184           ierr = PCFactorSetShiftNonzero(pc_ctx,fixed_factor);CHKERRQ(ierr);
185 	}
186 	if (remove_nullspace_fixed) {
187 	  MatNullSpace nullsp;
188 	  ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
189 	  ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
190 	  ierr = MatNullSpaceDestroy(nullsp);CHKERRQ(ierr);
191 	}
192       }
193     }
194     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
195     ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
196   }
197 
198   ierr = ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
199   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
200   PetscFunctionReturn(0);
201 }
202 
203 /* -------------------------------------------------------------------------- */
204 /*
205    PCISDestroy -
206 */
207 #undef __FUNCT__
208 #define __FUNCT__ "PCISDestroy"
209 PetscErrorCode PETSCKSP_DLLEXPORT PCISDestroy(PC pc)
210 {
211   PC_IS          *pcis = (PC_IS*)(pc->data);
212   PetscErrorCode ierr;
213 
214   PetscFunctionBegin;
215   if (pcis->is_B_local)  {ierr = ISDestroy(pcis->is_B_local);CHKERRQ(ierr);}
216   if (pcis->is_I_local)  {ierr = ISDestroy(pcis->is_I_local);CHKERRQ(ierr);}
217   if (pcis->is_B_global) {ierr = ISDestroy(pcis->is_B_global);CHKERRQ(ierr);}
218   if (pcis->is_I_global) {ierr = ISDestroy(pcis->is_I_global);CHKERRQ(ierr);}
219   if (pcis->A_II)        {ierr = MatDestroy(pcis->A_II);CHKERRQ(ierr);}
220   if (pcis->A_IB)        {ierr = MatDestroy(pcis->A_IB);CHKERRQ(ierr);}
221   if (pcis->A_BI)        {ierr = MatDestroy(pcis->A_BI);CHKERRQ(ierr);}
222   if (pcis->A_BB)        {ierr = MatDestroy(pcis->A_BB);CHKERRQ(ierr);}
223   if (pcis->D)           {ierr = VecDestroy(pcis->D);CHKERRQ(ierr);}
224   if (pcis->ksp_N)      {ierr = KSPDestroy(pcis->ksp_N);CHKERRQ(ierr);}
225   if (pcis->ksp_D)      {ierr = KSPDestroy(pcis->ksp_D);CHKERRQ(ierr);}
226   if (pcis->vec1_N)      {ierr = VecDestroy(pcis->vec1_N);CHKERRQ(ierr);}
227   if (pcis->vec2_N)      {ierr = VecDestroy(pcis->vec2_N);CHKERRQ(ierr);}
228   if (pcis->vec1_D)      {ierr = VecDestroy(pcis->vec1_D);CHKERRQ(ierr);}
229   if (pcis->vec2_D)      {ierr = VecDestroy(pcis->vec2_D);CHKERRQ(ierr);}
230   if (pcis->vec3_D)      {ierr = VecDestroy(pcis->vec3_D);CHKERRQ(ierr);}
231   if (pcis->vec1_B)      {ierr = VecDestroy(pcis->vec1_B);CHKERRQ(ierr);}
232   if (pcis->vec2_B)      {ierr = VecDestroy(pcis->vec2_B);CHKERRQ(ierr);}
233   if (pcis->vec3_B)      {ierr = VecDestroy(pcis->vec3_B);CHKERRQ(ierr);}
234   if (pcis->vec1_global) {ierr = VecDestroy(pcis->vec1_global);CHKERRQ(ierr);}
235   if (pcis->global_to_D) {ierr = VecScatterDestroy(pcis->global_to_D);CHKERRQ(ierr);}
236   if (pcis->N_to_B)      {ierr = VecScatterDestroy(pcis->N_to_B);CHKERRQ(ierr);}
237   if (pcis->global_to_B) {ierr = VecScatterDestroy(pcis->global_to_B);CHKERRQ(ierr);}
238   ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);
239   if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
240     ierr = ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
241   }
242   PetscFunctionReturn(0);
243 }
244 
245 /* -------------------------------------------------------------------------- */
246 /*
247    PCISCreate -
248 */
249 #undef __FUNCT__
250 #define __FUNCT__ "PCISCreate"
251 PetscErrorCode PETSCKSP_DLLEXPORT PCISCreate(PC pc)
252 {
253   PC_IS *pcis = (PC_IS*)(pc->data);
254 
255   PetscFunctionBegin;
256   pcis->is_B_local  = 0;
257   pcis->is_I_local  = 0;
258   pcis->is_B_global = 0;
259   pcis->is_I_global = 0;
260   pcis->A_II        = 0;
261   pcis->A_IB        = 0;
262   pcis->A_BI        = 0;
263   pcis->A_BB        = 0;
264   pcis->D           = 0;
265   pcis->ksp_N      = 0;
266   pcis->ksp_D      = 0;
267   pcis->vec1_N      = 0;
268   pcis->vec2_N      = 0;
269   pcis->vec1_D      = 0;
270   pcis->vec2_D      = 0;
271   pcis->vec3_D      = 0;
272   pcis->vec1_B      = 0;
273   pcis->vec2_B      = 0;
274   pcis->vec3_B      = 0;
275   pcis->vec1_global = 0;
276   pcis->work_N      = 0;
277   pcis->global_to_D = 0;
278   pcis->N_to_B      = 0;
279   pcis->global_to_B = 0;
280   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
281   PetscFunctionReturn(0);
282 }
283 
284 /* -------------------------------------------------------------------------- */
285 /*
286    PCISApplySchur -
287 
288    Input parameters:
289 .  pc - preconditioner context
290 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
291 
292    Output parameters:
293 .  vec1_B - result of Schur complement applied to chunk
294 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
295 .  vec1_D - garbage (used as work space)
296 .  vec2_D - garbage (used as work space)
297 
298 */
299 #undef __FUNCT__
300 #define __FUNCT__ "PCIterSuApplySchur"
301 PetscErrorCode PETSCKSP_DLLEXPORT PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
302 {
303   PetscErrorCode ierr;
304   PC_IS          *pcis = (PC_IS*)(pc->data);
305 
306   PetscFunctionBegin;
307   if (!vec2_B) { vec2_B = v; }
308 
309   ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);
310   ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);
311   ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);
312   ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);
313   ierr = VecAXPY(vec1_B,-1.0,vec2_B);CHKERRQ(ierr);
314   PetscFunctionReturn(0);
315 }
316 
317 /* -------------------------------------------------------------------------- */
318 /*
319    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
320    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
321    mode.
322 
323    Input parameters:
324 .  pc - preconditioner context
325 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
326 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
327 
328    Output parameter:
329 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
330 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
331 
332    Notes:
333    The entries in the array that do not correspond to interface nodes remain unaltered.
334 */
335 #undef __FUNCT__
336 #define __FUNCT__ "PCISScatterArrayNToVecB"
337 PetscErrorCode PETSCKSP_DLLEXPORT PCISScatterArrayNToVecB (PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
338 {
339   PetscInt       i;
340   const PetscInt *idex;
341   PetscErrorCode ierr;
342   PetscScalar    *array_B;
343   PC_IS          *pcis = (PC_IS*)(pc->data);
344 
345   PetscFunctionBegin;
346   ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
347   ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
348 
349   if (smode == SCATTER_FORWARD) {
350     if (imode == INSERT_VALUES) {
351       for (i=0; i<pcis->n_B; i++) { array_B[i]  = array_N[idex[i]]; }
352     } else {  /* ADD_VALUES */
353       for (i=0; i<pcis->n_B; i++) { array_B[i] += array_N[idex[i]]; }
354     }
355   } else {  /* SCATTER_REVERSE */
356     if (imode == INSERT_VALUES) {
357       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]]  = array_B[i]; }
358     } else {  /* ADD_VALUES */
359       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]] += array_B[i]; }
360     }
361   }
362   ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
363   ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
364   PetscFunctionReturn(0);
365 }
366 
367 /* -------------------------------------------------------------------------- */
368 /*
369    PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
370    More precisely, solves the problem:
371                                         [ A_II  A_IB ] [ . ]   [ 0 ]
372                                         [            ] [   ] = [   ]
373                                         [ A_BI  A_BB ] [ x ]   [ b ]
374 
375    Input parameters:
376 .  pc - preconditioner context
377 .  b - vector of local interface nodes (including ghosts)
378 
379    Output parameters:
380 .  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
381        complement to b
382 .  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
383 .  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
384 
385 */
386 #undef __FUNCT__
387 #define __FUNCT__ "PCISApplyInvSchur"
PetscErrorCode PETSCKSP_DLLEXPORT PCISApplyInvSchur (PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e., solving a Neumann
    problem with zero at the interior nodes of the RHS and extracting the
    interface part of the solution. The inverse Schur complement is applied
    to b and the result is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior, b on the interface */
  ierr = VecSet(vec1_N,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* Checking for consistency of the RHS (optional diagnostic, enabled with
     -pc_is_check_consistency): for a floating (pure Neumann) subdomain the
     RHS should be orthogonal to the constant null space, so its average
     should be (numerically) zero. */
  {
    PetscTruth flg;
    ierr = PetscOptionsHasName(PETSC_NULL,"-pc_is_check_consistency",&flg);CHKERRQ(ierr);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);

      ierr = VecSum(vec1_N,&average);CHKERRQ(ierr);
      average = average / ((PetscReal)pcis->n);
      if (pcis->pure_neumann) {

        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",
                                             PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",
                                             PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      }
      /* flush so the per-rank synchronized output is actually printed */
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  }
  /* Solving the (local Neumann) system for vec2_N */
  ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
  /* Extracting the local interface vector out of the solution */
  ierr = VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
434 
435 
436 
437 
438 
439 
440 
441 
442 
443