xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision 8bc8193efbc389280f83b3d41dffa9e2d23e2ace)
1 
2 #include "src/ksp/pc/impls/is/pcis.h"
3 
4 /* -------------------------------------------------------------------------- */
5 /*
6    PCISSetUp -
7 */
#undef __FUNCT__
#define __FUNCT__ "PCISSetUp"
/*
   PCISSetUp - Common setup for Neumann-Neumann-type preconditioners built on
   matrices of type MATIS: builds the interior/interface index sets, extracts
   the local blocks A_II, A_IB, A_BI, A_BB, creates work vectors and scatter
   contexts, the partition-of-unity scaling vector D, and the local Dirichlet
   and Neumann KSP solvers.

   Input parameter:
.  pc - the preconditioner context; pc->mat must be of type MATIS
*/
PetscErrorCode PCISSetUp(PC pc)
{
  PC_IS           *pcis = (PC_IS*)(pc->data);
  Mat_IS          *matis = (Mat_IS*)pc->mat->data;
  PetscInt        i;
  PetscErrorCode  ierr;
  PetscTruth      flg;

  PetscFunctionBegin;
  /* The cast of pc->mat->data to Mat_IS above is only valid for MATIS; fail early otherwise. */
  ierr = PetscTypeCompare((PetscObject)pc->mat,MATIS,&flg);CHKERRQ(ierr);
  if (!flg){
    SETERRQ(PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS");
  }

  pcis->pure_neumann = matis->pure_neumann;

  /*
    Creating the local vector vec1_N, containing the number of subdomains to
    which each local node (either owned or ghost) pertains. To accomplish
    that, we scatter local vectors of 1's to a global vector (adding the
    values) and scatter the result back to the local vector.
    (The reciprocal is NOT taken here; it is taken later, on the interface
    part only, when building the scaling vector D below.)
  */
  {
    Vec    counter;
    PetscScalar one=1.0, zero=0.0;
    ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */
    ierr = VecSet(&zero,counter);CHKERRQ(ierr);
    ierr = VecSet(&one,pcis->vec1_N);CHKERRQ(ierr);
    /* local 1's -> global (adding): each global entry counts the sharing subdomains */
    ierr = VecScatterBegin(pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE,matis->ctx);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE,matis->ctx);CHKERRQ(ierr);
    /* global counts -> local vec1_N (including ghost nodes) */
    ierr = VecScatterBegin(counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD,matis->ctx);CHKERRQ(ierr);
    ierr = VecScatterEnd  (counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD,matis->ctx);CHKERRQ(ierr);
    ierr = VecDestroy(counter);CHKERRQ(ierr);
  }
  /*
    Creating local and global index sets for interior and
    interface nodes. Notice that interior nodes have D[i]==1.0
    (they belong to exactly one subdomain).
  */
  {
    PetscInt     n_I;
    PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscScalar *array;
    /* Identifying interior and interface nodes, in local numbering */
    ierr = VecGetSize(pcis->vec1_N,&pcis->n);CHKERRQ(ierr);
    ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
    /* Each array has room for all pcis->n nodes; only a prefix of each is used,
       which leaves vacant space reused for the global numbering below. */
    ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);CHKERRQ(ierr);
    ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);CHKERRQ(ierr);
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (array[i] == 1.0) { idx_I_local[n_I]       = i; n_I++;       }
      else                 { idx_B_local[pcis->n_B] = i; pcis->n_B++; }
    }
    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    ierr = ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);
    /* Creating the index sets. */
    ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local, &pcis->is_B_local);CHKERRQ(ierr);
    ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,&pcis->is_B_global);CHKERRQ(ierr);
    ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_local, &pcis->is_I_local);CHKERRQ(ierr);
    ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_global,&pcis->is_I_global);CHKERRQ(ierr);
    /* Freeing memory and restoring arrays (ISCreateGeneral copies the indices) */
    ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
    ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
    ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come first than the interface ones, we have

        [           |      ]
        [    A_II   | A_IB ]
    A = [           |      ]
        [-----------+------]
        [    A_BI   | A_BB ]
  */

  ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);

  /*
    Creating work vectors and arrays:
      *_N - local, all nodes (interior + interface, including ghosts)
      *_D - local, interior nodes only
      *_B - local, interface nodes only
  */
  /* pcis->vec1_N has already been created */
  ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
  ierr = MatGetVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
  ierr = PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);CHKERRQ(ierr);

  /* Creating the scatter contexts */
  ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
  ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);

  /* Creating scaling "matrix" D, from information in vec1_N:
     D[i] = 1 / (number of subdomains sharing interface node i) */
  ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecReciprocal(pcis->D);CHKERRQ(ierr);

  /* See historical note 01, at the bottom of this file. */

  /*
    Creating the KSP contexts for the local Dirichlet and Neumann problems.
  */
  {
    PC  pc_ctx;
    /* Dirichlet problem: direct (LU) solve with A_II, overridable via -is_localD_ options */
    ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
    ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
    ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
    ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
    ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
    ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
    /* Neumann problem: direct (LU) solve with the whole local matrix A,
       overridable via -is_localN_ options.  For floating (pure Neumann)
       subdomains A is singular, hence the damping/null-space handling below. */
    ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
    ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
    ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
    ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
    ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
    {
      PetscTruth damp_fixed,                    /* -pc_is_damp_fixed given? */
                 remove_nullspace_fixed,        /* -pc_is_remove_nullspace_fixed given? */
                 set_damping_factor_floating,   /* -pc_is_set_damping_factor_floating given? */
                 not_damp_floating,             /* -pc_is_not_damp_floating given? */
                 not_remove_nullspace_floating; /* -pc_is_not_remove_nullspace_floating given? */
      PetscReal  fixed_factor,
                 floating_factor;

      ierr = PetscOptionsGetReal(pc_ctx->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
      if (!damp_fixed) { fixed_factor = 0.0; }
      /* NOTE(review): this HasName re-query makes damp_fixed true even when the
         option is given without a value (then fixed_factor stays 0.0) — presumably
         intentional, but worth confirming. */
      ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_damp_fixed",&damp_fixed);CHKERRQ(ierr);

      ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed);CHKERRQ(ierr);

      ierr = PetscOptionsGetReal(pc_ctx->prefix,"-pc_is_set_damping_factor_floating",
                              &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
      if (!set_damping_factor_floating) { floating_factor = 0.0; }
      ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating);CHKERRQ(ierr);
      /* Default damping for floating subdomains when the option is absent */
      if (!set_damping_factor_floating) { floating_factor = 1.e-12; }

      ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_not_damp_floating",&not_damp_floating);CHKERRQ(ierr);

      ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating);CHKERRQ(ierr);

      if (pcis->pure_neumann) {  /* floating subdomain */
        if (!(not_damp_floating)) {
          /* Both calls are made since the actual PC type depends on runtime options;
             each is a no-op for the non-matching type. */
          ierr = PCLUSetDamping (pc_ctx,floating_factor);CHKERRQ(ierr);
          ierr = PCILUSetDamping(pc_ctx,floating_factor);CHKERRQ(ierr);
        }
        if (!(not_remove_nullspace_floating)){
          /* Remove the constant null space of the singular Neumann problem */
          MatNullSpace nullsp;
          ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
          ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
          ierr = MatNullSpaceDestroy(nullsp);CHKERRQ(ierr);
        }
      } else {  /* fixed subdomain */
        if (damp_fixed) {
          ierr = PCLUSetDamping (pc_ctx,fixed_factor);CHKERRQ(ierr);
          ierr = PCILUSetDamping(pc_ctx,fixed_factor);CHKERRQ(ierr);
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
          ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
          ierr = MatNullSpaceDestroy(nullsp);CHKERRQ(ierr);
        }
      }
    }
    ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
  }

  /* Cache neighbor/shared-node info; the matching RestoreInfo is done in PCISDestroy */
  ierr = ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
  pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
202 
203 /* -------------------------------------------------------------------------- */
204 /*
205    PCISDestroy -
206 */
207 #undef __FUNCT__
208 #define __FUNCT__ "PCISDestroy"
209 PetscErrorCode PCISDestroy(PC pc)
210 {
211   PC_IS          *pcis = (PC_IS*)(pc->data);
212   PetscErrorCode ierr;
213 
214   PetscFunctionBegin;
215   if (pcis->is_B_local)  {ierr = ISDestroy(pcis->is_B_local);CHKERRQ(ierr);}
216   if (pcis->is_I_local)  {ierr = ISDestroy(pcis->is_I_local);CHKERRQ(ierr);}
217   if (pcis->is_B_global) {ierr = ISDestroy(pcis->is_B_global);CHKERRQ(ierr);}
218   if (pcis->is_I_global) {ierr = ISDestroy(pcis->is_I_global);CHKERRQ(ierr);}
219   if (pcis->A_II)        {ierr = MatDestroy(pcis->A_II);CHKERRQ(ierr);}
220   if (pcis->A_IB)        {ierr = MatDestroy(pcis->A_IB);CHKERRQ(ierr);}
221   if (pcis->A_BI)        {ierr = MatDestroy(pcis->A_BI);CHKERRQ(ierr);}
222   if (pcis->A_BB)        {ierr = MatDestroy(pcis->A_BB);CHKERRQ(ierr);}
223   if (pcis->D)           {ierr = VecDestroy(pcis->D);CHKERRQ(ierr);}
224   if (pcis->ksp_N)      {ierr = KSPDestroy(pcis->ksp_N);CHKERRQ(ierr);}
225   if (pcis->ksp_D)      {ierr = KSPDestroy(pcis->ksp_D);CHKERRQ(ierr);}
226   if (pcis->vec1_N)      {ierr = VecDestroy(pcis->vec1_N);CHKERRQ(ierr);}
227   if (pcis->vec2_N)      {ierr = VecDestroy(pcis->vec2_N);CHKERRQ(ierr);}
228   if (pcis->vec1_D)      {ierr = VecDestroy(pcis->vec1_D);CHKERRQ(ierr);}
229   if (pcis->vec2_D)      {ierr = VecDestroy(pcis->vec2_D);CHKERRQ(ierr);}
230   if (pcis->vec3_D)      {ierr = VecDestroy(pcis->vec3_D);CHKERRQ(ierr);}
231   if (pcis->vec1_B)      {ierr = VecDestroy(pcis->vec1_B);CHKERRQ(ierr);}
232   if (pcis->vec2_B)      {ierr = VecDestroy(pcis->vec2_B);CHKERRQ(ierr);}
233   if (pcis->vec3_B)      {ierr = VecDestroy(pcis->vec3_B);CHKERRQ(ierr);}
234   if (pcis->vec1_global) {ierr = VecDestroy(pcis->vec1_global);CHKERRQ(ierr);}
235   if (pcis->work_N)      {ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);}
236   if (pcis->global_to_D) {ierr = VecScatterDestroy(pcis->global_to_D);CHKERRQ(ierr);}
237   if (pcis->N_to_B)      {ierr = VecScatterDestroy(pcis->N_to_B);CHKERRQ(ierr);}
238   if (pcis->global_to_B) {ierr = VecScatterDestroy(pcis->global_to_B);CHKERRQ(ierr);}
239   if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
240     ierr = ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
241   }
242   PetscFunctionReturn(0);
243 }
244 
245 /* -------------------------------------------------------------------------- */
246 /*
247    PCISCreate -
248 */
249 #undef __FUNCT__
250 #define __FUNCT__ "PCISCreate"
251 PetscErrorCode PCISCreate(PC pc)
252 {
253   PC_IS *pcis = (PC_IS*)(pc->data);
254 
255   PetscFunctionBegin;
256   pcis->is_B_local  = 0;
257   pcis->is_I_local  = 0;
258   pcis->is_B_global = 0;
259   pcis->is_I_global = 0;
260   pcis->A_II        = 0;
261   pcis->A_IB        = 0;
262   pcis->A_BI        = 0;
263   pcis->A_BB        = 0;
264   pcis->D           = 0;
265   pcis->ksp_N      = 0;
266   pcis->ksp_D      = 0;
267   pcis->vec1_N      = 0;
268   pcis->vec2_N      = 0;
269   pcis->vec1_D      = 0;
270   pcis->vec2_D      = 0;
271   pcis->vec3_D      = 0;
272   pcis->vec1_B      = 0;
273   pcis->vec2_B      = 0;
274   pcis->vec3_B      = 0;
275   pcis->vec1_global = 0;
276   pcis->work_N      = 0;
277   pcis->global_to_D = 0;
278   pcis->N_to_B      = 0;
279   pcis->global_to_B = 0;
280   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
281   PetscFunctionReturn(0);
282 }
283 
284 /* -------------------------------------------------------------------------- */
285 /*
286    PCISApplySchur -
287 
288    Input parameters:
289 .  pc - preconditioner context
290 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
291 
292    Output parameters:
293 .  vec1_B - result of Schur complement applied to chunk
294 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
295 .  vec1_D - garbage (used as work space)
296 .  vec2_D - garbage (used as work space)
297 
298 */
299 #undef __FUNCT__
300 #define __FUNCT__ "PCIterSuApplySchur"
301 PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
302 {
303   PetscErrorCode ierr;
304   PetscScalar    m_one = -1.0;
305   PC_IS          *pcis = (PC_IS*)(pc->data);
306 
307   PetscFunctionBegin;
308   if (!vec2_B) { vec2_B = v; }
309 
310   ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);
311   ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);
312   ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);
313   ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);
314   ierr = VecAXPY(&m_one,vec2_B,vec1_B);CHKERRQ(ierr);
315   PetscFunctionReturn(0);
316 }
317 
318 /* -------------------------------------------------------------------------- */
319 /*
320    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
321    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
322    mode.
323 
324    Input parameters:
325 .  pc - preconditioner context
326 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
327 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
328 
329    Output parameter:
330 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
331 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
332 
333    Notes:
334    The entries in the array that do not correspond to interface nodes remain unaltered.
335 */
336 #undef __FUNCT__
337 #define __FUNCT__ "PCISScatterArrayNToVecB"
338 PetscErrorCode PCISScatterArrayNToVecB (PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
339 {
340   PetscInt       i, *idex;
341   PetscErrorCode ierr;
342   PetscScalar    *array_B;
343   PC_IS          *pcis = (PC_IS*)(pc->data);
344 
345   PetscFunctionBegin;
346   ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
347   ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
348 
349   if (smode == SCATTER_FORWARD) {
350     if (imode == INSERT_VALUES) {
351       for (i=0; i<pcis->n_B; i++) { array_B[i]  = array_N[idex[i]]; }
352     } else {  /* ADD_VALUES */
353       for (i=0; i<pcis->n_B; i++) { array_B[i] += array_N[idex[i]]; }
354     }
355   } else {  /* SCATTER_REVERSE */
356     if (imode == INSERT_VALUES) {
357       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]]  = array_B[i]; }
358     } else {  /* ADD_VALUES */
359       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]] += array_B[i]; }
360     }
361   }
362   ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
363   ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
364   PetscFunctionReturn(0);
365 }
366 
367 /* -------------------------------------------------------------------------- */
368 /*
369    PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
370    More precisely, solves the problem:
371                                         [ A_II  A_IB ] [ . ]   [ 0 ]
372                                         [            ] [   ] = [   ]
373                                         [ A_BI  A_BB ] [ x ]   [ b ]
374 
375    Input parameters:
376 .  pc - preconditioner context
377 .  b - vector of local interface nodes (including ghosts)
378 
379    Output parameters:
380 .  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
381        complement to b
382 .  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
383 .  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
384 
385 */
386 #undef __FUNCT__
387 #define __FUNCT__ "PCISApplyInvSchur"
388 PetscErrorCode PCISApplyInvSchur (PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
389 {
390   PetscErrorCode ierr;
391   PC_IS          *pcis = (PC_IS*)(pc->data);
392   PetscScalar    zero  = 0.0;
393 
394   PetscFunctionBegin;
395   /*
396     Neumann solvers.
397     Applying the inverse of the local Schur complement, i.e, solving a Neumann
398     Problem with zero at the interior nodes of the RHS and extracting the interface
399     part of the solution. inverse Schur complement is applied to b and the result
400     is stored in x.
401   */
402   /* Setting the RHS vec1_N */
403   ierr = VecSet(&zero,vec1_N);CHKERRQ(ierr);
404   ierr = VecScatterBegin(b,vec1_N,INSERT_VALUES,SCATTER_REVERSE,pcis->N_to_B);CHKERRQ(ierr);
405   ierr = VecScatterEnd  (b,vec1_N,INSERT_VALUES,SCATTER_REVERSE,pcis->N_to_B);CHKERRQ(ierr);
406   /* Checking for consistency of the RHS */
407   {
408     PetscTruth flg;
409     ierr = PetscOptionsHasName(PETSC_NULL,"-pc_is_check_consistency",&flg);CHKERRQ(ierr);
410     if (flg) {
411       PetscScalar average;
412       ierr = VecSum(vec1_N,&average);CHKERRQ(ierr);
413       average = average / ((PetscReal)pcis->n);
414       if (pcis->pure_neumann) {
415         ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_(pc->comm),"Subdomain %04d is floating. Average = % 1.14e\n",
416                                              PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
417       } else {
418         ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_(pc->comm),"Subdomain %04d is fixed.    Average = % 1.14e\n",
419                                              PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
420       }
421       PetscViewerFlush(PETSC_VIEWER_STDOUT_(pc->comm));
422     }
423   }
424   /* Solving the system for vec2_N */
425   ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
426   /* Extracting the local interface vector out of the solution */
427   ierr = VecScatterBegin(vec2_N,x,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
428   ierr = VecScatterEnd  (vec2_N,x,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
429   PetscFunctionReturn(0);
430 }
431 
432 
433 
434 
435 
436 
437 
438 
439 
440