xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision ee45ca4afdde1af4d1deda7cd4dc1a4a63a3ea97)
1 
2 #include "src/ksp/pc/impls/is/pcis.h"
3 #include "src/ksp/pc/impls/factor/lu/lu.h"
4 
5 /* -------------------------------------------------------------------------- */
6 /*
7    PCISSetUp - sets up the data common to the Neumann-Neumann family of preconditioners
7    (index sets, local blocks, scatter contexts, and local solvers); requires pc->mat of type MATIS.
8 */
9 #undef __FUNCT__
10 #define __FUNCT__ "PCISSetUp"
11 PetscErrorCode PCISSetUp(PC pc)
12 {
13   PC_IS           *pcis = (PC_IS*)(pc->data);
14   Mat_IS          *matis = (Mat_IS*)pc->mat->data;
15   PetscInt        i;
16   PetscErrorCode  ierr;
17   PetscTruth      flg;
18 
19   PetscFunctionBegin;
20   ierr = PetscTypeCompare((PetscObject)pc->mat,MATIS,&flg);CHKERRQ(ierr);
21   if (!flg){
22     SETERRQ(PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS");
23   }
24 
25   pcis->pure_neumann = matis->pure_neumann;
26 
27   /*
28     Creating the local vector vec1_N, containing the inverse of the number
29     of subdomains to which each local node (either owned or ghost)
30     pertains. To accomplish that, we scatter local vectors of 1's to
31     a global vector (adding the values); scatter the result back to
32     local vectors and finally invert the result.
33   */
34   {
35     Vec    counter;
36     PetscScalar one=1.0, zero=0.0;
37     ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
38     ierr = MatGetVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */
39     ierr = VecSet(&zero,counter);CHKERRQ(ierr);
40     ierr = VecSet(&one,pcis->vec1_N);CHKERRQ(ierr);
41     ierr = VecScatterBegin(pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE,matis->ctx);CHKERRQ(ierr);
42     ierr = VecScatterEnd  (pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE,matis->ctx);CHKERRQ(ierr);
43     ierr = VecScatterBegin(counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD,matis->ctx);CHKERRQ(ierr);
44     ierr = VecScatterEnd  (counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD,matis->ctx);CHKERRQ(ierr);
45     ierr = VecDestroy(counter);CHKERRQ(ierr);
46   }
47   /*
48     Creating local and global index sets for interior and
49     inteface nodes. Notice that interior nodes have D[i]==1.0.
50   */
51   {
52     PetscInt     n_I;
53     PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
54     PetscScalar *array;
55     /* Identifying interior and interface nodes, in local numbering */
56     ierr = VecGetSize(pcis->vec1_N,&pcis->n);CHKERRQ(ierr);
57     ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
58     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);CHKERRQ(ierr);
59     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);CHKERRQ(ierr);
60     for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
61       if (array[i] == 1.0) { idx_I_local[n_I]       = i; n_I++;       }
62       else                 { idx_B_local[pcis->n_B] = i; pcis->n_B++; }
63     }
64     /* Getting the global numbering */
65     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
66     idx_I_global = idx_B_local + pcis->n_B;
67     ierr = ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
68     ierr = ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);
69     /* Creating the index sets. */
70     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local, &pcis->is_B_local);CHKERRQ(ierr);
71     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,&pcis->is_B_global);CHKERRQ(ierr);
72     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_local, &pcis->is_I_local);CHKERRQ(ierr);
73     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_global,&pcis->is_I_global);CHKERRQ(ierr);
74     /* Freeing memory and restoring arrays */
75     ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
76     ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
77     ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
78   }
79 
80   /*
81     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
82     is such that interior nodes come first than the interface ones, we have
83 
84     [           |      ]
85     [    A_II   | A_IB ]
86     A = [           |      ]
87     [-----------+------]
88     [    A_BI   | A_BB ]
89   */
90 
91   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
92   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
93   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
94   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,PETSC_DECIDE,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
95 
96   /*
97     Creating work vectors and arrays
98   */
99   /* pcis->vec1_N has already been created */
100   ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
101   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
102   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
103   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
104   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
105   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
106   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
107   ierr = MatGetVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
108   ierr = PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);CHKERRQ(ierr);
109 
110   /* Creating the scatter contexts */
111   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
112   ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
113   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);
114 
115   /* Creating scaling "matrix" D, from information in vec1_N */
116   ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
117   ierr = VecScatterBegin(pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
118   ierr = VecScatterEnd  (pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
119   ierr = VecReciprocal(pcis->D);CHKERRQ(ierr);
120 
121   /* See historical note 01, at the bottom of this file. */
122 
123   /*
124     Creating the KSP contexts for the local Dirichlet and Neumann problems.
125   */
126   {
127     PC  pc_ctx;
128     /* Dirichlet */
129     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
130     ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
131     ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
132     ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
133     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
134     ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
135     ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
136     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
137     ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
138     /* Neumann */
139     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
140     ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);CHKERRQ(ierr);
141     ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
142     ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
143     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
144     ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
145     ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
146     {
147       PetscTruth damp_fixed,
148                  remove_nullspace_fixed,
149                  set_damping_factor_floating,
150                  not_damp_floating,
151                  not_remove_nullspace_floating;
152       PetscReal  fixed_factor,
153                  floating_factor;
154 
155       ierr = PetscOptionsGetReal(pc_ctx->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
156       if (!damp_fixed) { fixed_factor = 0.0; }
157       ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_damp_fixed",&damp_fixed);CHKERRQ(ierr);
158 
159       ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed);CHKERRQ(ierr);
160 
161       ierr = PetscOptionsGetReal(pc_ctx->prefix,"-pc_is_set_damping_factor_floating",
162 			      &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
163       if (!set_damping_factor_floating) { floating_factor = 0.0; }
164       ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating);CHKERRQ(ierr);
165       if (!set_damping_factor_floating) { floating_factor = 1.e-12; }
166 
167       ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_not_damp_floating",&not_damp_floating);CHKERRQ(ierr);
168 
169       ierr = PetscOptionsHasName(pc_ctx->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating);CHKERRQ(ierr);
170 
171       if (pcis->pure_neumann) {  /* floating subdomain */
172 	if (!(not_damp_floating)) {
173           ierr = PCFactorSetShiftNonzero(floating_factor,&((PC_LU*)pc_ctx->data)->info);CHKERRQ(ierr);
174 	}
175 	if (!(not_remove_nullspace_floating)){
176 	  MatNullSpace nullsp;
177 	  ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
178 	  ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
179 	  ierr = MatNullSpaceDestroy(nullsp);CHKERRQ(ierr);
180 	}
181       } else {  /* fixed subdomain */
182 	if (damp_fixed) {
183           ierr = PCFactorSetShiftNonzero(fixed_factor,&((PC_LU*)pc_ctx->data)->info);CHKERRQ(ierr);
184 	}
185 	if (remove_nullspace_fixed) {
186 	  MatNullSpace nullsp;
187 	  ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
188 	  ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
189 	  ierr = MatNullSpaceDestroy(nullsp);CHKERRQ(ierr);
190 	}
191       }
192     }
193     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
194     ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
195   }
196 
197   ierr = ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
198   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
199   PetscFunctionReturn(0);
200 }
201 
202 /* -------------------------------------------------------------------------- */
203 /*
204    PCISDestroy - frees every object created by PCISCreate()/PCISSetUp().
205 */
206 #undef __FUNCT__
207 #define __FUNCT__ "PCISDestroy"
208 PetscErrorCode PCISDestroy(PC pc)
209 {
210   PC_IS          *pcis = (PC_IS*)(pc->data);
211   PetscErrorCode ierr;
212 
213   PetscFunctionBegin;
214   if (pcis->is_B_local)  {ierr = ISDestroy(pcis->is_B_local);CHKERRQ(ierr);}
215   if (pcis->is_I_local)  {ierr = ISDestroy(pcis->is_I_local);CHKERRQ(ierr);}
216   if (pcis->is_B_global) {ierr = ISDestroy(pcis->is_B_global);CHKERRQ(ierr);}
217   if (pcis->is_I_global) {ierr = ISDestroy(pcis->is_I_global);CHKERRQ(ierr);}
218   if (pcis->A_II)        {ierr = MatDestroy(pcis->A_II);CHKERRQ(ierr);}
219   if (pcis->A_IB)        {ierr = MatDestroy(pcis->A_IB);CHKERRQ(ierr);}
220   if (pcis->A_BI)        {ierr = MatDestroy(pcis->A_BI);CHKERRQ(ierr);}
221   if (pcis->A_BB)        {ierr = MatDestroy(pcis->A_BB);CHKERRQ(ierr);}
222   if (pcis->D)           {ierr = VecDestroy(pcis->D);CHKERRQ(ierr);}
223   if (pcis->ksp_N)      {ierr = KSPDestroy(pcis->ksp_N);CHKERRQ(ierr);}
224   if (pcis->ksp_D)      {ierr = KSPDestroy(pcis->ksp_D);CHKERRQ(ierr);}
225   if (pcis->vec1_N)      {ierr = VecDestroy(pcis->vec1_N);CHKERRQ(ierr);}
226   if (pcis->vec2_N)      {ierr = VecDestroy(pcis->vec2_N);CHKERRQ(ierr);}
227   if (pcis->vec1_D)      {ierr = VecDestroy(pcis->vec1_D);CHKERRQ(ierr);}
228   if (pcis->vec2_D)      {ierr = VecDestroy(pcis->vec2_D);CHKERRQ(ierr);}
229   if (pcis->vec3_D)      {ierr = VecDestroy(pcis->vec3_D);CHKERRQ(ierr);}
230   if (pcis->vec1_B)      {ierr = VecDestroy(pcis->vec1_B);CHKERRQ(ierr);}
231   if (pcis->vec2_B)      {ierr = VecDestroy(pcis->vec2_B);CHKERRQ(ierr);}
232   if (pcis->vec3_B)      {ierr = VecDestroy(pcis->vec3_B);CHKERRQ(ierr);}
233   if (pcis->vec1_global) {ierr = VecDestroy(pcis->vec1_global);CHKERRQ(ierr);}
234   if (pcis->work_N)      {ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);}
235   if (pcis->global_to_D) {ierr = VecScatterDestroy(pcis->global_to_D);CHKERRQ(ierr);}
236   if (pcis->N_to_B)      {ierr = VecScatterDestroy(pcis->N_to_B);CHKERRQ(ierr);}
237   if (pcis->global_to_B) {ierr = VecScatterDestroy(pcis->global_to_B);CHKERRQ(ierr);}
238   if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
239     ierr = ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
240   }
241   PetscFunctionReturn(0);
242 }
243 
244 /* -------------------------------------------------------------------------- */
245 /*
246    PCISCreate - initializes the PC_IS context, zeroing all of its fields.
247 */
248 #undef __FUNCT__
249 #define __FUNCT__ "PCISCreate"
250 PetscErrorCode PCISCreate(PC pc)
251 {
252   PC_IS *pcis = (PC_IS*)(pc->data);
253 
254   PetscFunctionBegin;
255   pcis->is_B_local  = 0;
256   pcis->is_I_local  = 0;
257   pcis->is_B_global = 0;
258   pcis->is_I_global = 0;
259   pcis->A_II        = 0;
260   pcis->A_IB        = 0;
261   pcis->A_BI        = 0;
262   pcis->A_BB        = 0;
263   pcis->D           = 0;
264   pcis->ksp_N      = 0;
265   pcis->ksp_D      = 0;
266   pcis->vec1_N      = 0;
267   pcis->vec2_N      = 0;
268   pcis->vec1_D      = 0;
269   pcis->vec2_D      = 0;
270   pcis->vec3_D      = 0;
271   pcis->vec1_B      = 0;
272   pcis->vec2_B      = 0;
273   pcis->vec3_B      = 0;
274   pcis->vec1_global = 0;
275   pcis->work_N      = 0;
276   pcis->global_to_D = 0;
277   pcis->N_to_B      = 0;
278   pcis->global_to_B = 0;
279   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
280   PetscFunctionReturn(0);
281 }
282 
283 /* -------------------------------------------------------------------------- */
284 /*
285    PCISApplySchur -
286 
287    Input parameters:
288 .  pc - preconditioner context
289 .  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
290 
291    Output parameters:
292 .  vec1_B - result of the Schur complement applied to v
293 .  vec2_B - garbage (used as work space), or null (and v is used as workspace)
294 .  vec1_D - garbage (used as work space)
295 .  vec2_D - garbage (used as work space)
296 
297 */
298 #undef __FUNCT__
299 #define __FUNCT__ "PCIterSuApplySchur"
300 PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
301 {
302   PetscErrorCode ierr;
303   PetscScalar    m_one = -1.0;
304   PC_IS          *pcis = (PC_IS*)(pc->data);
305 
306   PetscFunctionBegin;
307   if (!vec2_B) { vec2_B = v; }
308 
309   ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);
310   ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);
311   ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);
312   ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);
313   ierr = VecAXPY(&m_one,vec2_B,vec1_B);CHKERRQ(ierr);
314   PetscFunctionReturn(0);
315 }
316 
317 /* -------------------------------------------------------------------------- */
318 /*
319    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
320    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
321    mode.
322 
323    Input parameters:
324 .  pc - preconditioner context
325 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
326 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
327 
328    Output parameter:
329 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
330 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
331 
332    Notes:
333    The entries in the array that do not correspond to interface nodes remain unaltered.
334 */
335 #undef __FUNCT__
336 #define __FUNCT__ "PCISScatterArrayNToVecB"
337 PetscErrorCode PCISScatterArrayNToVecB (PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
338 {
339   PetscInt       i, *idex;
340   PetscErrorCode ierr;
341   PetscScalar    *array_B;
342   PC_IS          *pcis = (PC_IS*)(pc->data);
343 
344   PetscFunctionBegin;
345   ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
346   ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
347 
348   if (smode == SCATTER_FORWARD) {
349     if (imode == INSERT_VALUES) {
350       for (i=0; i<pcis->n_B; i++) { array_B[i]  = array_N[idex[i]]; }
351     } else {  /* ADD_VALUES */
352       for (i=0; i<pcis->n_B; i++) { array_B[i] += array_N[idex[i]]; }
353     }
354   } else {  /* SCATTER_REVERSE */
355     if (imode == INSERT_VALUES) {
356       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]]  = array_B[i]; }
357     } else {  /* ADD_VALUES */
358       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]] += array_B[i]; }
359     }
360   }
361   ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
362   ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
363   PetscFunctionReturn(0);
364 }
365 
366 /* -------------------------------------------------------------------------- */
367 /*
368    PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
369    More precisely, solves the problem:
370                                         [ A_II  A_IB ] [ . ]   [ 0 ]
371                                         [            ] [   ] = [   ]
372                                         [ A_BI  A_BB ] [ x ]   [ b ]
373 
374    Input parameters:
375 .  pc - preconditioner context
376 .  b - vector of local interface nodes (including ghosts)
377 
378    Output parameters:
379 .  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
380        complement to b
381 .  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
382 .  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
383 
384 */
385 #undef __FUNCT__
386 #define __FUNCT__ "PCISApplyInvSchur"
387 PetscErrorCode PCISApplyInvSchur (PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
388 {
389   PetscErrorCode ierr;
390   PC_IS          *pcis = (PC_IS*)(pc->data);
391   PetscScalar    zero  = 0.0;
392 
393   PetscFunctionBegin;
394   /*
395     Neumann solvers.
396     Applying the inverse of the local Schur complement, i.e, solving a Neumann
397     Problem with zero at the interior nodes of the RHS and extracting the interface
398     part of the solution. inverse Schur complement is applied to b and the result
399     is stored in x.
400   */
401   /* Setting the RHS vec1_N */
402   ierr = VecSet(&zero,vec1_N);CHKERRQ(ierr);
403   ierr = VecScatterBegin(b,vec1_N,INSERT_VALUES,SCATTER_REVERSE,pcis->N_to_B);CHKERRQ(ierr);
404   ierr = VecScatterEnd  (b,vec1_N,INSERT_VALUES,SCATTER_REVERSE,pcis->N_to_B);CHKERRQ(ierr);
405   /* Checking for consistency of the RHS */
406   {
407     PetscTruth flg;
408     ierr = PetscOptionsHasName(PETSC_NULL,"-pc_is_check_consistency",&flg);CHKERRQ(ierr);
409     if (flg) {
410       PetscScalar average;
411       ierr = VecSum(vec1_N,&average);CHKERRQ(ierr);
412       average = average / ((PetscReal)pcis->n);
413       if (pcis->pure_neumann) {
414         ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_(pc->comm),"Subdomain %04d is floating. Average = % 1.14e\n",
415                                              PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
416       } else {
417         ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_(pc->comm),"Subdomain %04d is fixed.    Average = % 1.14e\n",
418                                              PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
419       }
420       PetscViewerFlush(PETSC_VIEWER_STDOUT_(pc->comm));
421     }
422   }
423   /* Solving the system for vec2_N */
424   ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
425   /* Extracting the local interface vector out of the solution */
426   ierr = VecScatterBegin(vec2_N,x,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
427   ierr = VecScatterEnd  (vec2_N,x,INSERT_VALUES,SCATTER_FORWARD,pcis->N_to_B);CHKERRQ(ierr);
428   PetscFunctionReturn(0);
429 }
430 
431 
432 
433 
434 
435 
436 
437 
438 
439