xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision a44bcdf57b2bdc3efd71d40abec79a20d8d3f002)
1 
2 /*
3     Provides an interface to the MUMPS sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 #include <petscblaslapack.h>
9 
10 EXTERN_C_BEGIN
11 #if defined(PETSC_USE_COMPLEX)
12 #if defined(PETSC_USE_REAL_SINGLE)
13 #include <cmumps_c.h>
14 #else
15 #include <zmumps_c.h>
16 #endif
17 #else
18 #if defined(PETSC_USE_REAL_SINGLE)
19 #include <smumps_c.h>
20 #else
21 #include <dmumps_c.h>
22 #endif
23 #endif
24 EXTERN_C_END
25 #define JOB_INIT -1
26 #define JOB_FACTSYMBOLIC 1
27 #define JOB_FACTNUMERIC 2
28 #define JOB_SOLVE 3
29 #define JOB_END -2
30 
31 /* calls to MUMPS */
32 #if defined(PETSC_USE_COMPLEX)
33 #if defined(PETSC_USE_REAL_SINGLE)
34 #define PetscMUMPS_c cmumps_c
35 #else
36 #define PetscMUMPS_c zmumps_c
37 #endif
38 #else
39 #if defined(PETSC_USE_REAL_SINGLE)
40 #define PetscMUMPS_c smumps_c
41 #else
42 #define PetscMUMPS_c dmumps_c
43 #endif
44 #endif
45 
46 /* declare MumpsScalar */
47 #if defined(PETSC_USE_COMPLEX)
48 #if defined(PETSC_USE_REAL_SINGLE)
49 #define MumpsScalar mumps_complex
50 #else
51 #define MumpsScalar mumps_double_complex
52 #endif
53 #else
54 #define MumpsScalar PetscScalar
55 #endif
56 
57 /* macros s.t. indices match MUMPS documentation */
58 #define ICNTL(I) icntl[(I)-1]
59 #define CNTL(I) cntl[(I)-1]
60 #define INFOG(I) infog[(I)-1]
61 #define INFO(I) info[(I)-1]
62 #define RINFOG(I) rinfog[(I)-1]
63 #define RINFO(I) rinfo[(I)-1]
64 
/* Private context attached to a PETSc Mat that is factored with MUMPS */
typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;               /* MUMPS control/data structure (single-precision complex) */
#else
  ZMUMPS_STRUC_C id;               /* MUMPS control/data structure (double-precision complex) */
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;               /* MUMPS control/data structure (single-precision real) */
#else
  DMUMPS_STRUC_C id;               /* MUMPS control/data structure (double-precision real) */
#endif
#endif

  MatStructure matstruc;           /* whether the nonzero pattern changed since last factorization */
  PetscMPIInt  myid,size;          /* rank and size of comm_mumps */
  PetscInt     *irn,*jcn,nz,sym;   /* matrix in COO (triples) form: row/col indices, local nnz, symmetry flag */
  PetscScalar  *val;               /* values of the triples; may point into the PETSc matrix storage or into irn's allocation */
  MPI_Comm     comm_mumps;         /* duplicated communicator handed to MUMPS */
  PetscBool    isAIJ;
  PetscInt     ICNTL9_pre;           /* check if ICNTL(9) is changed from previous MatSolve */
  VecScatter   scat_rhs, scat_sol;   /* used by MatSolve() */
  Vec          b_seq,x_seq;          /* sequential work vectors for gathering rhs / scattering solution */
  PetscInt     ninfo,*info;          /* display INFO */
  PetscBool    schur_second_solve;   /* set between the condensation and expansion phases of a Schur solve */
  PetscInt     sizeredrhs;           /* allocated size of id.redrhs */
  PetscInt     *schur_pivots;        /* pivots from GETRF/SYTRF on the Schur complement */
  PetscInt     schur_B_lwork;        /* LAPACK workspace size for Schur factorization/inversion */
  PetscScalar  *schur_work;          /* LAPACK workspace */
  PetscScalar  *schur_sol;           /* scratch buffer holding Schur solutions */
  PetscInt     schur_sizesol;        /* allocated size of schur_sol */
  PetscBool    schur_restored;       /* user has returned the Schur matrix via MatMumpsRestoreSchurComplement */
  PetscBool    schur_factored;       /* LAPACK factorization of the Schur complement is available */
  PetscBool    schur_inverted;       /* explicit inverse of the Schur complement is available */

  PetscErrorCode (*Destroy)(Mat);                                                  /* destroy routine of the wrapped Mat type */
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**); /* format-specific COO converter */
} Mat_MUMPS;
104 
105 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
106 
107 #undef __FUNCT__
108 #define __FUNCT__ "MatMumpsResetSchur_Private"
109 static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
115   ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
116   ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
117   ierr = PetscFree(mumps->schur_pivots);CHKERRQ(ierr);
118   ierr = PetscFree(mumps->schur_work);CHKERRQ(ierr);
119   if (!mumps->schur_restored) {
120     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
121   }
122   mumps->id.size_schur = 0;
123   mumps->id.ICNTL(19) = 0;
124   PetscFunctionReturn(0);
125 }
126 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsFactorSchur_Private"
/*
  Factor the Schur complement stored in mumps->id.schur with LAPACK, in place.
  No-op when the factorization is already available (schur_factored).

  Routine selection:
    - unsymmetric (mumps->sym == 0): GETRF (LU with pivoting);
    - symmetric indefinite (id.sym == 2): SYTRF (Bunch-Kaufman), with a lazy
      workspace query the first time through;
    - otherwise (SPD): POTRF (Cholesky).
  The UPLO character is chosen from ICNTL(19) so that row-major MUMPS storage
  maps onto the column-major LAPACK convention.
*/
static PetscErrorCode MatMumpsFactorSchur_Private(Mat_MUMPS* mumps)
{
  PetscBLASInt   B_N,B_ierr,B_slda;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->schur_factored) {
    PetscFunctionReturn(0);
  }
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
    if (!mumps->schur_pivots) {
      ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
    }
    /* FP traps disabled around LAPACK: errors are reported through B_ierr instead */
    ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
    PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&B_N,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&B_ierr));
    ierr = PetscFPTrapPop();CHKERRQ(ierr);
    if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)B_ierr);
  } else { /* either full or lower-triangular (not packed) */
    char ord[2];
    if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
      sprintf(ord,"L");
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      sprintf(ord,"U");
    }
    if (mumps->id.sym == 2) {
      if (!mumps->schur_pivots) {
        PetscScalar  lwork;

        ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
        /* lwork = -1: workspace-size query; optimal size is returned in lwork */
        mumps->schur_B_lwork=-1;
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to SYTRF Lapack routine %d",(int)B_ierr);
        ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
        ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRF Lapack routine %d",(int)B_ierr);
    } else {
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
    }
  }
  mumps->schur_factored = PETSC_TRUE;
  PetscFunctionReturn(0);
}
182 
183 #undef __FUNCT__
184 #define __FUNCT__ "MatMumpsInvertSchur_Private"
185 static PetscErrorCode MatMumpsInvertSchur_Private(Mat_MUMPS* mumps)
186 {
187   PetscBLASInt   B_N,B_ierr,B_slda;
188   PetscErrorCode ierr;
189 
190   PetscFunctionBegin;
191   ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr);
192   ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
193   ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
194   if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
195     if (!mumps->schur_work) {
196       PetscScalar lwork;
197 
198       mumps->schur_B_lwork = -1;
199       ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
200       PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
201       ierr = PetscFPTrapPop();CHKERRQ(ierr);
202       if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to GETRI Lapack routine %d",(int)B_ierr);
203       ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
204       ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
205     }
206     ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
207     PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
208     ierr = PetscFPTrapPop();CHKERRQ(ierr);
209     if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRI Lapack routine %d",(int)B_ierr);
210   } else { /* either full or lower-triangular (not packed) */
211     char ord[2];
212     if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
213       sprintf(ord,"L");
214     } else { /* ICNTL(19) == 1 lower triangular stored by rows */
215       sprintf(ord,"U");
216     }
217     if (mumps->id.sym == 2) {
218       ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
219       PetscStackCallBLAS("LAPACKsytri",LAPACKsytri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&B_ierr));
220       ierr = PetscFPTrapPop();CHKERRQ(ierr);
221       if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRI Lapack routine %d",(int)B_ierr);
222     } else {
223       ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
224       PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,&B_ierr));
225       ierr = PetscFPTrapPop();CHKERRQ(ierr);
226       if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
227     }
228   }
229   mumps->schur_inverted = PETSC_TRUE;
230   PetscFunctionReturn(0);
231 }
232 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSolveSchur_Private"
/*
  Solve the Schur complement system whose right-hand sides are stored in
  mumps->id.redrhs (id.nrhs columns, leading dimension id.lredrhs).

  Two paths:
    - if the explicit inverse is available (schur_inverted) the solve is a
      GEMM/SYMM multiplication into the scratch buffer schur_sol;
    - otherwise a LAPACK triangular solve (GETRS/SYTRS/POTRS) is applied on
      the factors produced by MatMumpsFactorSchur_Private().

  sol_in_redrhs: if PETSC_TRUE the solution ends up in id.redrhs (as MUMPS
  expects before the expansion phase); if PETSC_FALSE it is left in
  mumps->schur_sol and id.redrhs is untouched.
*/
static PetscErrorCode MatMumpsSolveSchur_Private(Mat_MUMPS* mumps, PetscBool sol_in_redrhs)
{
  PetscBLASInt   B_N,B_Nrhs,B_ierr,B_slda,B_rlda;
  PetscScalar    one=1.,zero=0.;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr); /* no-op if already factored */
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.nrhs,&B_Nrhs);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.lredrhs,&B_rlda);CHKERRQ(ierr);
  if (mumps->schur_inverted) {
    /* inverse available: solution = S^{-1} * redrhs via matrix-matrix multiply */
    PetscInt sizesol = B_Nrhs*B_N;
    /* (re)allocate the scratch solution buffer, growing only when needed */
    if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
      ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
      ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
      mumps->schur_sizesol = sizesol;
    }
    if (!mumps->sym) {
      char type[2];
      /* transpose flag reconciles MUMPS row/column storage (ICNTL(19)) with the
         requested solve direction (ICNTL(9): 0 => transpose system) */
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      PetscStackCallBLAS("BLASgemm",BLASgemm_(type,"N",&B_N,&B_Nrhs,&B_N,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    } else {
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      PetscStackCallBLAS("BLASsymm",BLASsymm_("L",ord,&B_N,&B_Nrhs,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    }
    if (sol_in_redrhs) {
      ierr = PetscMemcpy(mumps->id.redrhs,mumps->schur_sol,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* Schur complement has not been inverted */
    MumpsScalar *orhs=NULL;

    if (!sol_in_redrhs) {
      /* LAPACK *TRS routines overwrite the rhs in place; to keep id.redrhs
         intact, work on a copy and temporarily swap the pointer */
      PetscInt sizesol = B_Nrhs*B_N;
      if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
        ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
        ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
        mumps->schur_sizesol = sizesol;
      }
      orhs = mumps->id.redrhs;
      ierr = PetscMemcpy(mumps->schur_sol,mumps->id.redrhs,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
      mumps->id.redrhs = (MumpsScalar*)mumps->schur_sol;
    }
    if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
      char type[2];
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKgetrs",LAPACKgetrs_(type,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRS Lapack routine %d",(int)B_ierr);
    } else { /* either full or lower-triangular (not packed) */
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      if (mumps->id.sym == 2) {
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrs",LAPACKsytrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRS Lapack routine %d",(int)B_ierr);
      } else {
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKpotrs",LAPACKpotrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRS Lapack routine %d",(int)B_ierr);
      }
    }
    if (!sol_in_redrhs) {
      /* restore the original MUMPS rhs pointer */
      mumps->id.redrhs = orhs;
    }
  }
  PetscFunctionReturn(0);
}
340 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsHandleSchur_Private"
/*
  Drive the two-phase MUMPS solve when a Schur complement is active (ICNTL(19) != 0):
  first call sets up the condensation step (ICNTL(26) = 1), second call solves the
  Schur system ourselves and runs the expansion step (ICNTL(26) = 2).
  Does nothing when no Schur complement has been requested.
*/
static PetscErrorCode MatMumpsHandleSchur_Private(Mat_MUMPS* mumps)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */
    PetscFunctionReturn(0);
  }
  if (!mumps->schur_second_solve) { /* prepare for the condensation step */
    /* check if schur complement has been computed
       We set by default ICNTL(26) == -1 when Schur indices have been provided by the user.
       Per the MUMPS (5.0.0) manual, an out-of-range ICNTL(26) value appears to be ignored
       (harmless) during the factorization phase -- NOTE(review): confirm against the manual.
       Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve routines solve the full system.
       This requires an extra call to PetscMUMPS_c and the computation of the factors for S,
       handled by setting schur_second_solve to PETSC_TRUE */
    if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
      PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur;
      /* allocate MUMPS internal array to store reduced right-hand sides */
      if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) {
        ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
        mumps->id.lredrhs = mumps->id.size_schur;
        ierr = PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);CHKERRQ(ierr);
        mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs;
      }
      mumps->schur_second_solve = PETSC_TRUE;
      mumps->id.ICNTL(26) = 1; /* condensation phase */
    }
  } else { /* prepare for the expansion step */
    /* solve Schur complement (this should be done by the MUMPS user, so basically us) */
    ierr = MatMumpsSolveSchur_Private(mumps,PETSC_TRUE);CHKERRQ(ierr);
    mumps->id.ICNTL(26) = 2; /* expansion phase */
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    /* restore defaults */
    mumps->id.ICNTL(26) = -1;
    mumps->schur_second_solve = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
381 
382 /*
383   MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz]
384 
385   input:
386     A       - matrix in aij,baij or sbaij (bs=1) format
387     shift   - 0: C style output triple; 1: Fortran style output triple.
388     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
389               MAT_REUSE_MATRIX:   only the values in v array are updated
390   output:
391     nnz     - dim of r, c, and v (number of local nonzero entries of A)
392     r, c, v - row and col index, matrix values (matrix triples)
393 
  The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). In MatDestroy_MUMPS() they are then
  freed with a single PetscFree(mumps->irn). This is not ideal code: the fact that v is only sometimes part of the mumps->irn allocation means
396   that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
397 
398  */
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
402 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
403 {
404   const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
405   PetscInt       nz,rnz,i,j;
406   PetscErrorCode ierr;
407   PetscInt       *row,*col;
408   Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;
409 
410   PetscFunctionBegin;
411   *v=aa->a;
412   if (reuse == MAT_INITIAL_MATRIX) {
413     nz   = aa->nz;
414     ai   = aa->i;
415     aj   = aa->j;
416     *nnz = nz;
417     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
418     col  = row + nz;
419 
420     nz = 0;
421     for (i=0; i<M; i++) {
422       rnz = ai[i+1] - ai[i];
423       ajj = aj + ai[i];
424       for (j=0; j<rnz; j++) {
425         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
426       }
427     }
428     *r = row; *c = col;
429   }
430   PetscFunctionReturn(0);
431 }
432 
433 #undef __FUNCT__
434 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
435 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
436 {
437   Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
438   const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
439   PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
440   PetscErrorCode ierr;
441   PetscInt       *row,*col;
442 
443   PetscFunctionBegin;
444   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
445   M = A->rmap->N/bs;
446   *v = aa->a;
447   if (reuse == MAT_INITIAL_MATRIX) {
448     ai   = aa->i; aj = aa->j;
449     nz   = bs2*aa->nz;
450     *nnz = nz;
451     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
452     col  = row + nz;
453 
454     for (i=0; i<M; i++) {
455       ajj = aj + ai[i];
456       rnz = ai[i+1] - ai[i];
457       for (k=0; k<rnz; k++) {
458         for (j=0; j<bs; j++) {
459           for (m=0; m<bs; m++) {
460             row[idx]   = i*bs + m + shift;
461             col[idx++] = bs*(ajj[k]) + j + shift;
462           }
463         }
464       }
465     }
466     *r = row; *c = col;
467   }
468   PetscFunctionReturn(0);
469 }
470 
471 #undef __FUNCT__
472 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
473 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
474 {
475   const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
476   PetscInt       nz,rnz,i,j;
477   PetscErrorCode ierr;
478   PetscInt       *row,*col;
479   Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;
480 
481   PetscFunctionBegin;
482   *v = aa->a;
483   if (reuse == MAT_INITIAL_MATRIX) {
484     nz   = aa->nz;
485     ai   = aa->i;
486     aj   = aa->j;
487     *v   = aa->a;
488     *nnz = nz;
489     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
490     col  = row + nz;
491 
492     nz = 0;
493     for (i=0; i<M; i++) {
494       rnz = ai[i+1] - ai[i];
495       ajj = aj + ai[i];
496       for (j=0; j<rnz; j++) {
497         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
498       }
499     }
500     *r = row; *c = col;
501   }
502   PetscFunctionReturn(0);
503 }
504 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
/*
  Convert a SeqAIJ matrix to COO triples holding ONLY its upper triangular part
  (for a symmetric MUMPS factorization). Since only a subset of the AIJ values
  is kept, the values cannot be used in place: row, col AND val are packed into
  one allocation, and val is refilled on MAT_REUSE_MATRIX.
*/
PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
  PetscInt          nz,rnz,i,j;
  const PetscScalar *av,*v1;
  PetscScalar       *val;
  PetscErrorCode    ierr;
  PetscInt          *row,*col;
  Mat_SeqAIJ        *aa=(Mat_SeqAIJ*)A->data;

  PetscFunctionBegin;
  ai   =aa->i; aj=aa->j;av=aa->a;
  adiag=aa->diag;  /* adiag[i]: position of the diagonal entry of row i, start of its upper part */
  if (reuse == MAT_INITIAL_MATRIX) {
    /* count nz in the upper triangular part of A */
    nz = 0;
    for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
    *nnz = nz;

    /* one allocation carries row, col and val; freed as a single PetscFree(mumps->irn) */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - adiag[i];
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
      }
    }
    *r = row; *c = col; *v = val;
  } else {
    /* pattern unchanged: only refresh the values of the upper triangular part */
    nz = 0; val = *v;
    for (i=0; i <M; i++) {
      rnz = ai[i+1] - adiag[i];
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        val[nz++] = v1[j];
      }
    }
  }
  PetscFunctionReturn(0);
}
553 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/*
  Convert an MPISBAIJ (bs=1) matrix to COO triples with GLOBAL indices for MUMPS.
  Both the diagonal (A) and off-diagonal (B) local blocks are emitted; row, col
  and val share a single allocation and the values are always (re)copied since
  they come from two separate storages.
*/
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  av=aa->a; bv=bb->a;

  /* garray maps compressed B-part column indices to global column indices */
  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    /* one allocation holds row, col and val; freed as one PetscFree(mumps->irn) */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow: global row index of local row i */
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part: local column indices are offset by rstart to become global */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: columns are mapped to global indices through garray */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
614 
615 #undef __FUNCT__
616 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
617 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
618 {
619   const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
620   PetscErrorCode    ierr;
621   PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
622   PetscInt          *row,*col;
623   const PetscScalar *av, *bv,*v1,*v2;
624   PetscScalar       *val;
625   Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
626   Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
627   Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;
628 
629   PetscFunctionBegin;
630   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
631   av=aa->a; bv=bb->a;
632 
633   garray = mat->garray;
634 
635   if (reuse == MAT_INITIAL_MATRIX) {
636     nz   = aa->nz + bb->nz;
637     *nnz = nz;
638     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
639     col  = row + nz;
640     val  = (PetscScalar*)(col + nz);
641 
642     *r = row; *c = col; *v = val;
643   } else {
644     row = *r; col = *c; val = *v;
645   }
646 
647   jj = 0; irow = rstart;
648   for (i=0; i<m; i++) {
649     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
650     countA = ai[i+1] - ai[i];
651     countB = bi[i+1] - bi[i];
652     bjj    = bj + bi[i];
653     v1     = av + ai[i];
654     v2     = bv + bi[i];
655 
656     /* A-part */
657     for (j=0; j<countA; j++) {
658       if (reuse == MAT_INITIAL_MATRIX) {
659         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
660       }
661       val[jj++] = v1[j];
662     }
663 
664     /* B-part */
665     for (j=0; j < countB; j++) {
666       if (reuse == MAT_INITIAL_MATRIX) {
667         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
668       }
669       val[jj++] = v2[j];
670     }
671     irow++;
672   }
673   PetscFunctionReturn(0);
674 }
675 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
/*
  Convert an MPIBAIJ matrix to pointwise COO triples with GLOBAL indices for
  MUMPS. Each bs x bs block of the diagonal (A) and off-diagonal (B) local
  parts is expanded in column-major order to match the BAIJ value layout;
  row, col and val share a single allocation and values are always (re)copied.
*/
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz); /* total pointwise entries after block expansion */
    *nnz = nz;
    /* one allocation holds row, col and val; freed as one PetscFree(mumps->irn) */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow: global pointwise row of the first row of block row i */
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i];
    v2     = bv + bs2*bi[i];

    idx = 0;
    /* A-part: expand each block; n (block row) varies fastest to match value order */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part: same expansion, block columns mapped to global via garray */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}
746 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
/*
  Convert an MPIAIJ matrix to COO triples holding only its UPPER triangular
  part with GLOBAL indices (for a symmetric MUMPS factorization).
  For the diagonal (A) part the upper triangle starts at aa->diag[i]; for the
  off-diagonal (B) part only columns with global index > rstart are kept
  (B columns presumably never fall inside the local diagonal block
  [rstart, rstart+m), so this selects exactly the columns right of it --
  NOTE(review): relies on the MPIAIJ B/garray construction).
*/
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat =  (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  =(Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  =(Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    /* one allocation carries row, col and val; freed as one PetscFree(mumps->irn) */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow: global row index of local row i */
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part: diagonal and entries to its right (upper triangle) */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: keep only columns in the upper triangle (same filter as counting loop) */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
821 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_MUMPS"
/* MatGetDiagonal() is not supported on a MUMPS factored matrix: always raises PETSC_ERR_SUP. */
PetscErrorCode MatGetDiagonal_MUMPS(Mat A,Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: MUMPS factor");
  PetscFunctionReturn(0); /* unreachable: SETERRQ returns from the function */
}
830 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroys a MUMPS factor matrix: frees the PETSc-side work objects (distributed
  solution buffers, rhs/solution scatters, work vectors, triple arrays, Schur data),
  terminates the MUMPS instance with JOB_END, releases the duplicated communicator,
  chains to the base destroy routine, and clears all composed query/set functions.
  The order matters: all buffers handed to MUMPS are released before JOB_END.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* distributed-solution buffers allocated together in MatFactorNumeric_MUMPS() */
  ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
  ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
  ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
  ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
  ierr = PetscFree(mumps->irn);CHKERRQ(ierr);
  ierr = PetscFree(mumps->info);CHKERRQ(ierr);
  ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
  /* tell MUMPS to release its internal factorization data; the return status
     in INFOG(1) is not checked here since we are tearing down regardless */
  mumps->id.job = JOB_END;
  PetscMUMPS_c(&mumps->id);
  ierr = MPI_Comm_free(&mumps->comm_mumps);CHKERRQ(ierr);
  if (mumps->Destroy) {
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetSchurIndices_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsInvertSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsCreateSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsRestoreSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSolveSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSolveSchurComplementTranspose_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
877 
#undef __FUNCT__
#define __FUNCT__ "MatSolve_MUMPS"
/*
  Solves A x = b using the MUMPS factors stored in A->spptr (JOB_SOLVE).

  Sequential (size==1): b is copied into x and MUMPS solves in place on x's array.
  Parallel: b is gathered onto the host rank via scat_rhs (MUMPS only accepts a
  centralized rhs), MUMPS returns a distributed solution (ICNTL(21)=1) in
  x_seq/isol_loc, which is then scattered into the PETSc layout of x via scat_sol.

  Whether the system is solved with A or A^T is controlled by ICNTL(9); see
  MatSolveTranspose_MUMPS().
*/
PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
  PetscScalar      *array;
  Vec              b_seq;
  IS               is_iden,is_petsc;
  PetscErrorCode   ierr;
  PetscInt         i;
  static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE; /* register MUMPS citations only once per run */

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
  ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
  mumps->id.nrhs = 1;
  b_seq          = mumps->b_seq;
  if (mumps->size > 1) {
    /* MUMPS only supports centralized rhs. Scatter b into a sequential rhs vector */
    ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);} /* array is only valid on the host rank */
  } else {  /* size == 1 */
    /* solve in place: copy b into x and hand x's array to MUMPS */
    ierr = VecCopy(b,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&array);CHKERRQ(ierr);
    /* NOTE(review): array is not VecRestoreArray()'d after the solve -- confirm intended */
  }
  if (!mumps->myid) { /* define rhs on the host */
    mumps->id.nrhs = 1;
    mumps->id.rhs = (MumpsScalar*)array;
  }

  /* handle condensation step of Schur complement (if any) */
  ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);

  /* solve phase */
  /*-------------*/
  mumps->id.job = JOB_SOLVE;
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  /* handle expansion step of Schur complement (if any) */
  ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);

  if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
    if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
      /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    }
    if (!mumps->scat_sol) { /* create scatter scat_sol */
      ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
      for (i=0; i<mumps->id.lsol_loc; i++) {
        mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
      ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
      ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);

      mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
    }

    ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
944 
#undef __FUNCT__
#define __FUNCT__ "MatSolveTranspose_MUMPS"
/*
  Solves A^T x = b by toggling ICNTL(9) around MatSolve_MUMPS():
  ICNTL(9)=0 selects the transposed system; it is then restored to 1
  (solve with A). MatSolve_MUMPS() keys its scat_sol caching on ICNTL(9),
  so the scatter is rebuilt when alternating between A and A^T solves.
*/
PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  mumps->id.ICNTL(9) = 0;
  ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1;
  PetscFunctionReturn(0);
}
958 
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_MUMPS"
/*
  Solves A X = B for multiple right-hand sides; B and X must be MATDENSE with
  the same row distribution.

  Sequential: B is copied into X and MUMPS solves all nrhs columns in place.
  Parallel: the columns of B are gathered onto the host rank in column-major
  order (MUMPS requires a centralized dense rhs), MUMPS returns a distributed
  solution in a temporary sol_loc/isol_loc pair (the ones cached for MatSolve()
  are saved and restored around this call), and the solution entries are
  scattered back into X's local arrays via an index translation built from
  B's ownership ranges.
*/
PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscInt       i,nrhs,M;
  PetscScalar    *array,*bray;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
  if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");

  ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr);
  mumps->id.nrhs = nrhs;
  mumps->id.lrhs = M; /* leading dimension of the centralized rhs array */

  if (mumps->size == 1) {
    /* copy B to X; MUMPS overwrites the rhs array with the solution */
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);
    mumps->id.rhs = (MumpsScalar*)array;
    /* handle condensation step of Schur complement (if any) */
    ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* handle expansion step of Schur complement (if any) */
    ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);
  } else {  /*--------- parallel case --------*/
    PetscInt       lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
    MumpsScalar    *sol_loc,*sol_loc_save;
    IS             is_to,is_from;
    PetscInt       k,proc,j,m;
    const PetscInt *rstart;
    Vec            v_mpi,b_seq,x_seq;
    VecScatter     scat_rhs,scat_sol;

    /* create x_seq to hold local solution */
    isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
    sol_loc_save  = mumps->id.sol_loc;

    lsol_loc  = mumps->id.INFO(23); /* length of this rank's piece of one solution column */
    nlsol_loc = nrhs*lsol_loc;     /* length of sol_loc */
    ierr = PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);CHKERRQ(ierr);
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    mumps->id.isol_loc = isol_loc;

    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr);

    /* copy rhs matrix B into vector v_mpi */
    ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);

    /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
    /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
      iidx: inverse of idx, will be used by scattering xx_seq -> X       */
    ierr = PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr);
    k = 0;
    for (proc=0; proc<mumps->size; proc++){
      for (j=0; j<nrhs; j++){
        for (i=rstart[proc]; i<rstart[proc+1]; i++){
          iidx[j*M + i] = k;
          idx[k++]      = j*M + i;
        }
      }
    }

    if (!mumps->myid) {
      /* host receives the whole centralized rhs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr);
    } else {
      /* non-host ranks contribute but receive nothing */
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr);
    }
    ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    if (!mumps->myid) { /* define rhs on the host */
      ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr);
      mumps->id.rhs = (MumpsScalar*)bray;
      ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr);
    }

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr);

    /* create scatter scat_sol */
    ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr);
    for (i=0; i<lsol_loc; i++) {
      isol_loc[i] -= 1; /* change Fortran style to C style */
      idxx[i] = iidx[isol_loc[i]];
      for (j=1; j<nrhs; j++){
        /* same row index shifted by j*M addresses the j-th rhs column */
        idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M];
      }
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
    ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);

    /* free spaces */
    mumps->id.sol_loc = sol_loc_save;   /* restore the buffers cached for MatSolve() */
    mumps->id.isol_loc = isol_loc_save;

    ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr);
    ierr = PetscFree2(idx,iidx);CHKERRQ(ierr);
    ierr = PetscFree(idxx);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&v_mpi);CHKERRQ(ierr);
    ierr = VecDestroy(&b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1105 
1106 #if !defined(PETSC_USE_COMPLEX)
1107 /*
1108   input:
1109    F:        numeric factor
1110   output:
1111    nneg:     total number of negative pivots
1112    nzero:    0
1113    npos:     (global dimension of F) - nneg
1114 */
1115 
1116 #undef __FUNCT__
1117 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
1118 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
1119 {
1120   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1121   PetscErrorCode ierr;
1122   PetscMPIInt    size;
1123 
1124   PetscFunctionBegin;
1125   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
1126   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
1127   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13));
1128 
1129   if (nneg) *nneg = mumps->id.INFOG(12);
1130   if (nzero || npos) {
1131     if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
1132     if (nzero) *nzero = mumps->id.INFOG(28);
1133     if (npos) *npos   = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
1134   }
1135   PetscFunctionReturn(0);
1136 }
1137 #endif /* !defined(PETSC_USE_COMPLEX) */
1138 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Numerical factorization phase (JOB_FACTNUMERIC). Re-extracts the matrix
  values into the cached triple arrays (MAT_REUSE_MATRIX), hands them to
  MUMPS either centralized (ICNTL(18)==0, host only) or distributed, runs
  the factorization, and in parallel (re)allocates the distributed-solution
  buffers (sol_loc/isol_loc, sized by INFO(23)) reused by MatSolve_MUMPS().
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  /* refresh the numerical values in the triples; sparsity pattern is reused */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) { /* A is centralized */
    if (!mumps->myid) {
      mumps->id.a = (MumpsScalar*)mumps->val;
    }
  } else {
    mumps->id.a_loc = (MumpsScalar*)mumps->val;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    if (mumps->id.INFO(1) == -13) {
      /* -13: allocation failure; INFO(2) carries the size, negative means megabytes */
      if (mumps->id.INFO(2) < 0) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      } else {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
      }
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  /* NOTE(review): the condition tests ICNTL(16) but the message prints INFOG(16) -- confirm which is intended */
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  (F)->assembled        = PETSC_TRUE;
  mumps->matstruc       = SAME_NONZERO_PATTERN;
  mumps->schur_factored = PETSC_FALSE;
  mumps->schur_inverted = PETSC_FALSE;

  /* just to be sure that ICNTL(19) value returned by a call from MatMumpsGetIcntl is always consistent */
  if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3;

  if (mumps->size > 1) {
    PetscInt    lsol_loc;
    PetscScalar *sol_loc;

    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;

    /* distributed solution; Create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {
      /* discard buffers from a previous factorization; INFO(23) may have changed */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1204 
1205 /* Sets MUMPS options from the options database */
1206 #undef __FUNCT__
1207 #define __FUNCT__ "PetscSetMUMPSFromOptions"
1208 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
1209 {
1210   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
1211   PetscErrorCode ierr;
1212   PetscInt       icntl,info[40],i,ninfo=40;
1213   PetscBool      flg;
1214 
1215   PetscFunctionBegin;
1216   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
1217   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
1218   if (flg) mumps->id.ICNTL(1) = icntl;
1219   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
1220   if (flg) mumps->id.ICNTL(2) = icntl;
1221   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
1222   if (flg) mumps->id.ICNTL(3) = icntl;
1223 
1224   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
1225   if (flg) mumps->id.ICNTL(4) = icntl;
1226   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
1227 
1228   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
1229   if (flg) mumps->id.ICNTL(6) = icntl;
1230 
1231   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
1232   if (flg) {
1233     if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
1234     else mumps->id.ICNTL(7) = icntl;
1235   }
1236 
1237   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
1238   /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */
1239   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
1240   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
1241   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
1242   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
1243   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
1244   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
1245   if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any schur data (if any) */
1246     ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
1247   }
1248   /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr); -- sparse rhs is not supported in PETSc API */
1249   /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */
1250 
1251   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
1252   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
1253   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
1254   if (mumps->id.ICNTL(24)) {
1255     mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
1256   }
1257 
1258   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
1259   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
1260   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
1261   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
1262   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
1263   ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
1264   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
1265   /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elemination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr);  -- not supported by PETSc API */
1266   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
1267 
1268   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
1269   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
1270   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
1271   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
1272   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
1273 
1274   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
1275 
1276   ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr);
1277   if (ninfo) {
1278     if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must <= 40\n",ninfo);
1279     ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr);
1280     mumps->ninfo = ninfo;
1281     for (i=0; i<ninfo; i++) {
1282       if (info[i] < 0 || info[i]>40) {
1283         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must between 1 and 40\n",ninfo);
1284       } else {
1285         mumps->info[i] = info[i];
1286       }
1287     }
1288   }
1289 
1290   PetscOptionsEnd();
1291   PetscFunctionReturn(0);
1292 }
1293 
1294 #undef __FUNCT__
1295 #define __FUNCT__ "PetscInitializeMUMPS"
1296 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
1297 {
1298   PetscErrorCode ierr;
1299 
1300   PetscFunctionBegin;
1301   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
1302   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
1303   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);
1304 
1305   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
1306 
1307   mumps->id.job = JOB_INIT;
1308   mumps->id.par = 1;  /* host participates factorizaton and solve */
1309   mumps->id.sym = mumps->sym;
1310   PetscMUMPS_c(&mumps->id);
1311 
1312   mumps->scat_rhs     = NULL;
1313   mumps->scat_sol     = NULL;
1314 
1315   /* set PETSc-MUMPS default options - override MUMPS default */
1316   mumps->id.ICNTL(3) = 0;
1317   mumps->id.ICNTL(4) = 0;
1318   if (mumps->size == 1) {
1319     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
1320   } else {
1321     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
1322     mumps->id.ICNTL(20) = 0;   /* rhs is in dense format */
1323     mumps->id.ICNTL(21) = 1;   /* distributed solution */
1324   }
1325 
1326   /* schur */
1327   mumps->id.size_schur      = 0;
1328   mumps->id.listvar_schur   = NULL;
1329   mumps->id.schur           = NULL;
1330   mumps->schur_second_solve = PETSC_FALSE;
1331   mumps->sizeredrhs         = 0;
1332   mumps->schur_pivots       = NULL;
1333   mumps->schur_work         = NULL;
1334   mumps->schur_sol          = NULL;
1335   mumps->schur_sizesol      = 0;
1336   mumps->schur_restored     = PETSC_TRUE;
1337   mumps->schur_factored     = PETSC_FALSE;
1338   mumps->schur_inverted     = PETSC_FALSE;
1339   PetscFunctionReturn(0);
1340 }
1341 
/* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
  Symbolic (analysis) phase for LU factorization of an AIJ matrix
  (JOB_FACTSYMBOLIC). Converts A to the triple format expected by MUMPS,
  feeds it either centralized (ICNTL(18)==0) or distributed (ICNTL(18)==3),
  and in the distributed case also builds the b_seq/scat_rhs pair that
  MatSolve_MUMPS() reuses to centralize right-hand sides. Finally installs
  the numeric/solve function pointers on F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* build irn/jcn/val triples; allocated here, refreshed by MatFactorNumeric_MUMPS() */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* ICNTL(6)>1: analysis needs the numerical values for scaling/permutation */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool      flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
         */
        if (!mumps->myid) {
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          /* perm_in is freed in MatDestroy_MUMPS() via id.perm_in */
          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = MatMatSolve_MUMPS;
  PetscFunctionReturn(0);
}
1422 
1423 /* Note the Petsc r and c permutations are ignored */
1424 #undef __FUNCT__
1425 #define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
1426 PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1427 {
1428   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
1429   PetscErrorCode ierr;
1430   Vec            b;
1431   IS             is_iden;
1432   const PetscInt M = A->rmap->N;
1433 
1434   PetscFunctionBegin;
1435   mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1436 
1437   /* Set MUMPS options from the options database */
1438   ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);
1439 
1440   ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);
1441 
1442   /* analysis phase */
1443   /*----------------*/
1444   mumps->id.job = JOB_FACTSYMBOLIC;
1445   mumps->id.n   = M;
1446   switch (mumps->id.ICNTL(18)) {
1447   case 0:  /* centralized assembled matrix input */
1448     if (!mumps->myid) {
1449       mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1450       if (mumps->id.ICNTL(6)>1) {
1451         mumps->id.a = (MumpsScalar*)mumps->val;
1452       }
1453     }
1454     break;
1455   case 3:  /* distributed assembled matrix input (size>1) */
1456     mumps->id.nz_loc = mumps->nz;
1457     mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1458     if (mumps->id.ICNTL(6)>1) {
1459       mumps->id.a_loc = (MumpsScalar*)mumps->val;
1460     }
1461     /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1462     if (!mumps->myid) {
1463       ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
1464       ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
1465     } else {
1466       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
1467       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
1468     }
1469     ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
1470     ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
1471     ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
1472     ierr = VecDestroy(&b);CHKERRQ(ierr);
1473     break;
1474   }
1475   PetscMUMPS_c(&mumps->id);
1476   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
1477 
1478   F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1479   F->ops->solve           = MatSolve_MUMPS;
1480   F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
1481   PetscFunctionReturn(0);
1482 }
1483 
/* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   MatCholeskyFactorSymbolic_MUMPS - symbolic Cholesky factorization (MUMPS analysis
   phase). Converts A to 1-based triples, runs MUMPS with job=JOB_FACTSYMBOLIC, and
   installs the numeric factorization and solve callbacks on F. The PETSc ordering r
   and the factor info are ignored; MUMPS orders the matrix itself during analysis.
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* shift=1: MUMPS expects 1-based (Fortran) indices in the triples */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input: only the host (rank 0) provides the matrix */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) { /* these prescaling options require values already during analysis */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1): each rank provides its local triples */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    /* NOTE(review): uses A->cmap->N here while the AIJ LU variant uses A->rmap->N; identical
       for the square matrices a factorization requires */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      /* non-host ranks contribute empty pieces to the scatter */
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS; /* symmetric factor: transpose solve is the same as solve */
  F->ops->matsolve              = MatMatSolve_MUMPS;
#if defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = NULL; /* inertia is only meaningful for real symmetric factorizations */
#else
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#endif
  PetscFunctionReturn(0);
}
1550 
1551 #undef __FUNCT__
1552 #define __FUNCT__ "MatView_MUMPS"
1553 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1554 {
1555   PetscErrorCode    ierr;
1556   PetscBool         iascii;
1557   PetscViewerFormat format;
1558   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1559 
1560   PetscFunctionBegin;
1561   /* check if matrix is mumps type */
1562   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1563 
1564   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1565   if (iascii) {
1566     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1567     if (format == PETSC_VIEWER_ASCII_INFO) {
1568       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1569       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1570       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1571       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1572       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1573       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1574       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1575       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1576       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1577       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1578       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1579       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1580       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1581       if (mumps->id.ICNTL(11)>0) {
1582         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1583         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1584         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1585         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1586         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1587         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1588       }
1589       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1590       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1591       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1592       /* ICNTL(15-17) not used */
1593       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1594       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1595       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1596       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1597       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1598       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1599 
1600       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1601       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1602       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1603       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1604       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1605       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1606 
1607       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1608       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1609       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1610 
1611       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1612       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1613       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1614       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1615       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1616 
1617       /* infomation local to each processor */
1618       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1619       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1620       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1621       ierr = PetscViewerFlush(viewer);
1622       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1623       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1624       ierr = PetscViewerFlush(viewer);
1625       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1626       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1627       ierr = PetscViewerFlush(viewer);
1628 
1629       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1630       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1631       ierr = PetscViewerFlush(viewer);
1632 
1633       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1634       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1635       ierr = PetscViewerFlush(viewer);
1636 
1637       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1638       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1639       ierr = PetscViewerFlush(viewer);
1640 
1641       if (mumps->ninfo && mumps->ninfo <= 40){
1642         PetscInt i;
1643         for (i=0; i<mumps->ninfo; i++){
1644           ierr = PetscViewerASCIIPrintf(viewer, "  INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr);
1645           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr);
1646           ierr = PetscViewerFlush(viewer);
1647         }
1648       }
1649 
1650 
1651       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1652 
1653       if (!mumps->myid) { /* information from the host */
1654         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1655         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1656         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1657         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1658 
1659         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1660         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1661         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1662         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1663         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1664         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1665         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1666         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1667         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1668         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1669         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1670         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1671         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1672         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1673         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1674         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1675         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1676         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1677         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1678         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1679         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1680         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1681         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1682         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1683         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1684         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1685         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1686         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1687         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1688       }
1689     }
1690   }
1691   PetscFunctionReturn(0);
1692 }
1693 
1694 #undef __FUNCT__
1695 #define __FUNCT__ "MatGetInfo_MUMPS"
1696 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1697 {
1698   Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1699 
1700   PetscFunctionBegin;
1701   info->block_size        = 1.0;
1702   info->nz_allocated      = mumps->id.INFOG(20);
1703   info->nz_used           = mumps->id.INFOG(20);
1704   info->nz_unneeded       = 0.0;
1705   info->assemblies        = 0.0;
1706   info->mallocs           = 0.0;
1707   info->memory            = 0.0;
1708   info->fill_ratio_given  = 0;
1709   info->fill_ratio_needed = 0;
1710   info->factor_mallocs    = 0;
1711   PetscFunctionReturn(0);
1712 }
1713 
1714 /* -------------------------------------------------------------------------------------------*/
1715 #undef __FUNCT__
1716 #define __FUNCT__ "MatMumpsSetSchurIndices_MUMPS"
1717 PetscErrorCode MatMumpsSetSchurIndices_MUMPS(Mat F,PetscInt size,PetscInt idxs[])
1718 {
1719   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1720   PetscErrorCode ierr;
1721 
1722   PetscFunctionBegin;
1723   if (mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS parallel Schur complements not yet supported from PETSc\n");
1724   if (mumps->id.size_schur != size) {
1725     ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
1726     mumps->id.size_schur = size;
1727     mumps->id.schur_lld = size;
1728     ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr);
1729   }
1730   ierr = PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));CHKERRQ(ierr);
1731   if (F->factortype == MAT_FACTOR_LU) {
1732     mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */
1733   } else {
1734     mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */
1735   }
1736   /* set a special value of ICNTL (not handled my MUMPS) to be used in the solve phase by PETSc */
1737   mumps->id.ICNTL(26) = -1;
1738   PetscFunctionReturn(0);
1739 }
1740 
1741 #undef __FUNCT__
1742 #define __FUNCT__ "MatMumpsSetSchurIndices"
1743 /*@
1744   MatMumpsSetSchurIndices - Set indices defining the Schur complement that MUMPS will compute during the factorization steps
1745 
1746    Logically Collective on Mat
1747 
1748    Input Parameters:
1749 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1750 .  size - size of the Schur complement indices
1751 -  idxs[] - array of Schur complement indices
1752 
1753    Notes:
1754    The user has to free the array idxs[] since the indices are copied by the routine.
1755    MUMPS Schur complement mode is currently implemented for sequential matrices.
1756 
1757    Level: advanced
1758 
1759    References: MUMPS Users' Guide
1760 
1761 .seealso: MatGetFactor(), MatMumpsCreateSchurComplement(), MatMumpsGetSchurComplement()
1762 @*/
1763 PetscErrorCode MatMumpsSetSchurIndices(Mat F,PetscInt size,PetscInt idxs[])
1764 {
1765   PetscErrorCode ierr;
1766 
1767   PetscFunctionBegin;
1768   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
1769   if (size) PetscValidIntPointer(idxs,3);
1770   ierr = PetscTryMethod(F,"MatMumpsSetSchurIndices_C",(Mat,PetscInt,PetscInt[]),(F,size,idxs));CHKERRQ(ierr);
1771   PetscFunctionReturn(0);
1772 }
1773 
1774 /* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsCreateSchurComplement_MUMPS"
/*
   MatMumpsCreateSchurComplement_MUMPS - copy the Schur complement computed by MUMPS
   into a newly created column-major MATDENSE matrix.

   MUMPS stores the Schur data according to ICNTL(19):
     1 - by rows (full when unsymmetric, lower triangular when symmetric)
     2 - lower triangular part, by columns (symmetric case)
     3 - full matrix, by columns
   Each branch below converts the corresponding layout into a full column-major array.
   The caller owns the returned matrix.
*/
PetscErrorCode MatMumpsCreateSchurComplement_MUMPS(Mat F,Mat* S)
{
  Mat            St;
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscScalar    *array;
#if defined(PETSC_USE_COMPLEX)
  PetscScalar    im = PetscSqrtScalar((PetscScalar)-1.0); /* imaginary unit, used to rebuild PetscScalar from MUMPS' (r,i) pairs */
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Schur mode must have been enabled and the raw buffer must currently belong to MUMPS */
  if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
  else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");

  ierr = MatCreate(PetscObjectComm((PetscObject)F),&St);CHKERRQ(ierr);
  ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr);
  ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr);
  ierr = MatSetUp(St);CHKERRQ(ierr);
  ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full matrix */
    if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
      PetscInt i,j,N=mumps->id.size_schur;
      /* transpose MUMPS' row-major data into the column-major dense array */
      for (i=0;i<N;i++) {
        for (j=0;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[j*N+i] = val;
        }
      }
    } else { /* stored by columns */
      /* NOTE(review): direct memcpy assumes MumpsScalar and PetscScalar share the same
         memory layout (true for real; presumed for the complex struct) — confirm */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* either full or lower-triangular (not packed) */
    if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */
      PetscInt i,j,N=mumps->id.size_schur;
      /* mirror the lower triangle into both halves of the full symmetric matrix */
      for (i=0;i<N;i++) {
        for (j=i;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[i*N+j] = val;
          array[j*N+i] = val;
        }
      }
    } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      PetscInt i,j,N=mumps->id.size_schur;
      /* mirror the stored triangle into both halves of the full symmetric matrix */
      for (i=0;i<N;i++) {
        for (j=0;j<i+1;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[i*N+j] = val;
          array[j*N+i] = val;
        }
      }
    }
  }
  ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr);
  *S = St;
  PetscFunctionReturn(0);
}
1848 
1849 #undef __FUNCT__
1850 #define __FUNCT__ "MatMumpsCreateSchurComplement"
1851 /*@
1852   MatMumpsCreateSchurComplement - Create a Schur complement matrix object using Schur data computed by MUMPS during the factorization step
1853 
1854    Logically Collective on Mat
1855 
1856    Input Parameters:
1857 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1858 .  *S - location where to return the Schur complement (MATDENSE)
1859 
1860    Notes:
1861    MUMPS Schur complement mode is currently implemented for sequential matrices.
1862    The routine provides a copy of the Schur data stored within MUMPS data strutures. The caller must destroy the object when it is no longer needed.
1863    If MatMumpsInvertSchurComplement has been called, the routine gets back the inverse
1864 
1865    Level: advanced
1866 
1867    References: MUMPS Users' Guide
1868 
1869 .seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsGetSchurComplement()
1870 @*/
1871 PetscErrorCode MatMumpsCreateSchurComplement(Mat F,Mat* S)
1872 {
1873   PetscErrorCode ierr;
1874 
1875   PetscFunctionBegin;
1876   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
1877   ierr = PetscTryMethod(F,"MatMumpsCreateSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
1878   PetscFunctionReturn(0);
1879 }
1880 
1881 /* -------------------------------------------------------------------------------------------*/
1882 #undef __FUNCT__
1883 #define __FUNCT__ "MatMumpsGetSchurComplement_MUMPS"
1884 PetscErrorCode MatMumpsGetSchurComplement_MUMPS(Mat F,Mat* S)
1885 {
1886   Mat            St;
1887   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1888   PetscErrorCode ierr;
1889 
1890   PetscFunctionBegin;
1891   if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
1892   else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
1893   else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
1894 
1895   /* It should be the responsibility of the user to handle different ICNTL(19) cases if they want to work with the raw data */
1896   /* should I also add errors when the Schur complement has been already factored? */
1897   ierr = MatCreateSeqDense(PetscObjectComm((PetscObject)F),mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&St);CHKERRQ(ierr);
1898   *S = St;
1899   mumps->schur_restored = PETSC_FALSE;
1900   PetscFunctionReturn(0);
1901 }
1902 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetSchurComplement"
/*@
  MatMumpsGetSchurComplement - Get a Schur complement matrix object using the current status of the raw Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Output Parameter:
.  *S - location where to return the Schur complement (MATDENSE)

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine uses the pointer to the raw data of the Schur Complement stored within MUMPS data structures. The caller should call MatMumpsRestoreSchurComplement when the object is no longer needed.
   If MatMumpsInvertSchurComplement has been called, the routine gets back the inverse

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsRestoreSchurComplement(), MatMumpsCreateSchurComplement()
@*/
PetscErrorCode MatMumpsGetSchurComplement(Mat F,Mat* S)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* PetscUseMethod: errors out if the factor type does not provide the Schur interface */
  ierr = PetscUseMethod(F,"MatMumpsGetSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1934 
/* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsRestoreSchurComplement_MUMPS"
/* Implementation of MatMumpsRestoreSchurComplement(): destroys the dense wrapper handed out
   by MatMumpsGetSchurComplement() and flags the raw MUMPS Schur data as available again */
PetscErrorCode MatMumpsRestoreSchurComplement_MUMPS(Mat F,Mat* S)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* ordering checks: Schur mode must be active, indices set, and the wrapper currently checked out */
  if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
  else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  else if (mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has been already restored");
  ierr = MatDestroy(S);CHKERRQ(ierr); /* only the wrapper is destroyed; MUMPS keeps owning the raw buffer */
  *S = NULL;
  mumps->schur_restored = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1952 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsRestoreSchurComplement"
/*@
  MatMumpsRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to MatGetSchurComplement

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  *S - location where the Schur complement is stored; set to NULL on return

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsGetSchurComplement(), MatMumpsCreateSchurComplement()
@*/
PetscErrorCode MatMumpsRestoreSchurComplement(Mat F,Mat* S)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  ierr = PetscUseMethod(F,"MatMumpsRestoreSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1982 
/* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsInvertSchurComplement_MUMPS"
/* Implementation of MatMumpsInvertSchurComplement(): inverts the raw Schur data in place.
   Silently a no-op when Schur complement mode is not active (matches PetscTryMethod semantics
   used by the public wrapper). */
PetscErrorCode MatMumpsInvertSchurComplement_MUMPS(Mat F)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) { /* do nothing */
    PetscFunctionReturn(0);
  }
  if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
  ierr = MatMumpsInvertSchur_Private(mumps);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2000 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsInvertSchurComplement"
/*@
  MatMumpsInvertSchurComplement - Invert the raw Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine uses the pointer to the raw data of the Schur Complement stored within MUMPS data structures.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices()
@*/
PetscErrorCode MatMumpsInvertSchurComplement(Mat F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* PetscTryMethod: silently a no-op on factor types without the MUMPS interface */
  ierr = PetscTryMethod(F,"MatMumpsInvertSchurComplement_C",(Mat),(F));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2030 
/* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSolveSchurComplement_MUMPS"
/* Implementation of MatMumpsSolveSchurComplement(): solves the Schur complement system by
   temporarily aliasing the MUMPS reduced right-hand side (id.redrhs) and the interface
   solution buffer (schur_sol) to the arrays of the user-supplied vectors, then restoring
   the original buffers.  The save/restore must bracket MatMumpsSolveSchur_Private exactly. */
PetscErrorCode MatMumpsSolveSchurComplement_MUMPS(Mat F, Vec rhs, Vec sol)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  MumpsScalar    *orhs;
  PetscScalar    *osol,*nrhs,*nsol;
  PetscInt       orhs_size,osol_size,olrhs_size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* ordering checks: Schur mode active, indices set, raw data not checked out to the user */
  if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
  if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");

  /* swap pointers: save the MUMPS-owned buffers and sizes so they can be put back afterwards */
  orhs = mumps->id.redrhs;
  olrhs_size = mumps->id.lredrhs;
  orhs_size = mumps->sizeredrhs;
  osol = mumps->schur_sol;
  osol_size = mumps->schur_sizesol;
  ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = (MumpsScalar*)nrhs;
  ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr);
  mumps->id.lredrhs = mumps->sizeredrhs; /* leading dimension must match the new buffer */
  mumps->schur_sol = nsol;
  ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr);

  /* solve Schur complement */
  mumps->id.nrhs = 1;
  ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
  /* restore pointers */
  ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = orhs;
  mumps->id.lredrhs = olrhs_size;
  mumps->sizeredrhs = orhs_size;
  mumps->schur_sol = osol;
  mumps->schur_sizesol = osol_size;
  PetscFunctionReturn(0);
}
2074 
2075 #undef __FUNCT__
2076 #define __FUNCT__ "MatMumpsSolveSchurComplement"
2077 /*@
2078   MatMumpsSolveSchurComplement - Solve the Schur complement system computed by MUMPS during the factorization step
2079 
2080    Logically Collective on Mat
2081 
2082    Input Parameters:
2083 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2084 .  rhs - location where the right hand side of the Schur complement system is stored
2085 -  sol - location where the solution of the Schur complement system has to be returned
2086 
2087    Notes:
2088    MUMPS Schur complement mode is currently implemented for sequential matrices.
2089    The sizes of the vectors should match the size of the Schur complement
2090 
2091    Level: advanced
2092 
2093    References: MUMPS Users' Guide
2094 
2095 .seealso: MatGetFactor(), MatMumpsSetSchurIndices()
2096 @*/
2097 PetscErrorCode MatMumpsSolveSchurComplement(Mat F, Vec rhs, Vec sol)
2098 {
2099   PetscErrorCode ierr;
2100 
2101   PetscFunctionBegin;
2102   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
2103   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
2104   PetscValidHeaderSpecific(sol,VEC_CLASSID,2);
2105   PetscCheckSameComm(F,1,rhs,2);
2106   PetscCheckSameComm(F,1,sol,3);
2107   ierr = PetscUseMethod(F,"MatMumpsSolveSchurComplement_C",(Mat,Vec,Vec),(F,rhs,sol));CHKERRQ(ierr);
2108   PetscFunctionReturn(0);
2109 }
2110 
2111 /* -------------------------------------------------------------------------------------------*/
2112 #undef __FUNCT__
2113 #define __FUNCT__ "MatMumpsSolveSchurComplementTranspose_MUMPS"
2114 PetscErrorCode MatMumpsSolveSchurComplementTranspose_MUMPS(Mat F, Vec rhs, Vec sol)
2115 {
2116   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
2117   MumpsScalar    *orhs;
2118   PetscScalar    *osol,*nrhs,*nsol;
2119   PetscInt       orhs_size,osol_size;
2120   PetscErrorCode ierr;
2121 
2122   PetscFunctionBegin;
2123   if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
2124   else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
2125   if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
2126 
2127   /* swap pointers */
2128   orhs = mumps->id.redrhs;
2129   orhs_size = mumps->sizeredrhs;
2130   osol = mumps->schur_sol;
2131   osol_size = mumps->schur_sizesol;
2132   ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr);
2133   ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr);
2134   mumps->id.redrhs = (MumpsScalar*)nrhs;
2135   ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr);
2136   mumps->schur_sol = nsol;
2137   ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr);
2138 
2139   /* solve Schur complement */
2140   mumps->id.nrhs = 1;
2141   mumps->id.ICNTL(9) = 0;
2142   ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
2143   mumps->id.ICNTL(9) = 1;
2144   /* restore pointers */
2145   ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr);
2146   ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr);
2147   mumps->id.redrhs = orhs;
2148   mumps->sizeredrhs = orhs_size;
2149   mumps->schur_sol = osol;
2150   mumps->schur_sizesol = osol_size;
2151   PetscFunctionReturn(0);
2152 }
2153 
2154 #undef __FUNCT__
2155 #define __FUNCT__ "MatMumpsSolveSchurComplementTranspose"
2156 /*@
2157   MatMumpsSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed by MUMPS during the factorization step
2158 
2159    Logically Collective on Mat
2160 
2161    Input Parameters:
2162 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2163 .  rhs - location where the right hand side of the Schur complement system is stored
2164 -  sol - location where the solution of the Schur complement system has to be returned
2165 
2166    Notes:
2167    MUMPS Schur complement mode is currently implemented for sequential matrices.
2168    The sizes of the vectors should match the size of the Schur complement
2169 
2170    Level: advanced
2171 
2172    References: MUMPS Users' Guide
2173 
2174 .seealso: MatGetFactor(), MatMumpsSetSchurIndices()
2175 @*/
2176 PetscErrorCode MatMumpsSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
2177 {
2178   PetscErrorCode ierr;
2179 
2180   PetscFunctionBegin;
2181   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
2182   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
2183   PetscValidHeaderSpecific(sol,VEC_CLASSID,2);
2184   PetscCheckSameComm(F,1,rhs,2);
2185   PetscCheckSameComm(F,1,sol,3);
2186   ierr = PetscUseMethod(F,"MatMumpsSolveSchurComplementTranspose_C",(Mat,Vec,Vec),(F,rhs,sol));CHKERRQ(ierr);
2187   PetscFunctionReturn(0);
2188 }
2189 
2190 /* -------------------------------------------------------------------------------------------*/
2191 #undef __FUNCT__
2192 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS"
2193 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
2194 {
2195   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2196 
2197   PetscFunctionBegin;
2198   mumps->id.ICNTL(icntl) = ival;
2199   PetscFunctionReturn(0);
2200 }
2201 
2202 #undef __FUNCT__
2203 #define __FUNCT__ "MatMumpsGetIcntl_MUMPS"
2204 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
2205 {
2206   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2207 
2208   PetscFunctionBegin;
2209   *ival = mumps->id.ICNTL(icntl);
2210   PetscFunctionReturn(0);
2211 }
2212 
2213 #undef __FUNCT__
2214 #define __FUNCT__ "MatMumpsSetIcntl"
2215 /*@
2216   MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
2217 
2218    Logically Collective on Mat
2219 
2220    Input Parameters:
2221 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2222 .  icntl - index of MUMPS parameter array ICNTL()
2223 -  ival - value of MUMPS ICNTL(icntl)
2224 
2225   Options Database:
2226 .   -mat_mumps_icntl_<icntl> <ival>
2227 
2228    Level: beginner
2229 
2230    References: MUMPS Users' Guide
2231 
2232 .seealso: MatGetFactor()
2233 @*/
2234 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
2235 {
2236   PetscErrorCode ierr;
2237 
2238   PetscFunctionBegin;
2239   PetscValidLogicalCollectiveInt(F,icntl,2);
2240   PetscValidLogicalCollectiveInt(F,ival,3);
2241   ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
2242   PetscFunctionReturn(0);
2243 }
2244 
2245 #undef __FUNCT__
2246 #define __FUNCT__ "MatMumpsGetIcntl"
2247 /*@
2248   MatMumpsGetIcntl - Get MUMPS parameter ICNTL()
2249 
2250    Logically Collective on Mat
2251 
2252    Input Parameters:
2253 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2254 -  icntl - index of MUMPS parameter array ICNTL()
2255 
2256   Output Parameter:
2257 .  ival - value of MUMPS ICNTL(icntl)
2258 
2259    Level: beginner
2260 
2261    References: MUMPS Users' Guide
2262 
2263 .seealso: MatGetFactor()
2264 @*/
2265 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
2266 {
2267   PetscErrorCode ierr;
2268 
2269   PetscFunctionBegin;
2270   PetscValidLogicalCollectiveInt(F,icntl,2);
2271   PetscValidIntPointer(ival,3);
2272   ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2273   PetscFunctionReturn(0);
2274 }
2275 
2276 /* -------------------------------------------------------------------------------------------*/
2277 #undef __FUNCT__
2278 #define __FUNCT__ "MatMumpsSetCntl_MUMPS"
2279 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
2280 {
2281   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2282 
2283   PetscFunctionBegin;
2284   mumps->id.CNTL(icntl) = val;
2285   PetscFunctionReturn(0);
2286 }
2287 
2288 #undef __FUNCT__
2289 #define __FUNCT__ "MatMumpsGetCntl_MUMPS"
2290 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
2291 {
2292   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2293 
2294   PetscFunctionBegin;
2295   *val = mumps->id.CNTL(icntl);
2296   PetscFunctionReturn(0);
2297 }
2298 
2299 #undef __FUNCT__
2300 #define __FUNCT__ "MatMumpsSetCntl"
2301 /*@
2302   MatMumpsSetCntl - Set MUMPS parameter CNTL()
2303 
2304    Logically Collective on Mat
2305 
2306    Input Parameters:
2307 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2308 .  icntl - index of MUMPS parameter array CNTL()
2309 -  val - value of MUMPS CNTL(icntl)
2310 
2311   Options Database:
2312 .   -mat_mumps_cntl_<icntl> <val>
2313 
2314    Level: beginner
2315 
2316    References: MUMPS Users' Guide
2317 
2318 .seealso: MatGetFactor()
2319 @*/
2320 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
2321 {
2322   PetscErrorCode ierr;
2323 
2324   PetscFunctionBegin;
2325   PetscValidLogicalCollectiveInt(F,icntl,2);
2326   PetscValidLogicalCollectiveReal(F,val,3);
2327   ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
2328   PetscFunctionReturn(0);
2329 }
2330 
2331 #undef __FUNCT__
2332 #define __FUNCT__ "MatMumpsGetCntl"
2333 /*@
2334   MatMumpsGetCntl - Get MUMPS parameter CNTL()
2335 
2336    Logically Collective on Mat
2337 
2338    Input Parameters:
2339 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2340 -  icntl - index of MUMPS parameter array CNTL()
2341 
2342   Output Parameter:
2343 .  val - value of MUMPS CNTL(icntl)
2344 
2345    Level: beginner
2346 
2347    References: MUMPS Users' Guide
2348 
2349 .seealso: MatGetFactor()
2350 @*/
2351 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
2352 {
2353   PetscErrorCode ierr;
2354 
2355   PetscFunctionBegin;
2356   PetscValidLogicalCollectiveInt(F,icntl,2);
2357   PetscValidRealPointer(val,3);
2358   ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2359   PetscFunctionReturn(0);
2360 }
2361 
2362 #undef __FUNCT__
2363 #define __FUNCT__ "MatMumpsGetInfo_MUMPS"
2364 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
2365 {
2366   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2367 
2368   PetscFunctionBegin;
2369   *info = mumps->id.INFO(icntl);
2370   PetscFunctionReturn(0);
2371 }
2372 
2373 #undef __FUNCT__
2374 #define __FUNCT__ "MatMumpsGetInfog_MUMPS"
2375 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
2376 {
2377   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2378 
2379   PetscFunctionBegin;
2380   *infog = mumps->id.INFOG(icntl);
2381   PetscFunctionReturn(0);
2382 }
2383 
2384 #undef __FUNCT__
2385 #define __FUNCT__ "MatMumpsGetRinfo_MUMPS"
2386 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
2387 {
2388   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2389 
2390   PetscFunctionBegin;
2391   *rinfo = mumps->id.RINFO(icntl);
2392   PetscFunctionReturn(0);
2393 }
2394 
2395 #undef __FUNCT__
2396 #define __FUNCT__ "MatMumpsGetRinfog_MUMPS"
2397 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
2398 {
2399   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2400 
2401   PetscFunctionBegin;
2402   *rinfog = mumps->id.RINFOG(icntl);
2403   PetscFunctionReturn(0);
2404 }
2405 
2406 #undef __FUNCT__
2407 #define __FUNCT__ "MatMumpsGetInfo"
2408 /*@
2409   MatMumpsGetInfo - Get MUMPS parameter INFO()
2410 
2411    Logically Collective on Mat
2412 
2413    Input Parameters:
2414 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2415 -  icntl - index of MUMPS parameter array INFO()
2416 
2417   Output Parameter:
2418 .  ival - value of MUMPS INFO(icntl)
2419 
2420    Level: beginner
2421 
2422    References: MUMPS Users' Guide
2423 
2424 .seealso: MatGetFactor()
2425 @*/
2426 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
2427 {
2428   PetscErrorCode ierr;
2429 
2430   PetscFunctionBegin;
2431   PetscValidIntPointer(ival,3);
2432   ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2433   PetscFunctionReturn(0);
2434 }
2435 
2436 #undef __FUNCT__
2437 #define __FUNCT__ "MatMumpsGetInfog"
2438 /*@
2439   MatMumpsGetInfog - Get MUMPS parameter INFOG()
2440 
2441    Logically Collective on Mat
2442 
2443    Input Parameters:
2444 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2445 -  icntl - index of MUMPS parameter array INFOG()
2446 
2447   Output Parameter:
2448 .  ival - value of MUMPS INFOG(icntl)
2449 
2450    Level: beginner
2451 
2452    References: MUMPS Users' Guide
2453 
2454 .seealso: MatGetFactor()
2455 @*/
2456 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
2457 {
2458   PetscErrorCode ierr;
2459 
2460   PetscFunctionBegin;
2461   PetscValidIntPointer(ival,3);
2462   ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2463   PetscFunctionReturn(0);
2464 }
2465 
2466 #undef __FUNCT__
2467 #define __FUNCT__ "MatMumpsGetRinfo"
2468 /*@
2469   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
2470 
2471    Logically Collective on Mat
2472 
2473    Input Parameters:
2474 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2475 -  icntl - index of MUMPS parameter array RINFO()
2476 
2477   Output Parameter:
2478 .  val - value of MUMPS RINFO(icntl)
2479 
2480    Level: beginner
2481 
2482    References: MUMPS Users' Guide
2483 
2484 .seealso: MatGetFactor()
2485 @*/
2486 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
2487 {
2488   PetscErrorCode ierr;
2489 
2490   PetscFunctionBegin;
2491   PetscValidRealPointer(val,3);
2492   ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2493   PetscFunctionReturn(0);
2494 }
2495 
2496 #undef __FUNCT__
2497 #define __FUNCT__ "MatMumpsGetRinfog"
2498 /*@
2499   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
2500 
2501    Logically Collective on Mat
2502 
2503    Input Parameters:
2504 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2505 -  icntl - index of MUMPS parameter array RINFOG()
2506 
2507   Output Parameter:
2508 .  val - value of MUMPS RINFOG(icntl)
2509 
2510    Level: beginner
2511 
2512    References: MUMPS Users' Guide
2513 
2514 .seealso: MatGetFactor()
2515 @*/
2516 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
2517 {
2518   PetscErrorCode ierr;
2519 
2520   PetscFunctionBegin;
2521   PetscValidRealPointer(val,3);
2522   ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2523   PetscFunctionReturn(0);
2524 }
2525 
2526 /*MC
2527   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
2528   distributed and sequential matrices via the external package MUMPS.
2529 
2530   Works with MATAIJ and MATSBAIJ matrices
2531 
2532   Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch  to have PETSc installed with MUMPS
2533 
  Use -pc_type cholesky or lu -pc_factor_mat_solver_package mumps to use this direct solver
2535 
2536   Options Database Keys:
2537 +  -mat_mumps_icntl_1 <6>: ICNTL(1): output stream for error messages (None)
2538 .  -mat_mumps_icntl_2 <0>: ICNTL(2): output stream for diagnostic printing, statistics, and warning (None)
2539 .  -mat_mumps_icntl_3 <0>: ICNTL(3): output stream for global information, collected on the host (None)
2540 .  -mat_mumps_icntl_4 <0>: ICNTL(4): level of printing (0 to 4) (None)
2541 .  -mat_mumps_icntl_6 <7>: ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7) (None)
2542 .  -mat_mumps_icntl_7 <7>: ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis (None)
2543 .  -mat_mumps_icntl_8 <77>: ICNTL(8): scaling strategy (-2 to 8 or 77) (None)
2544 .  -mat_mumps_icntl_10 <0>: ICNTL(10): max num of refinements (None)
2545 .  -mat_mumps_icntl_11 <0>: ICNTL(11): statistics related to an error analysis (via -ksp_view) (None)
2546 .  -mat_mumps_icntl_12 <1>: ICNTL(12): an ordering strategy for symmetric matrices (0 to 3) (None)
2547 .  -mat_mumps_icntl_13 <0>: ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting (None)
2548 .  -mat_mumps_icntl_14 <20>: ICNTL(14): percentage increase in the estimated working space (None)
2549 .  -mat_mumps_icntl_19 <0>: ICNTL(19): computes the Schur complement (None)
2550 .  -mat_mumps_icntl_22 <0>: ICNTL(22): in-core/out-of-core factorization and solve (0 or 1) (None)
2551 .  -mat_mumps_icntl_23 <0>: ICNTL(23): max size of the working memory (MB) that can allocate per processor (None)
2552 .  -mat_mumps_icntl_24 <0>: ICNTL(24): detection of null pivot rows (0 or 1) (None)
2553 .  -mat_mumps_icntl_25 <0>: ICNTL(25): compute a solution of a deficient matrix and a null space basis (None)
2554 .  -mat_mumps_icntl_26 <0>: ICNTL(26): drives the solution phase if a Schur complement matrix (None)
.  -mat_mumps_icntl_28 <1>: ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering (None)
2556 .  -mat_mumps_icntl_29 <0>: ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis (None)
2557 .  -mat_mumps_icntl_30 <0>: ICNTL(30): compute user-specified set of entries in inv(A) (None)
2558 .  -mat_mumps_icntl_31 <0>: ICNTL(31): indicates which factors may be discarded during factorization (None)
2559 .  -mat_mumps_icntl_33 <0>: ICNTL(33): compute determinant (None)
2560 .  -mat_mumps_cntl_1 <0.01>: CNTL(1): relative pivoting threshold (None)
2561 .  -mat_mumps_cntl_2 <1.49012e-08>: CNTL(2): stopping criterion of refinement (None)
2562 .  -mat_mumps_cntl_3 <0>: CNTL(3): absolute pivoting threshold (None)
2563 .  -mat_mumps_cntl_4 <-1>: CNTL(4): value for static pivoting (None)
2564 -  -mat_mumps_cntl_5 <0>: CNTL(5): fixation for null pivots (None)
2565 
2566   Level: beginner
2567 
2568 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
2569 
2570 M*/
2571 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Returns the solver-package name (MATSOLVERMUMPS) for factor matrices created by this interface */
static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
2580 
/* MatGetFactor for Seq and MPI AIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_mumps"
/* Creates the MUMPS factor matrix B for an AIJ matrix A: preallocates an empty shell,
   registers all MUMPS-specific methods on it, selects the triple-conversion routine and
   the MUMPS symmetry flag according to the factor type, and initializes MUMPS. */
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  if (isSeqAIJ) {
    ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  } else {
    ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
  }

  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);

  B->ops->view        = MatView_MUMPS;
  B->ops->getinfo     = MatGetInfo_MUMPS;
  B->ops->getdiagonal = MatGetDiagonal_MUMPS;

  /* register the MUMPS-specific query/control methods dispatched via PetscTryMethod/PetscUseMethod */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);

  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype            = MAT_FACTOR_LU;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0; /* MUMPS SYM=0: unsymmetric */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
#if defined(PETSC_USE_COMPLEX)
    mumps->sym = 2; /* complex: no SPD path, use general symmetric (SYM=2) */
#else
    if (A->spd_set && A->spd) mumps->sym = 1; /* MUMPS SYM=1: symmetric positive definite */
    else                      mumps->sym = 2; /* MUMPS SYM=2: general symmetric */
#endif
  }

  /* chain the destroy routine so MUMPS data is freed before the base matrix */
  mumps->isAIJ    = PETSC_TRUE;
  mumps->Destroy  = B->ops->destroy;
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
2657 
2658 /* MatGetFactor for Seq and MPI SBAIJ matrices */
2659 #undef __FUNCT__
2660 #define __FUNCT__ "MatGetFactor_sbaij_mumps"
2661 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
2662 {
2663   Mat            B;
2664   PetscErrorCode ierr;
2665   Mat_MUMPS      *mumps;
2666   PetscBool      isSeqSBAIJ;
2667 
2668   PetscFunctionBegin;
2669   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
2670   if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
2671   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
2672   /* Create the factorization matrix */
2673   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2674   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2675   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
2676   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2677   if (isSeqSBAIJ) {
2678     ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);
2679 
2680     mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
2681   } else {
2682     ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);
2683 
2684     mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
2685   }
2686 
2687   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2688   B->ops->view                   = MatView_MUMPS;
2689   B->ops->getdiagonal            = MatGetDiagonal_MUMPS;
2690 
2691   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
2692   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2693   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2694   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2695   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2696 
2697   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2698   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2699   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2700   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2701 
2702   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
2703   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
2704   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2705   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
2706   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
2707   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
2708   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);
2709 
2710   B->factortype = MAT_FACTOR_CHOLESKY;
2711 #if defined(PETSC_USE_COMPLEX)
2712   mumps->sym = 2;
2713 #else
2714   if (A->spd_set && A->spd) mumps->sym = 1;
2715   else                      mumps->sym = 2;
2716 #endif
2717 
2718   mumps->isAIJ    = PETSC_FALSE;
2719   mumps->Destroy  = B->ops->destroy;
2720   B->ops->destroy = MatDestroy_MUMPS;
2721   B->spptr        = (void*)mumps;
2722 
2723   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2724 
2725   *F = B;
2726   PetscFunctionReturn(0);
2727 }
2728 
2729 #undef __FUNCT__
2730 #define __FUNCT__ "MatGetFactor_baij_mumps"
2731 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
2732 {
2733   Mat            B;
2734   PetscErrorCode ierr;
2735   Mat_MUMPS      *mumps;
2736   PetscBool      isSeqBAIJ;
2737 
2738   PetscFunctionBegin;
2739   /* Create the factorization matrix */
2740   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
2741   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2742   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2743   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
2744   if (isSeqBAIJ) {
2745     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr);
2746   } else {
2747     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
2748   }
2749 
2750   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2751   if (ftype == MAT_FACTOR_LU) {
2752     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
2753     B->factortype            = MAT_FACTOR_LU;
2754     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
2755     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
2756     mumps->sym = 0;
2757   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
2758 
2759   B->ops->view        = MatView_MUMPS;
2760   B->ops->getdiagonal = MatGetDiagonal_MUMPS;
2761 
2762   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
2763   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2764   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2765   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2766   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2767 
2768   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2769   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2770   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2771   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2772 
2773   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
2774   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
2775   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2776   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
2777   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
2778   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
2779   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);
2780 
2781   mumps->isAIJ    = PETSC_TRUE;
2782   mumps->Destroy  = B->ops->destroy;
2783   B->ops->destroy = MatDestroy_MUMPS;
2784   B->spptr        = (void*)mumps;
2785 
2786   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2787 
2788   *F = B;
2789   PetscFunctionReturn(0);
2790 }
2791 
/* Forward declarations of the factory routines registered with the solver-package
   registry in MatSolverPackageRegister_MUMPS() below */
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*);
PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat,MatFactorType,Mat*);
2795 
2796 #undef __FUNCT__
2797 #define __FUNCT__ "MatSolverPackageRegister_MUMPS"
2798 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void)
2799 {
2800   PetscErrorCode ierr;
2801 
2802   PetscFunctionBegin;
2803   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2804   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2805   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2806   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2807   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2808   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2809   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2810   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2811   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2812   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2813   PetscFunctionReturn(0);
2814 }
2815 
2816