xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision bebe2cf65d55febe21a5af8db2bd2e168caaa2e7)
1 
2 /*
3     Provides an interface to the MUMPS sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 #include <petscblaslapack.h>
9 
10 EXTERN_C_BEGIN
11 #if defined(PETSC_USE_COMPLEX)
12 #if defined(PETSC_USE_REAL_SINGLE)
13 #include <cmumps_c.h>
14 #else
15 #include <zmumps_c.h>
16 #endif
17 #else
18 #if defined(PETSC_USE_REAL_SINGLE)
19 #include <smumps_c.h>
20 #else
21 #include <dmumps_c.h>
22 #endif
23 #endif
24 EXTERN_C_END
25 #define JOB_INIT -1
26 #define JOB_FACTSYMBOLIC 1
27 #define JOB_FACTNUMERIC 2
28 #define JOB_SOLVE 3
29 #define JOB_END -2
30 
31 /* calls to MUMPS */
32 #if defined(PETSC_USE_COMPLEX)
33 #if defined(PETSC_USE_REAL_SINGLE)
34 #define PetscMUMPS_c cmumps_c
35 #else
36 #define PetscMUMPS_c zmumps_c
37 #endif
38 #else
39 #if defined(PETSC_USE_REAL_SINGLE)
40 #define PetscMUMPS_c smumps_c
41 #else
42 #define PetscMUMPS_c dmumps_c
43 #endif
44 #endif
45 
46 /* declare MumpsScalar */
47 #if defined(PETSC_USE_COMPLEX)
48 #if defined(PETSC_USE_REAL_SINGLE)
49 #define MumpsScalar mumps_complex
50 #else
51 #define MumpsScalar mumps_double_complex
52 #endif
53 #else
54 #define MumpsScalar PetscScalar
55 #endif
56 
57 /* macros s.t. indices match MUMPS documentation */
58 #define ICNTL(I) icntl[(I)-1]
59 #define CNTL(I) cntl[(I)-1]
60 #define INFOG(I) infog[(I)-1]
61 #define INFO(I) info[(I)-1]
62 #define RINFOG(I) rinfog[(I)-1]
63 #define RINFO(I) rinfo[(I)-1]
64 
typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;   /* MUMPS control/data structure (single-precision complex) */
#else
  ZMUMPS_STRUC_C id;   /* MUMPS control/data structure (double-precision complex) */
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;   /* MUMPS control/data structure (single-precision real) */
#else
  DMUMPS_STRUC_C id;   /* MUMPS control/data structure (double-precision real) */
#endif
#endif

  MatStructure matstruc;             /* whether the factorization reuses the previous nonzero pattern */
  PetscMPIInt  myid,size;            /* rank and size of comm_mumps */
  PetscInt     *irn,*jcn,nz,sym;     /* matrix in COO (triples) form: row/col indices, local nonzero count, symmetry flag */
  PetscScalar  *val;                 /* values paired with irn/jcn (sometimes aliased into the irn allocation) */
  MPI_Comm     comm_mumps;           /* duplicated communicator handed to MUMPS */
  PetscBool    isAIJ,CleanUpMUMPS;   /* input matrix format flag; whether MatDestroy must terminate the MUMPS instance */
  PetscInt     ICNTL9_pre;           /* check if ICNTL(9) is changed from previous MatSolve */
  VecScatter   scat_rhs, scat_sol;   /* used by MatSolve() */
  Vec          b_seq,x_seq;          /* sequential work vectors for the centralized rhs/solution */
  PetscInt     ninfo,*info;          /* display INFO */
  PetscBool    schur_second_solve;   /* set between the condensation and expansion phases of a Schur solve */
  PetscInt     sizeredrhs;           /* allocated size of id.redrhs (reduced right-hand sides) */
  PetscInt     *schur_pivots;        /* LAPACK pivot indices from GETRF/SYTRF on the Schur complement */
  PetscInt     schur_B_lwork;        /* LAPACK workspace size used for the Schur factorization/inversion */
  PetscScalar  *schur_work;          /* LAPACK workspace */
  PetscScalar  *schur_sol;           /* work array holding the solution of the Schur-complement system */
  PetscInt     schur_sizesol;        /* allocated size of schur_sol */
  PetscBool    schur_restored;       /* Schur matrix has been restored (see MatMumpsResetSchur_Private) */
  PetscBool    schur_factored;       /* Schur complement has been factored with LAPACK */
  PetscBool    schur_inverted;       /* Schur complement has been explicitly inverted */

  PetscErrorCode (*Destroy)(Mat);                                                              /* format-specific destroy, chained from MatDestroy_MUMPS */
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);   /* converts the PETSc matrix to MUMPS triples */
} Mat_MUMPS;
104 
105 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
106 
107 #undef __FUNCT__
108 #define __FUNCT__ "MatMumpsResetSchur_Private"
109 static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
115   ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
116   ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
117   ierr = PetscFree(mumps->schur_pivots);CHKERRQ(ierr);
118   ierr = PetscFree(mumps->schur_work);CHKERRQ(ierr);
119   if (!mumps->schur_restored) {
120     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
121   }
122   mumps->id.size_schur = 0;
123   mumps->id.ICNTL(19) = 0;
124   PetscFunctionReturn(0);
125 }
126 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsFactorSchur_Private"
/*
  Factor the dense Schur complement stored in mumps->id.schur using LAPACK.

  No-op if the factorization was already computed (mumps->schur_factored).
  The LAPACK routine is chosen from the symmetry flags:
    - mumps->sym == 0   : LU factorization (GETRF) of the full matrix
    - mumps->id.sym == 2: symmetric indefinite factorization (SYTRF)
    - otherwise         : Cholesky factorization (POTRF)
  For the symmetric cases, ICNTL(19) tells how MUMPS returned the matrix:
  2 or 3 -> lower triangular stored by columns or full ("L"),
  1      -> lower triangular stored by rows, i.e. upper by columns ("U").
*/
static PetscErrorCode MatMumpsFactorSchur_Private(Mat_MUMPS* mumps)
{
  PetscBLASInt   B_N,B_ierr,B_slda;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->schur_factored) {
    PetscFunctionReturn(0);
  }
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
    if (!mumps->schur_pivots) {
      ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
    }
    ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
    PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&B_N,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&B_ierr));
    ierr = PetscFPTrapPop();CHKERRQ(ierr);
    if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)B_ierr);
  } else { /* either full or lower-triangular (not packed) */
    char ord[2];
    if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
      sprintf(ord,"L");
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      sprintf(ord,"U");
    }
    if (mumps->id.sym == 2) {
      if (!mumps->schur_pivots) {
        PetscScalar  lwork;

        ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
        /* workspace query: schur_B_lwork = -1 makes SYTRF return the optimal size in lwork */
        mumps->schur_B_lwork=-1;
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to SYTRF Lapack routine %d",(int)B_ierr);
        ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
        ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRF Lapack routine %d",(int)B_ierr);
    } else {
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
    }
  }
  mumps->schur_factored = PETSC_TRUE;
  PetscFunctionReturn(0);
}
182 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsInvertSchur_Private"
/*
  Explicitly invert the (already factored) dense Schur complement with LAPACK.
  Factors it first if needed via MatMumpsFactorSchur_Private().

  The inversion routine matches the factorization used there:
  GETRI after GETRF, SYTRI after SYTRF, POTRI after POTRF.

  NOTE(review): the xxTRI calls below pass B_N as the leading dimension while
  the factorization used B_slda (id.schur_lld) — correct only when the two are
  equal; verify against how schur_lld is set at factorization time.
*/
static PetscErrorCode MatMumpsInvertSchur_Private(Mat_MUMPS* mumps)
{
  PetscBLASInt   B_N,B_ierr,B_slda;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
    if (!mumps->schur_work) {
      PetscScalar lwork;

      /* workspace query: schur_B_lwork = -1 makes GETRI return the optimal size in lwork */
      mumps->schur_B_lwork = -1;
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to GETRI Lapack routine %d",(int)B_ierr);
      ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
      ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
    }
    ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
    PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
    ierr = PetscFPTrapPop();CHKERRQ(ierr);
    if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRI Lapack routine %d",(int)B_ierr);
  } else { /* either full or lower-triangular (not packed) */
    char ord[2];
    if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
      sprintf(ord,"L");
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      sprintf(ord,"U");
    }
    if (mumps->id.sym == 2) {
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKsytri",LAPACKsytri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRI Lapack routine %d",(int)B_ierr);
    } else {
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
    }
  }
  mumps->schur_inverted = PETSC_TRUE;
  PetscFunctionReturn(0);
}
232 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSolveSchur_Private"
/*
  Solve the dense Schur-complement system S x = redrhs with LAPACK.

  Input:
    mumps         - the MUMPS interface context; the right-hand sides are in
                    mumps->id.redrhs (id.nrhs columns, leading dimension id.lredrhs)
    sol_in_redrhs - PETSC_TRUE: the solution overwrites id.redrhs
                    PETSC_FALSE: the solution is left in mumps->schur_sol

  Two paths:
    - If S has been explicitly inverted (schur_inverted), the solve is a
      dense multiply (GEMM/SYMM) into schur_sol, optionally copied back.
    - Otherwise the factored S is used via GETRS/SYTRS/POTRS, which solve
      in place in id.redrhs; when the caller wants the solution elsewhere,
      id.redrhs is temporarily swapped with schur_sol and restored at the end.
*/
static PetscErrorCode MatMumpsSolveSchur_Private(Mat_MUMPS* mumps, PetscBool sol_in_redrhs)
{
  PetscBLASInt   B_N,B_Nrhs,B_ierr,B_slda,B_rlda;
  PetscScalar    one=1.,zero=0.;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.nrhs,&B_Nrhs);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.lredrhs,&B_rlda);CHKERRQ(ierr);
  if (mumps->schur_inverted) {
    PetscInt sizesol = B_Nrhs*B_N;
    /* (re)allocate the output buffer if it is absent or too small */
    if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
      ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
      ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
      mumps->schur_sizesol = sizesol;
    }
    if (!mumps->sym) {
      char type[2];
      /* the stored inverse may be the transpose of what we need, depending on
         the MUMPS storage (ICNTL(19)) and solve direction (ICNTL(9)) */
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      PetscStackCallBLAS("BLASgemm",BLASgemm_(type,"N",&B_N,&B_Nrhs,&B_N,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    } else {
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      PetscStackCallBLAS("BLASsymm",BLASsymm_("L",ord,&B_N,&B_Nrhs,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    }
    if (sol_in_redrhs) {
      /* caller wants the solution back in MUMPS' reduced rhs array */
      ierr = PetscMemcpy(mumps->id.redrhs,mumps->schur_sol,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* Schur complement has not been inverted */
    MumpsScalar *orhs=NULL;

    if (!sol_in_redrhs) {
      PetscInt sizesol = B_Nrhs*B_N;
      if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
        ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
        ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
        mumps->schur_sizesol = sizesol;
      }
      /* xxTRS solves in place: redirect id.redrhs to schur_sol so the
         original reduced rhs is preserved; restored below */
      orhs = mumps->id.redrhs;
      ierr = PetscMemcpy(mumps->schur_sol,mumps->id.redrhs,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
      mumps->id.redrhs = (MumpsScalar*)mumps->schur_sol;
    }
    if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
      char type[2];
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKgetrs",LAPACKgetrs_(type,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRS Lapack routine %d",(int)B_ierr);
    } else { /* either full or lower-triangular (not packed) */
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      if (mumps->id.sym == 2) {
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrs",LAPACKsytrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRS Lapack routine %d",(int)B_ierr);
      } else {
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKpotrs",LAPACKpotrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRS Lapack routine %d",(int)B_ierr);
      }
    }
    if (!sol_in_redrhs) {
      /* put MUMPS' own reduced rhs pointer back */
      mumps->id.redrhs = orhs;
    }
  }
  PetscFunctionReturn(0);
}
340 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsHandleSchur_Private"
/*
  Drive the two-phase (condensation/expansion) MUMPS solve when a Schur
  complement is active.  Intended to be called twice around the main
  PetscMUMPS_c solve call:
    - first call : sets ICNTL(26)=1 (condensation) and allocates id.redrhs
    - second call: solves the Schur system ourselves, then runs the
      expansion phase (ICNTL(26)=2) and restores the defaults
  Does nothing when no Schur complement has been requested (ICNTL(19)==0).
*/
static PetscErrorCode MatMumpsHandleSchur_Private(Mat_MUMPS* mumps)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */
    PetscFunctionReturn(0);
  }
  if (!mumps->schur_second_solve) { /* prepare for the condensation step */
    /* check if schur complement has been computed
       We set by default ICNTL(26) == -1 when Schur indices have been provided by the user.
       According to MUMPS (5.0.0) manual, any value should be harmful during the factorization phase
       Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve routines solve the full system.
       This requires an extra call to PetscMUMPS_c and the computation of the factors for S, handled setting schur_second_solve to PETSC_TRUE */
    if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
      PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur;
      /* allocate MUMPS internal array to store reduced right-hand sides */
      if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) {
        ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
        mumps->id.lredrhs = mumps->id.size_schur;
        ierr = PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);CHKERRQ(ierr);
        mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs;
      }
      mumps->schur_second_solve = PETSC_TRUE;
      mumps->id.ICNTL(26) = 1; /* condensation phase */
    }
  } else { /* prepare for the expansion step */
    /* solve Schur complement (this should be done by the MUMPS user, so basically us) */
    ierr = MatMumpsSolveSchur_Private(mumps,PETSC_TRUE);CHKERRQ(ierr);
    mumps->id.ICNTL(26) = 2; /* expansion phase */
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    /* restore defaults */
    mumps->id.ICNTL(26) = -1;
    mumps->schur_second_solve = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
381 
382 /*
383   MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz]
384 
385   input:
386     A       - matrix in aij,baij or sbaij (bs=1) format
387     shift   - 0: C style output triple; 1: Fortran style output triple.
388     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
389               MAT_REUSE_MATRIX:   only the values in v array are updated
390   output:
391     nnz     - dim of r, c, and v (number of local nonzero entries of A)
392     r, c, v - row and col index, matrix values (matrix triples)
393 
  The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is
  freed with PetscFree(mumps->irn). This is not ideal code: the fact that v is ONLY sometimes part of mumps->irn means
  that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
397 
398  */
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
402 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
403 {
404   const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
405   PetscInt       nz,rnz,i,j;
406   PetscErrorCode ierr;
407   PetscInt       *row,*col;
408   Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;
409 
410   PetscFunctionBegin;
411   *v=aa->a;
412   if (reuse == MAT_INITIAL_MATRIX) {
413     nz   = aa->nz;
414     ai   = aa->i;
415     aj   = aa->j;
416     *nnz = nz;
417     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
418     col  = row + nz;
419 
420     nz = 0;
421     for (i=0; i<M; i++) {
422       rnz = ai[i+1] - ai[i];
423       ajj = aj + ai[i];
424       for (j=0; j<rnz; j++) {
425         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
426       }
427     }
428     *r = row; *c = col;
429   }
430   PetscFunctionReturn(0);
431 }
432 
433 #undef __FUNCT__
434 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
435 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
436 {
437   Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
438   const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
439   PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
440   PetscErrorCode ierr;
441   PetscInt       *row,*col;
442 
443   PetscFunctionBegin;
444   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
445   M = A->rmap->N/bs;
446   *v = aa->a;
447   if (reuse == MAT_INITIAL_MATRIX) {
448     ai   = aa->i; aj = aa->j;
449     nz   = bs2*aa->nz;
450     *nnz = nz;
451     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
452     col  = row + nz;
453 
454     for (i=0; i<M; i++) {
455       ajj = aj + ai[i];
456       rnz = ai[i+1] - ai[i];
457       for (k=0; k<rnz; k++) {
458         for (j=0; j<bs; j++) {
459           for (m=0; m<bs; m++) {
460             row[idx]   = i*bs + m + shift;
461             col[idx++] = bs*(ajj[k]) + j + shift;
462           }
463         }
464       }
465     }
466     *r = row; *c = col;
467   }
468   PetscFunctionReturn(0);
469 }
470 
471 #undef __FUNCT__
472 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
473 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
474 {
475   const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
476   PetscInt       nz,rnz,i,j;
477   PetscErrorCode ierr;
478   PetscInt       *row,*col;
479   Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;
480 
481   PetscFunctionBegin;
482   *v = aa->a;
483   if (reuse == MAT_INITIAL_MATRIX) {
484     nz   = aa->nz;
485     ai   = aa->i;
486     aj   = aa->j;
487     *v   = aa->a;
488     *nnz = nz;
489     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
490     col  = row + nz;
491 
492     nz = 0;
493     for (i=0; i<M; i++) {
494       rnz = ai[i+1] - ai[i];
495       ajj = aj + ai[i];
496       for (j=0; j<rnz; j++) {
497         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
498       }
499     }
500     *r = row; *c = col;
501   }
502   PetscFunctionReturn(0);
503 }
504 
505 #undef __FUNCT__
506 #define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
507 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
508 {
509   const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
510   PetscInt          nz,rnz,i,j;
511   const PetscScalar *av,*v1;
512   PetscScalar       *val;
513   PetscErrorCode    ierr;
514   PetscInt          *row,*col;
515   Mat_SeqAIJ        *aa=(Mat_SeqAIJ*)A->data;
516 
517   PetscFunctionBegin;
518   ai   =aa->i; aj=aa->j;av=aa->a;
519   adiag=aa->diag;
520   if (reuse == MAT_INITIAL_MATRIX) {
521     /* count nz in the uppper triangular part of A */
522     nz = 0;
523     for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
524     *nnz = nz;
525 
526     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
527     col  = row + nz;
528     val  = (PetscScalar*)(col + nz);
529 
530     nz = 0;
531     for (i=0; i<M; i++) {
532       rnz = ai[i+1] - adiag[i];
533       ajj = aj + adiag[i];
534       v1  = av + adiag[i];
535       for (j=0; j<rnz; j++) {
536         row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
537       }
538     }
539     *r = row; *c = col; *v = val;
540   } else {
541     nz = 0; val = *v;
542     for (i=0; i <M; i++) {
543       rnz = ai[i+1] - adiag[i];
544       ajj = aj + adiag[i];
545       v1  = av + adiag[i];
546       for (j=0; j<rnz; j++) {
547         val[nz++] = v1[j];
548       }
549     }
550   }
551   PetscFunctionReturn(0);
552 }
553 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/*
  Convert the local part of an MPISBAIJ (bs=1) matrix to COO triples for MUMPS.

  The local rows consist of a diagonal block (mat->A, SeqSBAIJ) and an
  off-diagonal block (mat->B, SeqBAIJ); garray maps B's compressed column
  indices to global columns.  Values must be gathered from both blocks, so
  r, c and v share one allocation on MAT_INITIAL_MATRIX; on reuse the index
  arrays are kept and only the values are refreshed.
*/
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray; /* compressed-to-global column map for the B block */

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    /* one allocation: row indices, then column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow is the global row index */
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part: local column + rstart gives the global column */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: garray translates to the global column */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
614 
615 #undef __FUNCT__
616 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
617 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
618 {
619   const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
620   PetscErrorCode    ierr;
621   PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
622   PetscInt          *row,*col;
623   const PetscScalar *av, *bv,*v1,*v2;
624   PetscScalar       *val;
625   Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
626   Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
627   Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;
628 
629   PetscFunctionBegin;
630   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
631   av=aa->a; bv=bb->a;
632 
633   garray = mat->garray;
634 
635   if (reuse == MAT_INITIAL_MATRIX) {
636     nz   = aa->nz + bb->nz;
637     *nnz = nz;
638     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
639     col  = row + nz;
640     val  = (PetscScalar*)(col + nz);
641 
642     *r = row; *c = col; *v = val;
643   } else {
644     row = *r; col = *c; val = *v;
645   }
646 
647   jj = 0; irow = rstart;
648   for (i=0; i<m; i++) {
649     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
650     countA = ai[i+1] - ai[i];
651     countB = bi[i+1] - bi[i];
652     bjj    = bj + bi[i];
653     v1     = av + ai[i];
654     v2     = bv + bi[i];
655 
656     /* A-part */
657     for (j=0; j<countA; j++) {
658       if (reuse == MAT_INITIAL_MATRIX) {
659         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
660       }
661       val[jj++] = v1[j];
662     }
663 
664     /* B-part */
665     for (j=0; j < countB; j++) {
666       if (reuse == MAT_INITIAL_MATRIX) {
667         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
668       }
669       val[jj++] = v2[j];
670     }
671     irow++;
672   }
673   PetscFunctionReturn(0);
674 }
675 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
/*
  Convert the local part of an MPIBAIJ matrix to pointwise COO triples for
  MUMPS: each bs x bs block of the diagonal (mat->A) and off-diagonal
  (mat->B) SeqBAIJ blocks is expanded into bs*bs scalar entries, emitted
  column-by-column to match the column-major block storage.  Values are
  copied, so r, c and v share one allocation on MAT_INITIAL_MATRIX; on
  reuse only the values are refreshed.
*/
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz); /* pointwise nonzero count */
    *nnz = nz;
    /* one allocation: row indices, then column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow is the global row of the first row in block row i */
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i]; /* block values are stored bs2 per block */
    v2     = bv + bs2*bi[i];

    idx = 0;
    /* A-part: expand each block; n indexes the row, j the column within the block */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part: garray maps the compressed block column to the global block column */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}
746 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
/*
  Convert the upper-triangular part (diagonal included) of the local rows of
  an MPIAIJ matrix to COO triples for a symmetric MUMPS factorization.

  From the diagonal block (mat->A) only entries at or past the stored
  diagonal (aa->diag) are taken.  From the off-diagonal block (mat->B) only
  columns with garray[..] > rstart are taken: B holds no columns inside the
  local diagonal range, so this selects exactly the columns right of the
  diagonal block (the upper-triangular ones).  Values are copied, so r, c
  and v share one allocation on MAT_INITIAL_MATRIX; on reuse only the
  values are refreshed.
*/
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat =  (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  =(Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  =(Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    /* one allocation: row indices, then column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow is the global row index */
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part: diagonal and everything after it in this row */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: keep only columns past the diagonal block (upper triangle) */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
821 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_MUMPS"
/*
  MatGetDiagonal() is not supported for a MUMPS factored matrix: the factors
  live inside MUMPS and the diagonal is not available; always raises
  PETSC_ERR_SUP.
*/
PetscErrorCode MatGetDiagonal_MUMPS(Mat A,Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: MUMPS factor");
  PetscFunctionReturn(0);
}
830 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
   MatDestroy_MUMPS - destroys a MUMPS factor matrix: releases the interface
   work objects, terminates the MUMPS instance, chains to the saved
   type-specific destroy, and clears the composed query/set functions.

   Note: the cleanup order matters -- the MUMPS instance is terminated
   (JOB_END) before its duplicated communicator is freed.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->CleanUpMUMPS) {
    /* Terminate instance, deallocate memories */
    ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
    /* irn is the base of a single allocation that also holds jcn and val
       (see the ConvertToTriples routines), so only one free is needed */
    ierr = PetscFree(mumps->irn);CHKERRQ(ierr);
    ierr = PetscFree(mumps->info);CHKERRQ(ierr);
    ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
    mumps->id.job = JOB_END;
    PetscMUMPS_c(&mumps->id); /* let MUMPS release its internal storage */
    ierr = MPI_Comm_free(&(mumps->comm_mumps));CHKERRQ(ierr);
  }
  if (mumps->Destroy) {
    /* type-specific destroy saved when the factor matrix was created */
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetSchurIndices_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsInvertSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsCreateSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsRestoreSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSolveSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSolveSchurComplementTranspose_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
880 
881 #undef __FUNCT__
882 #define __FUNCT__ "MatSolve_MUMPS"
883 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
884 {
885   Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
886   PetscScalar      *array;
887   Vec              b_seq;
888   IS               is_iden,is_petsc;
889   PetscErrorCode   ierr;
890   PetscInt         i;
891   static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
892 
893   PetscFunctionBegin;
894   ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
895   ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
896   mumps->id.nrhs = 1;
897   b_seq          = mumps->b_seq;
898   if (mumps->size > 1) {
899     /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */
900     ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
901     ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
902     if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
903   } else {  /* size == 1 */
904     ierr = VecCopy(b,x);CHKERRQ(ierr);
905     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
906   }
907   if (!mumps->myid) { /* define rhs on the host */
908     mumps->id.nrhs = 1;
909     mumps->id.rhs = (MumpsScalar*)array;
910   }
911 
912   /* handle condensation step of Schur complement (if any) */
913   ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);
914 
915   /* solve phase */
916   /*-------------*/
917   mumps->id.job = JOB_SOLVE;
918   PetscMUMPS_c(&mumps->id);
919   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
920 
921   /* handle expansion step of Schur complement (if any) */
922   ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);
923 
924   if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
925     if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
926       /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
927       ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
928     }
929     if (!mumps->scat_sol) { /* create scatter scat_sol */
930       ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
931       for (i=0; i<mumps->id.lsol_loc; i++) {
932         mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
933       }
934       ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
935       ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
936       ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
937       ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);
938 
939       mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
940     }
941 
942     ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
943     ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
944   }
945   PetscFunctionReturn(0);
946 }
947 
948 #undef __FUNCT__
949 #define __FUNCT__ "MatSolveTranspose_MUMPS"
950 PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
951 {
952   Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
953   PetscErrorCode ierr;
954 
955   PetscFunctionBegin;
956   mumps->id.ICNTL(9) = 0;
957   ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
958   mumps->id.ICNTL(9) = 1;
959   PetscFunctionReturn(0);
960 }
961 
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_MUMPS"
/*
   MatMatSolve_MUMPS - solves A X = B for multiple right-hand sides, where B
   and X are dense matrices with identical row distributions.

   Sequential case: B is copied into X and MUMPS overwrites X in place with
   the solution.  Parallel case: B is gathered to the host (MUMPS supports
   only a centralized dense rhs), solved with a distributed solution, and the
   local solution pieces are scattered back into X.
*/
PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscInt       i,nrhs,M;
  PetscScalar    *array,*bray;

  PetscFunctionBegin;
  /* both B and X must be dense and share the same row distribution */
  ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
  if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");

  ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr);
  mumps->id.nrhs = nrhs;
  mumps->id.lrhs = M;  /* leading dimension of the dense rhs array */

  if (mumps->size == 1) {
    /* copy B to X */
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);
    mumps->id.rhs = (MumpsScalar*)array; /* MUMPS solves in place: X holds the rhs now, the solution afterwards */
    /* handle condensation step of Schur complement (if any) */
    ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* handle expansion step of Schur complement (if any) */
    ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);
  } else {  /*--------- parallel case --------*/
    PetscInt       lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
    MumpsScalar    *sol_loc,*sol_loc_save;
    IS             is_to,is_from;
    PetscInt       k,proc,j,m;
    const PetscInt *rstart;
    Vec            v_mpi,b_seq,x_seq;
    VecScatter     scat_rhs,scat_sol;

    /* create x_seq to hold local solution */
    isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
    sol_loc_save  = mumps->id.sol_loc;

    lsol_loc  = mumps->id.INFO(23); /* length of the local solution returned by MUMPS for one rhs */
    nlsol_loc = nrhs*lsol_loc;     /* length of sol_loc */
    ierr = PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);CHKERRQ(ierr);
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    mumps->id.isol_loc = isol_loc;

    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr);

    /* copy rhs matrix B into vector v_mpi */
    ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);

    /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
    /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
      iidx: inverse of idx, will be used by scattering xx_seq -> X       */
    ierr = PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr);
    k = 0;
    for (proc=0; proc<mumps->size; proc++){
      for (j=0; j<nrhs; j++){
        for (i=rstart[proc]; i<rstart[proc+1]; i++){
          iidx[j*M + i] = k;
          idx[k++]      = j*M + i;
        }
      }
    }

    if (!mumps->myid) {
      /* only the host holds the full centralized rhs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr);
    }
    ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    if (!mumps->myid) { /* define rhs on the host */
      ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr);
      mumps->id.rhs = (MumpsScalar*)bray;
      ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr);
    }

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr);

    /* create scatter scat_sol */
    ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr);
    for (i=0; i<lsol_loc; i++) {
      isol_loc[i] -= 1; /* change Fortran style to C style */
      idxx[i] = iidx[isol_loc[i]];
      for (j=1; j<nrhs; j++){
        idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M]; /* same row index, j-th rhs column */
      }
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
    ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);

    /* free spaces */
    mumps->id.sol_loc = sol_loc_save;   /* restore the pointers used by MatSolve_MUMPS() */
    mumps->id.isol_loc = isol_loc_save;

    ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr);
    ierr = PetscFree2(idx,iidx);CHKERRQ(ierr);
    ierr = PetscFree(idxx);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&v_mpi);CHKERRQ(ierr);
    ierr = VecDestroy(&b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1108 
1109 #if !defined(PETSC_USE_COMPLEX)
1110 /*
1111   input:
1112    F:        numeric factor
1113   output:
1114    nneg:     total number of negative pivots
1115    nzero:    0
1116    npos:     (global dimension of F) - nneg
1117 */
1118 
1119 #undef __FUNCT__
1120 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
1121 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
1122 {
1123   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1124   PetscErrorCode ierr;
1125   PetscMPIInt    size;
1126 
1127   PetscFunctionBegin;
1128   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
1129   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
1130   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13));
1131 
1132   if (nneg) *nneg = mumps->id.INFOG(12);
1133   if (nzero || npos) {
1134     if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
1135     if (nzero) *nzero = mumps->id.INFOG(28);
1136     if (npos) *npos   = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
1137   }
1138   PetscFunctionReturn(0);
1139 }
1140 #endif /* !defined(PETSC_USE_COMPLEX) */
1141 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
   MatFactorNumeric_MUMPS - numerical factorization phase.

   Refreshes the matrix values in the triples arrays (the nonzero pattern is
   unchanged since the symbolic phase), runs MUMPS JOB_FACTNUMERIC, and in the
   parallel case (re)creates the sequential work vector x_seq that receives
   the distributed solution in MatSolve_MUMPS().
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  /* refill val[] from A; irn/jcn keep the pattern built in the symbolic phase */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) { /* A is centralized */
    if (!mumps->myid) {
      mumps->id.a = (MumpsScalar*)mumps->val;
    }
  } else {
    mumps->id.a_loc = (MumpsScalar*)mumps->val;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    if (mumps->id.INFO(1) == -13) {
      /* -13: allocation failure; INFO(2) carries the requested size (negative => megabytes) */
      if (mumps->id.INFO(2) < 0) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      } else {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
      }
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  /* NOTE(review): the condition tests ICNTL(16) but the message prints INFOG(16) -- confirm which is intended */
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  (F)->assembled        = PETSC_TRUE;
  mumps->matstruc       = SAME_NONZERO_PATTERN;
  mumps->CleanUpMUMPS   = PETSC_TRUE;
  mumps->schur_factored = PETSC_FALSE;  /* a fresh factorization invalidates any Schur state */
  mumps->schur_inverted = PETSC_FALSE;

  /* just to be sure that ICNTL(19) value returned by a call from MatMumpsGetIcntl is always consistent */
  if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3;

  if (mumps->size > 1) {
    PetscInt    lsol_loc;
    PetscScalar *sol_loc;

    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;

    /* distributed solution; Create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {
      /* re-factorization: drop the previous local-solution buffers and scatter */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1208 
1209 /* Sets MUMPS options from the options database */
1210 #undef __FUNCT__
1211 #define __FUNCT__ "PetscSetMUMPSFromOptions"
1212 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
1213 {
1214   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
1215   PetscErrorCode ierr;
1216   PetscInt       icntl,info[40],i,ninfo=40;
1217   PetscBool      flg;
1218 
1219   PetscFunctionBegin;
1220   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
1221   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
1222   if (flg) mumps->id.ICNTL(1) = icntl;
1223   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
1224   if (flg) mumps->id.ICNTL(2) = icntl;
1225   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
1226   if (flg) mumps->id.ICNTL(3) = icntl;
1227 
1228   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
1229   if (flg) mumps->id.ICNTL(4) = icntl;
1230   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
1231 
1232   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
1233   if (flg) mumps->id.ICNTL(6) = icntl;
1234 
1235   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
1236   if (flg) {
1237     if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
1238     else mumps->id.ICNTL(7) = icntl;
1239   }
1240 
1241   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
1242   /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */
1243   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
1244   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
1245   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
1246   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
1247   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
1248   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
1249   if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any schur data (if any) */
1250     ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
1251   }
1252   /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr); -- sparse rhs is not supported in PETSc API */
1253   /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */
1254 
1255   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
1256   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
1257   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
1258   if (mumps->id.ICNTL(24)) {
1259     mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
1260   }
1261 
1262   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
1263   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
1264   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
1265   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
1266   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
1267   ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
1268   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
1269   /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elemination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr);  -- not supported by PETSc API */
1270   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
1271 
1272   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
1273   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
1274   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
1275   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
1276   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
1277 
1278   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
1279 
1280   ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr);
1281   if (ninfo) {
1282     if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must <= 40\n",ninfo);
1283     ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr);
1284     mumps->ninfo = ninfo;
1285     for (i=0; i<ninfo; i++) {
1286       if (info[i] < 0 || info[i]>40) {
1287         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must between 1 and 40\n",ninfo);
1288       } else {
1289         mumps->info[i] = info[i];
1290       }
1291     }
1292   }
1293 
1294   PetscOptionsEnd();
1295   PetscFunctionReturn(0);
1296 }
1297 
1298 #undef __FUNCT__
1299 #define __FUNCT__ "PetscInitializeMUMPS"
1300 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
1301 {
1302   PetscErrorCode ierr;
1303 
1304   PetscFunctionBegin;
1305   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
1306   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
1307   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);
1308 
1309   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
1310 
1311   mumps->id.job = JOB_INIT;
1312   mumps->id.par = 1;  /* host participates factorizaton and solve */
1313   mumps->id.sym = mumps->sym;
1314   PetscMUMPS_c(&mumps->id);
1315 
1316   mumps->CleanUpMUMPS = PETSC_FALSE;
1317   mumps->scat_rhs     = NULL;
1318   mumps->scat_sol     = NULL;
1319 
1320   /* set PETSc-MUMPS default options - override MUMPS default */
1321   mumps->id.ICNTL(3) = 0;
1322   mumps->id.ICNTL(4) = 0;
1323   if (mumps->size == 1) {
1324     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
1325   } else {
1326     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
1327     mumps->id.ICNTL(20) = 0;   /* rhs is in dense format */
1328     mumps->id.ICNTL(21) = 1;   /* distributed solution */
1329   }
1330 
1331   /* schur */
1332   mumps->id.size_schur      = 0;
1333   mumps->id.listvar_schur   = NULL;
1334   mumps->id.schur           = NULL;
1335   mumps->schur_second_solve = PETSC_FALSE;
1336   mumps->sizeredrhs         = 0;
1337   mumps->schur_pivots       = NULL;
1338   mumps->schur_work         = NULL;
1339   mumps->schur_sol          = NULL;
1340   mumps->schur_sizesol      = 0;
1341   mumps->schur_restored     = PETSC_TRUE;
1342   mumps->schur_factored     = PETSC_FALSE;
1343   mumps->schur_inverted     = PETSC_FALSE;
1344   PetscFunctionReturn(0);
1345 }
1346 
/* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
   MatLUFactorSymbolic_AIJMUMPS - symbolic LU factorization (MUMPS analysis
   phase) for AIJ matrices.

   Converts A to MUMPS triples format, wires the triples into the MUMPS
   structure (centralized or distributed, depending on ICNTL(18)), runs
   JOB_FACTSYMBOLIC, and installs the numeric/solve function pointers on F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* build irn/jcn/val triples (pattern and values) from A */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* scaling/permutation strategies > 1 need the numerical values during analysis */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool      flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
         */
        if (!mumps->myid) { /* NOTE(review): redundant -- already inside the !mumps->myid branch above */
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          /* perm_in is freed in MatDestroy_MUMPS() via id.perm_in */
          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* only the host holds the full rhs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = MatMatSolve_MUMPS;
  PetscFunctionReturn(0);
}
1427 
/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
/*
   MatLUFactorSymbolic_BAIJMUMPS - runs the MUMPS analysis (symbolic factorization)
   phase for a BAIJ matrix.

   A is first converted to the triplet (COO) format MUMPS expects; the triples are
   kept in mumps->{irn,jcn,val} for reuse by the numeric phase.  Depending on
   ICNTL(18) the structure is handed to MUMPS either centralized on the host rank
   (case 0) or distributed (case 3).  The PETSc orderings r and c are ignored
   (there is no ICNTL(7)==1 handling here, unlike the AIJ variant).
*/
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;        /* work vector used only to build the rhs scatter */
  IS             is_iden;  /* identity index set for the rhs scatter */
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* convert A into the 1-based triplet format required by MUMPS */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) { /* only the host rank supplies the structure */
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) { /* these permutation/scaling options need the numerical values during analysis */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    /* NOTE(review): the AIJ variant sizes b_seq with A->rmap->N while this one uses A->cmap->N;
       identical for the square matrices MUMPS factors, but worth unifying -- confirm */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  /* run the MUMPS analysis phase */
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  /* NOTE(review): unlike MatLUFactorSymbolic_AIJMUMPS, F->ops->matsolve is not set here --
     confirm whether MatMatSolve_MUMPS should also be hooked up for BAIJ */
  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  PetscFunctionReturn(0);
}
1488 
/* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   MatCholeskyFactorSymbolic_MUMPS - runs the MUMPS analysis (symbolic
   factorization) phase for a symmetric matrix.

   Mirrors MatLUFactorSymbolic_BAIJMUMPS: convert A to triplets, hand the
   structure to MUMPS (centralized or distributed per ICNTL(18)) and call
   the analysis job.  The PETSc ordering r and the factor info are ignored.
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;        /* work vector used only to build the rhs scatter */
  IS             is_iden;  /* identity index set for the rhs scatter */
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* convert A into the 1-based triplet format required by MUMPS */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) { /* only the host rank supplies the structure */
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) { /* these options need the numerical values during analysis */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  /* run the MUMPS analysis phase */
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS; /* matrix is symmetric, so A^T x = b is the same solve */
  F->ops->matsolve              = MatMatSolve_MUMPS;
#if defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = NULL; /* inertia is not provided for complex matrices */
#else
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#endif
  PetscFunctionReturn(0);
}
1555 
1556 #undef __FUNCT__
1557 #define __FUNCT__ "MatView_MUMPS"
1558 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1559 {
1560   PetscErrorCode    ierr;
1561   PetscBool         iascii;
1562   PetscViewerFormat format;
1563   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1564 
1565   PetscFunctionBegin;
1566   /* check if matrix is mumps type */
1567   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1568 
1569   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1570   if (iascii) {
1571     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1572     if (format == PETSC_VIEWER_ASCII_INFO) {
1573       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1574       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1575       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1576       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1577       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1578       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1579       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1580       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1581       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1582       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1583       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1584       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1585       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1586       if (mumps->id.ICNTL(11)>0) {
1587         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1588         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1589         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1590         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1591         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1592         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1593       }
1594       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1595       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1596       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1597       /* ICNTL(15-17) not used */
1598       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1599       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1600       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1601       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1602       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1603       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1604 
1605       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1606       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1607       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1608       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1609       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1610       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1611 
1612       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1613       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1614       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1615 
1616       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1617       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1618       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1619       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1620       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1621 
1622       /* infomation local to each processor */
1623       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1624       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
1625       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1626       ierr = PetscViewerFlush(viewer);
1627       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1628       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1629       ierr = PetscViewerFlush(viewer);
1630       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1631       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1632       ierr = PetscViewerFlush(viewer);
1633 
1634       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1635       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1636       ierr = PetscViewerFlush(viewer);
1637 
1638       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1639       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1640       ierr = PetscViewerFlush(viewer);
1641 
1642       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1643       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1644       ierr = PetscViewerFlush(viewer);
1645 
1646       if (mumps->ninfo && mumps->ninfo <= 40){
1647         PetscInt i;
1648         for (i=0; i<mumps->ninfo; i++){
1649           ierr = PetscViewerASCIIPrintf(viewer, "  INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr);
1650           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr);
1651           ierr = PetscViewerFlush(viewer);
1652         }
1653       }
1654 
1655 
1656       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
1657 
1658       if (!mumps->myid) { /* information from the host */
1659         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1660         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1661         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1662         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1663 
1664         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1665         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1666         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1667         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1668         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1669         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1670         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1671         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1672         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1673         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1674         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1675         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1676         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1677         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1678         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1679         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1680         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1681         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1682         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1683         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1684         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1685         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1686         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1687         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1688         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1689         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1690         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1691         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1692         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1693       }
1694     }
1695   }
1696   PetscFunctionReturn(0);
1697 }
1698 
1699 #undef __FUNCT__
1700 #define __FUNCT__ "MatGetInfo_MUMPS"
1701 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1702 {
1703   Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1704 
1705   PetscFunctionBegin;
1706   info->block_size        = 1.0;
1707   info->nz_allocated      = mumps->id.INFOG(20);
1708   info->nz_used           = mumps->id.INFOG(20);
1709   info->nz_unneeded       = 0.0;
1710   info->assemblies        = 0.0;
1711   info->mallocs           = 0.0;
1712   info->memory            = 0.0;
1713   info->fill_ratio_given  = 0;
1714   info->fill_ratio_needed = 0;
1715   info->factor_mallocs    = 0;
1716   PetscFunctionReturn(0);
1717 }
1718 
1719 /* -------------------------------------------------------------------------------------------*/
1720 #undef __FUNCT__
1721 #define __FUNCT__ "MatMumpsSetSchurIndices_MUMPS"
1722 PetscErrorCode MatMumpsSetSchurIndices_MUMPS(Mat F,PetscInt size,PetscInt idxs[])
1723 {
1724   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1725   PetscErrorCode ierr;
1726 
1727   PetscFunctionBegin;
1728   if (mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS parallel Schur complements not yet supported from PETSc\n");
1729   if (mumps->id.size_schur != size) {
1730     ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
1731     mumps->id.size_schur = size;
1732     mumps->id.schur_lld = size;
1733     ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr);
1734   }
1735   ierr = PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));CHKERRQ(ierr);
1736   if (F->factortype == MAT_FACTOR_LU) {
1737     mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */
1738   } else {
1739     mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */
1740   }
1741   /* set a special value of ICNTL (not handled my MUMPS) to be used in the solve phase by PETSc */
1742   mumps->id.ICNTL(26) = -1;
1743   PetscFunctionReturn(0);
1744 }
1745 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetSchurIndices"
/*@
  MatMumpsSetSchurIndices - Set indices defining the Schur complement that MUMPS will compute during the factorization steps

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  size - size of the Schur complement indices
-  idxs[] - array of Schur complement indices

   Notes:
   The indices are copied by this routine, so the caller retains ownership of idxs[] and must free it.
   MUMPS Schur complement mode is currently implemented for sequential matrices.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsCreateSchurComplement(), MatMumpsGetSchurComplement()
@*/
PetscErrorCode MatMumpsSetSchurIndices(Mat F,PetscInt size,PetscInt idxs[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  if (size) PetscValidIntPointer(idxs,3);  /* idxs may be NULL when size is 0 */
  ierr = PetscTryMethod(F,"MatMumpsSetSchurIndices_C",(Mat,PetscInt,PetscInt[]),(F,size,idxs));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1778 
1779 /* -------------------------------------------------------------------------------------------*/
1780 #undef __FUNCT__
1781 #define __FUNCT__ "MatMumpsCreateSchurComplement_MUMPS"
1782 PetscErrorCode MatMumpsCreateSchurComplement_MUMPS(Mat F,Mat* S)
1783 {
1784   Mat            St;
1785   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1786   PetscScalar    *array;
1787 #if defined(PETSC_USE_COMPLEX)
1788   PetscScalar    im = PetscSqrtScalar((PetscScalar)-1.0);
1789 #endif
1790   PetscErrorCode ierr;
1791 
1792   PetscFunctionBegin;
1793   if (!mumps->CleanUpMUMPS) { /* CleanUpMUMPS is set to true after numerical factorization */
1794     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Numerical factorization phase not yet performed! You should call MatFactorSymbolic/Numeric before");
1795   } else if (!mumps->id.ICNTL(19)) {
1796     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
1797   } else if (!mumps->id.size_schur) {
1798     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
1799   } else if (!mumps->schur_restored) {
1800     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
1801   }
1802   ierr = MatCreate(PetscObjectComm((PetscObject)F),&St);CHKERRQ(ierr);
1803   ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr);
1804   ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr);
1805   ierr = MatSetUp(St);CHKERRQ(ierr);
1806   ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr);
1807   if (!mumps->sym) { /* MUMPS always return a full matrix */
1808     if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
1809       PetscInt i,j,N=mumps->id.size_schur;
1810       for (i=0;i<N;i++) {
1811         for (j=0;j<N;j++) {
1812 #if !defined(PETSC_USE_COMPLEX)
1813           PetscScalar val = mumps->id.schur[i*N+j];
1814 #else
1815           PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1816 #endif
1817           array[j*N+i] = val;
1818         }
1819       }
1820     } else { /* stored by columns */
1821       ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
1822     }
1823   } else { /* either full or lower-triangular (not packed) */
1824     if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */
1825       PetscInt i,j,N=mumps->id.size_schur;
1826       for (i=0;i<N;i++) {
1827         for (j=i;j<N;j++) {
1828 #if !defined(PETSC_USE_COMPLEX)
1829           PetscScalar val = mumps->id.schur[i*N+j];
1830 #else
1831           PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1832 #endif
1833           array[i*N+j] = val;
1834           array[j*N+i] = val;
1835         }
1836       }
1837     } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */
1838       ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
1839     } else { /* ICNTL(19) == 1 lower triangular stored by rows */
1840       PetscInt i,j,N=mumps->id.size_schur;
1841       for (i=0;i<N;i++) {
1842         for (j=0;j<i+1;j++) {
1843 #if !defined(PETSC_USE_COMPLEX)
1844           PetscScalar val = mumps->id.schur[i*N+j];
1845 #else
1846           PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1847 #endif
1848           array[i*N+j] = val;
1849           array[j*N+i] = val;
1850         }
1851       }
1852     }
1853   }
1854   ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr);
1855   *S = St;
1856   PetscFunctionReturn(0);
1857 }
1858 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsCreateSchurComplement"
/*@
  MatMumpsCreateSchurComplement - Create a Schur complement matrix object using Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Output Parameter:
.  S - location where to return the Schur complement (MATDENSE)

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine provides a copy of the Schur data stored within MUMPS data structures. The caller must destroy the object when it is no longer needed.
   If MatMumpsInvertSchurComplement has been called, the routine gets back the inverse

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsGetSchurComplement()
@*/
PetscErrorCode MatMumpsCreateSchurComplement(Mat F,Mat* S)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  ierr = PetscTryMethod(F,"MatMumpsCreateSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1890 
1891 /* -------------------------------------------------------------------------------------------*/
1892 #undef __FUNCT__
1893 #define __FUNCT__ "MatMumpsGetSchurComplement_MUMPS"
1894 PetscErrorCode MatMumpsGetSchurComplement_MUMPS(Mat F,Mat* S)
1895 {
1896   Mat            St;
1897   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1898   PetscErrorCode ierr;
1899 
1900   PetscFunctionBegin;
1901   if (!mumps->CleanUpMUMPS) { /* CleanUpMUMPS is set to true after numerical factorization */
1902     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Numerical factorization phase not yet performed! You should call MatFactorSymbolic/Numeric before");
1903   } else if (!mumps->id.ICNTL(19)) {
1904     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
1905   } else if (!mumps->id.size_schur) {
1906     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
1907   } else if (!mumps->schur_restored) {
1908     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
1909   }
1910   /* It should be the responsibility of the user to handle different ICNTL(19) cases if they want to work with the raw data */
1911   /* should I also add errors when the Schur complement has been already factored? */
1912   ierr = MatCreateSeqDense(PetscObjectComm((PetscObject)F),mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&St);CHKERRQ(ierr);
1913   *S = St;
1914   mumps->schur_restored = PETSC_FALSE;
1915   PetscFunctionReturn(0);
1916 }
1917 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetSchurComplement"
/*@
  MatMumpsGetSchurComplement - Get a Schur complement matrix object using the current status of the raw Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Output Parameter:
.  S - location where to return the Schur complement (MATDENSE)

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine uses the pointer to the raw data of the Schur Complement stored within MUMPS data structures. The caller should call MatMumpsRestoreSchurComplement when the object is no longer needed.
   If MatMumpsInvertSchurComplement has been called, the routine gets back the inverse

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsRestoreSchurComplement(), MatMumpsCreateSchurComplement()
@*/
PetscErrorCode MatMumpsGetSchurComplement(Mat F,Mat* S)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* PetscUseMethod (not Try): errors if F is not a MUMPS factor */
  ierr = PetscUseMethod(F,"MatMumpsGetSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1949 
1950 /* -------------------------------------------------------------------------------------------*/
1951 #undef __FUNCT__
1952 #define __FUNCT__ "MatMumpsRestoreSchurComplement_MUMPS"
1953 PetscErrorCode MatMumpsRestoreSchurComplement_MUMPS(Mat F,Mat* S)
1954 {
1955   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1956   PetscErrorCode ierr;
1957 
1958   PetscFunctionBegin;
1959   if (!mumps->CleanUpMUMPS) { /* CleanUpMUMPS is set to true after numerical factorization */
1960     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Numerical factorization phase not yet performed! You should call MatFactorSymbolic/Numeric before");
1961   } else if (!mumps->id.ICNTL(19)) {
1962     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
1963   } else if (!mumps->id.size_schur) {
1964     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
1965   } else if (mumps->schur_restored) {
1966     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has been already restored");
1967   }
1968   ierr = MatDestroy(S);CHKERRQ(ierr);
1969   *S = NULL;
1970   mumps->schur_restored = PETSC_TRUE;
1971   PetscFunctionReturn(0);
1972 }
1973 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsRestoreSchurComplement"
/*@
  MatMumpsRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to MatMumpsGetSchurComplement

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  S - location where the Schur complement is stored; set to NULL on return

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsGetSchurComplement(), MatMumpsCreateSchurComplement()
@*/
PetscErrorCode MatMumpsRestoreSchurComplement(Mat F,Mat* S)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* PetscUseMethod (not Try): it is an error if F is not a MUMPS factor */
  ierr = PetscUseMethod(F,"MatMumpsRestoreSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2003 
/* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsInvertSchurComplement_MUMPS"
/* Invert the raw Schur complement stored in MUMPS data structures, in place.
   Silently a no-op when Schur mode (ICNTL(19)) is not enabled; note this early
   return happens BEFORE the factorization-state check below. */
PetscErrorCode MatMumpsInvertSchurComplement_MUMPS(Mat F)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) { /* do nothing */
    PetscFunctionReturn(0);
  }
  if (!mumps->CleanUpMUMPS) { /* CleanUpMUMPS is set to true after numerical factorization */
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Numerical factorization phase not yet performed! You should call MatFactorSymbolic/Numeric before");
  } else if (!mumps->id.size_schur) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  } else if (!mumps->schur_restored) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
  }
  /* Delegate the actual (LAPACK-based) inversion to the private helper */
  ierr = MatMumpsInvertSchur_Private(mumps);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2026 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsInvertSchurComplement"
/*@
  MatMumpsInvertSchurComplement - Invert the raw Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine uses the pointer to the raw data of the Schur Complement stored within MUMPS data structures.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices()
@*/
PetscErrorCode MatMumpsInvertSchurComplement(Mat F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* PetscTryMethod: silently ignored for non-MUMPS factors */
  ierr = PetscTryMethod(F,"MatMumpsInvertSchurComplement_C",(Mat),(F));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2056 
/* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSolveSchurComplement_MUMPS"
/* Solve the Schur complement system S x = rhs using the factored Schur data held by MUMPS.
   The helper MatMumpsSolveSchur_Private reads the right-hand side from mumps->id.redrhs
   and writes the solution through mumps->schur_sol, so this routine temporarily points
   those fields at the caller's Vec arrays and restores them afterwards. */
PetscErrorCode MatMumpsSolveSchurComplement_MUMPS(Mat F, Vec rhs, Vec sol)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  MumpsScalar    *orhs;                      /* saved original redrhs pointer */
  PetscScalar    *osol,*nrhs,*nsol;          /* saved solution buffer; borrowed Vec arrays */
  PetscInt       orhs_size,osol_size;        /* saved sizes matching the saved pointers */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->CleanUpMUMPS) { /* CleanUpMUMPS is set to true after numerical factorization */
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Numerical factorization phase not yet performed! You should call MatFactorSymbolic/Numeric before");
  } else if (!mumps->id.ICNTL(19)) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
  } else if (!mumps->id.size_schur) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  } else if (!mumps->schur_restored) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
  }
  /* swap pointers */
  orhs = mumps->id.redrhs;
  orhs_size = mumps->sizeredrhs;
  osol = mumps->schur_sol;
  osol_size = mumps->schur_sizesol;
  ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = (MumpsScalar*)nrhs;
  ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr);
  mumps->schur_sol = nsol;
  ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr);

  /* solve Schur complement */
  mumps->id.nrhs = 1; /* one right-hand side: rhs is a single Vec */
  ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
  /* restore pointers */
  ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = orhs;
  mumps->sizeredrhs = orhs_size;
  mumps->schur_sol = osol;
  mumps->schur_sizesol = osol_size;
  PetscFunctionReturn(0);
}
2102 
2103 #undef __FUNCT__
2104 #define __FUNCT__ "MatMumpsSolveSchurComplement"
2105 /*@
2106   MatMumpsSolveSchurComplement - Solve the Schur complement system computed by MUMPS during the factorization step
2107 
2108    Logically Collective on Mat
2109 
2110    Input Parameters:
2111 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2112 .  rhs - location where the right hand side of the Schur complement system is stored
2113 -  sol - location where the solution of the Schur complement system has to be returned
2114 
2115    Notes:
2116    MUMPS Schur complement mode is currently implemented for sequential matrices.
2117    The sizes of the vectors should match the size of the Schur complement
2118 
2119    Level: advanced
2120 
2121    References: MUMPS Users' Guide
2122 
2123 .seealso: MatGetFactor(), MatMumpsSetSchurIndices()
2124 @*/
2125 PetscErrorCode MatMumpsSolveSchurComplement(Mat F, Vec rhs, Vec sol)
2126 {
2127   PetscErrorCode ierr;
2128 
2129   PetscFunctionBegin;
2130   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
2131   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
2132   PetscValidHeaderSpecific(sol,VEC_CLASSID,2);
2133   PetscCheckSameComm(F,1,rhs,2);
2134   PetscCheckSameComm(F,1,sol,3);
2135   ierr = PetscUseMethod(F,"MatMumpsSolveSchurComplement_C",(Mat,Vec,Vec),(F,rhs,sol));CHKERRQ(ierr);
2136   PetscFunctionReturn(0);
2137 }
2138 
/* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSolveSchurComplementTranspose_MUMPS"
/* Solve the transposed Schur complement system S^T x = rhs. Same pointer-swapping
   protocol as MatMumpsSolveSchurComplement_MUMPS; the transpose solve is requested
   by temporarily setting ICNTL(9) = 0 around the private solve call. */
PetscErrorCode MatMumpsSolveSchurComplementTranspose_MUMPS(Mat F, Vec rhs, Vec sol)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  MumpsScalar    *orhs;                      /* saved original redrhs pointer */
  PetscScalar    *osol,*nrhs,*nsol;          /* saved solution buffer; borrowed Vec arrays */
  PetscInt       orhs_size,osol_size;        /* saved sizes matching the saved pointers */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->CleanUpMUMPS) { /* CleanUpMUMPS is set to true after numerical factorization */
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Numerical factorization phase not yet performed! You should call MatFactorSymbolic/Numeric before");
  } else if (!mumps->id.ICNTL(19)) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
  } else if (!mumps->id.size_schur) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  } else if (!mumps->schur_restored) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
  }
  /* swap pointers */
  orhs = mumps->id.redrhs;
  orhs_size = mumps->sizeredrhs;
  osol = mumps->schur_sol;
  osol_size = mumps->schur_sizesol;
  ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = (MumpsScalar*)nrhs;
  ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr);
  mumps->schur_sol = nsol;
  ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr);

  /* solve Schur complement */
  mumps->id.nrhs = 1; /* one right-hand side: rhs is a single Vec */
  mumps->id.ICNTL(9) = 0; /* ICNTL(9)=0 selects the transposed-system solve */
  ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1; /* restore default: solve with A (not A^T) */
  /* restore pointers */
  ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = orhs;
  mumps->sizeredrhs = orhs_size;
  mumps->schur_sol = osol;
  mumps->schur_sizesol = osol_size;
  PetscFunctionReturn(0);
}
2186 
2187 #undef __FUNCT__
2188 #define __FUNCT__ "MatMumpsSolveSchurComplementTranspose"
2189 /*@
2190   MatMumpsSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed by MUMPS during the factorization step
2191 
2192    Logically Collective on Mat
2193 
2194    Input Parameters:
2195 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2196 .  rhs - location where the right hand side of the Schur complement system is stored
2197 -  sol - location where the solution of the Schur complement system has to be returned
2198 
2199    Notes:
2200    MUMPS Schur complement mode is currently implemented for sequential matrices.
2201    The sizes of the vectors should match the size of the Schur complement
2202 
2203    Level: advanced
2204 
2205    References: MUMPS Users' Guide
2206 
2207 .seealso: MatGetFactor(), MatMumpsSetSchurIndices()
2208 @*/
2209 PetscErrorCode MatMumpsSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
2210 {
2211   PetscErrorCode ierr;
2212 
2213   PetscFunctionBegin;
2214   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
2215   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
2216   PetscValidHeaderSpecific(sol,VEC_CLASSID,2);
2217   PetscCheckSameComm(F,1,rhs,2);
2218   PetscCheckSameComm(F,1,sol,3);
2219   ierr = PetscUseMethod(F,"MatMumpsSolveSchurComplementTranspose_C",(Mat,Vec,Vec),(F,rhs,sol));CHKERRQ(ierr);
2220   PetscFunctionReturn(0);
2221 }
2222 
2223 /* -------------------------------------------------------------------------------------------*/
2224 #undef __FUNCT__
2225 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS"
2226 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
2227 {
2228   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2229 
2230   PetscFunctionBegin;
2231   mumps->id.ICNTL(icntl) = ival;
2232   PetscFunctionReturn(0);
2233 }
2234 
2235 #undef __FUNCT__
2236 #define __FUNCT__ "MatMumpsGetIcntl_MUMPS"
2237 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
2238 {
2239   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2240 
2241   PetscFunctionBegin;
2242   *ival = mumps->id.ICNTL(icntl);
2243   PetscFunctionReturn(0);
2244 }
2245 
2246 #undef __FUNCT__
2247 #define __FUNCT__ "MatMumpsSetIcntl"
2248 /*@
2249   MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
2250 
2251    Logically Collective on Mat
2252 
2253    Input Parameters:
2254 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2255 .  icntl - index of MUMPS parameter array ICNTL()
2256 -  ival - value of MUMPS ICNTL(icntl)
2257 
2258   Options Database:
2259 .   -mat_mumps_icntl_<icntl> <ival>
2260 
2261    Level: beginner
2262 
2263    References: MUMPS Users' Guide
2264 
2265 .seealso: MatGetFactor()
2266 @*/
2267 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
2268 {
2269   PetscErrorCode ierr;
2270 
2271   PetscFunctionBegin;
2272   PetscValidLogicalCollectiveInt(F,icntl,2);
2273   PetscValidLogicalCollectiveInt(F,ival,3);
2274   ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
2275   PetscFunctionReturn(0);
2276 }
2277 
2278 #undef __FUNCT__
2279 #define __FUNCT__ "MatMumpsGetIcntl"
2280 /*@
2281   MatMumpsGetIcntl - Get MUMPS parameter ICNTL()
2282 
2283    Logically Collective on Mat
2284 
2285    Input Parameters:
2286 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2287 -  icntl - index of MUMPS parameter array ICNTL()
2288 
2289   Output Parameter:
2290 .  ival - value of MUMPS ICNTL(icntl)
2291 
2292    Level: beginner
2293 
2294    References: MUMPS Users' Guide
2295 
2296 .seealso: MatGetFactor()
2297 @*/
2298 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
2299 {
2300   PetscErrorCode ierr;
2301 
2302   PetscFunctionBegin;
2303   PetscValidLogicalCollectiveInt(F,icntl,2);
2304   PetscValidIntPointer(ival,3);
2305   ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2306   PetscFunctionReturn(0);
2307 }
2308 
2309 /* -------------------------------------------------------------------------------------------*/
2310 #undef __FUNCT__
2311 #define __FUNCT__ "MatMumpsSetCntl_MUMPS"
2312 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
2313 {
2314   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2315 
2316   PetscFunctionBegin;
2317   mumps->id.CNTL(icntl) = val;
2318   PetscFunctionReturn(0);
2319 }
2320 
2321 #undef __FUNCT__
2322 #define __FUNCT__ "MatMumpsGetCntl_MUMPS"
2323 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
2324 {
2325   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2326 
2327   PetscFunctionBegin;
2328   *val = mumps->id.CNTL(icntl);
2329   PetscFunctionReturn(0);
2330 }
2331 
2332 #undef __FUNCT__
2333 #define __FUNCT__ "MatMumpsSetCntl"
2334 /*@
2335   MatMumpsSetCntl - Set MUMPS parameter CNTL()
2336 
2337    Logically Collective on Mat
2338 
2339    Input Parameters:
2340 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2341 .  icntl - index of MUMPS parameter array CNTL()
2342 -  val - value of MUMPS CNTL(icntl)
2343 
2344   Options Database:
2345 .   -mat_mumps_cntl_<icntl> <val>
2346 
2347    Level: beginner
2348 
2349    References: MUMPS Users' Guide
2350 
2351 .seealso: MatGetFactor()
2352 @*/
2353 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
2354 {
2355   PetscErrorCode ierr;
2356 
2357   PetscFunctionBegin;
2358   PetscValidLogicalCollectiveInt(F,icntl,2);
2359   PetscValidLogicalCollectiveReal(F,val,3);
2360   ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
2361   PetscFunctionReturn(0);
2362 }
2363 
2364 #undef __FUNCT__
2365 #define __FUNCT__ "MatMumpsGetCntl"
2366 /*@
2367   MatMumpsGetCntl - Get MUMPS parameter CNTL()
2368 
2369    Logically Collective on Mat
2370 
2371    Input Parameters:
2372 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2373 -  icntl - index of MUMPS parameter array CNTL()
2374 
2375   Output Parameter:
2376 .  val - value of MUMPS CNTL(icntl)
2377 
2378    Level: beginner
2379 
2380    References: MUMPS Users' Guide
2381 
2382 .seealso: MatGetFactor()
2383 @*/
2384 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
2385 {
2386   PetscErrorCode ierr;
2387 
2388   PetscFunctionBegin;
2389   PetscValidLogicalCollectiveInt(F,icntl,2);
2390   PetscValidRealPointer(val,3);
2391   ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2392   PetscFunctionReturn(0);
2393 }
2394 
2395 #undef __FUNCT__
2396 #define __FUNCT__ "MatMumpsGetInfo_MUMPS"
2397 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
2398 {
2399   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2400 
2401   PetscFunctionBegin;
2402   *info = mumps->id.INFO(icntl);
2403   PetscFunctionReturn(0);
2404 }
2405 
2406 #undef __FUNCT__
2407 #define __FUNCT__ "MatMumpsGetInfog_MUMPS"
2408 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
2409 {
2410   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2411 
2412   PetscFunctionBegin;
2413   *infog = mumps->id.INFOG(icntl);
2414   PetscFunctionReturn(0);
2415 }
2416 
2417 #undef __FUNCT__
2418 #define __FUNCT__ "MatMumpsGetRinfo_MUMPS"
2419 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
2420 {
2421   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2422 
2423   PetscFunctionBegin;
2424   *rinfo = mumps->id.RINFO(icntl);
2425   PetscFunctionReturn(0);
2426 }
2427 
2428 #undef __FUNCT__
2429 #define __FUNCT__ "MatMumpsGetRinfog_MUMPS"
2430 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
2431 {
2432   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2433 
2434   PetscFunctionBegin;
2435   *rinfog = mumps->id.RINFOG(icntl);
2436   PetscFunctionReturn(0);
2437 }
2438 
2439 #undef __FUNCT__
2440 #define __FUNCT__ "MatMumpsGetInfo"
2441 /*@
2442   MatMumpsGetInfo - Get MUMPS parameter INFO()
2443 
2444    Logically Collective on Mat
2445 
2446    Input Parameters:
2447 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2448 -  icntl - index of MUMPS parameter array INFO()
2449 
2450   Output Parameter:
2451 .  ival - value of MUMPS INFO(icntl)
2452 
2453    Level: beginner
2454 
2455    References: MUMPS Users' Guide
2456 
2457 .seealso: MatGetFactor()
2458 @*/
2459 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
2460 {
2461   PetscErrorCode ierr;
2462 
2463   PetscFunctionBegin;
2464   PetscValidIntPointer(ival,3);
2465   ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2466   PetscFunctionReturn(0);
2467 }
2468 
2469 #undef __FUNCT__
2470 #define __FUNCT__ "MatMumpsGetInfog"
2471 /*@
2472   MatMumpsGetInfog - Get MUMPS parameter INFOG()
2473 
2474    Logically Collective on Mat
2475 
2476    Input Parameters:
2477 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2478 -  icntl - index of MUMPS parameter array INFOG()
2479 
2480   Output Parameter:
2481 .  ival - value of MUMPS INFOG(icntl)
2482 
2483    Level: beginner
2484 
2485    References: MUMPS Users' Guide
2486 
2487 .seealso: MatGetFactor()
2488 @*/
2489 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
2490 {
2491   PetscErrorCode ierr;
2492 
2493   PetscFunctionBegin;
2494   PetscValidIntPointer(ival,3);
2495   ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2496   PetscFunctionReturn(0);
2497 }
2498 
2499 #undef __FUNCT__
2500 #define __FUNCT__ "MatMumpsGetRinfo"
2501 /*@
2502   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
2503 
2504    Logically Collective on Mat
2505 
2506    Input Parameters:
2507 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2508 -  icntl - index of MUMPS parameter array RINFO()
2509 
2510   Output Parameter:
2511 .  val - value of MUMPS RINFO(icntl)
2512 
2513    Level: beginner
2514 
2515    References: MUMPS Users' Guide
2516 
2517 .seealso: MatGetFactor()
2518 @*/
2519 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
2520 {
2521   PetscErrorCode ierr;
2522 
2523   PetscFunctionBegin;
2524   PetscValidRealPointer(val,3);
2525   ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2526   PetscFunctionReturn(0);
2527 }
2528 
2529 #undef __FUNCT__
2530 #define __FUNCT__ "MatMumpsGetRinfog"
2531 /*@
2532   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
2533 
2534    Logically Collective on Mat
2535 
2536    Input Parameters:
2537 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2538 -  icntl - index of MUMPS parameter array RINFOG()
2539 
2540   Output Parameter:
2541 .  val - value of MUMPS RINFOG(icntl)
2542 
2543    Level: beginner
2544 
2545    References: MUMPS Users' Guide
2546 
2547 .seealso: MatGetFactor()
2548 @*/
2549 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
2550 {
2551   PetscErrorCode ierr;
2552 
2553   PetscFunctionBegin;
2554   PetscValidRealPointer(val,3);
2555   ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2556   PetscFunctionReturn(0);
2557 }
2558 
2559 /*MC
2560   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
2561   distributed and sequential matrices via the external package MUMPS.
2562 
2563   Works with MATAIJ and MATSBAIJ matrices
2564 
2565   Options Database Keys:
2566 +  -mat_mumps_icntl_1 <6>: ICNTL(1): output stream for error messages (None)
2567 .  -mat_mumps_icntl_2 <0>: ICNTL(2): output stream for diagnostic printing, statistics, and warning (None)
2568 .  -mat_mumps_icntl_3 <0>: ICNTL(3): output stream for global information, collected on the host (None)
2569 .  -mat_mumps_icntl_4 <0>: ICNTL(4): level of printing (0 to 4) (None)
2570 .  -mat_mumps_icntl_6 <7>: ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7) (None)
2571 .  -mat_mumps_icntl_7 <7>: ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis (None)
2572 .  -mat_mumps_icntl_8 <77>: ICNTL(8): scaling strategy (-2 to 8 or 77) (None)
2573 .  -mat_mumps_icntl_10 <0>: ICNTL(10): max num of refinements (None)
2574 .  -mat_mumps_icntl_11 <0>: ICNTL(11): statistics related to an error analysis (via -ksp_view) (None)
2575 .  -mat_mumps_icntl_12 <1>: ICNTL(12): an ordering strategy for symmetric matrices (0 to 3) (None)
2576 .  -mat_mumps_icntl_13 <0>: ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting (None)
2577 .  -mat_mumps_icntl_14 <20>: ICNTL(14): percentage increase in the estimated working space (None)
2578 .  -mat_mumps_icntl_19 <0>: ICNTL(19): computes the Schur complement (None)
2579 .  -mat_mumps_icntl_22 <0>: ICNTL(22): in-core/out-of-core factorization and solve (0 or 1) (None)
.  -mat_mumps_icntl_23 <0>: ICNTL(23): max size of the working memory (MB) that can be allocated per processor (None)
2581 .  -mat_mumps_icntl_24 <0>: ICNTL(24): detection of null pivot rows (0 or 1) (None)
2582 .  -mat_mumps_icntl_25 <0>: ICNTL(25): compute a solution of a deficient matrix and a null space basis (None)
.  -mat_mumps_icntl_26 <0>: ICNTL(26): drives the solution phase if a Schur complement matrix is involved (None)
.  -mat_mumps_icntl_28 <1>: ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering (None)
2585 .  -mat_mumps_icntl_29 <0>: ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis (None)
2586 .  -mat_mumps_icntl_30 <0>: ICNTL(30): compute user-specified set of entries in inv(A) (None)
2587 .  -mat_mumps_icntl_31 <0>: ICNTL(31): indicates which factors may be discarded during factorization (None)
2588 .  -mat_mumps_icntl_33 <0>: ICNTL(33): compute determinant (None)
2589 .  -mat_mumps_cntl_1 <0.01>: CNTL(1): relative pivoting threshold (None)
2590 .  -mat_mumps_cntl_2 <1.49012e-08>: CNTL(2): stopping criterion of refinement (None)
2591 .  -mat_mumps_cntl_3 <0>: CNTL(3): absolute pivoting threshold (None)
2592 .  -mat_mumps_cntl_4 <-1>: CNTL(4): value for static pivoting (None)
2593 -  -mat_mumps_cntl_5 <0>: CNTL(5): fixation for null pivots (None)
2594 
2595   Level: beginner
2596 
2597 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
2598 
2599 M*/
2600 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Report MATSOLVERMUMPS as the solver package backing factor matrix A. */
static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
2609 
/* MatGetFactor for Seq and MPI AIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_mumps"
/* Create the MUMPS factor matrix B for an AIJ matrix A: allocate an empty shell of the
   same type/sizes, attach the Mat_MUMPS context and all MatMumps* method functions, pick
   the triple-conversion routine matching A's layout and the factor type, and initialize
   the MUMPS instance. The caller receives B in *F; the real work happens later in the
   symbolic/numeric factorization callbacks installed here. */
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  /* zero-row preallocation: B is only a shell, MUMPS holds the actual factors */
  if (isSeqAIJ) {
    ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  } else {
    ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
  }

  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);

  B->ops->view        = MatView_MUMPS;
  B->ops->getinfo     = MatGetInfo_MUMPS;
  B->ops->getdiagonal = MatGetDiagonal_MUMPS;

  /* expose the MatMumps* API on B via composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);

  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype            = MAT_FACTOR_LU;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0; /* MUMPS SYM=0: unsymmetric */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
#if defined(PETSC_USE_COMPLEX)
    mumps->sym = 2; /* complex: no SPD path, use general symmetric (SYM=2) */
#else
    if (A->spd_set && A->spd) mumps->sym = 1; /* SYM=1: symmetric positive definite */
    else                      mumps->sym = 2; /* SYM=2: general symmetric */
#endif
  }

  /* chain the destroy routine so Mat_MUMPS is freed along with B */
  mumps->isAIJ    = PETSC_TRUE;
  mumps->Destroy  = B->ops->destroy;
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
2686 
2687 /* MatGetFactor for Seq and MPI SBAIJ matrices */
2688 #undef __FUNCT__
2689 #define __FUNCT__ "MatGetFactor_sbaij_mumps"
2690 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
2691 {
2692   Mat            B;
2693   PetscErrorCode ierr;
2694   Mat_MUMPS      *mumps;
2695   PetscBool      isSeqSBAIJ;
2696 
2697   PetscFunctionBegin;
2698   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
2699   if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
2700   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
2701   /* Create the factorization matrix */
2702   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2703   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2704   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
2705   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2706   if (isSeqSBAIJ) {
2707     ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);
2708 
2709     mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
2710   } else {
2711     ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);
2712 
2713     mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
2714   }
2715 
2716   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2717   B->ops->view                   = MatView_MUMPS;
2718   B->ops->getdiagonal            = MatGetDiagonal_MUMPS;
2719 
2720   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
2721   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2722   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2723   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2724   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2725 
2726   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2727   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2728   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2729   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2730 
2731   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
2732   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
2733   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2734   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
2735   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
2736   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
2737   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);
2738 
2739   B->factortype = MAT_FACTOR_CHOLESKY;
2740 #if defined(PETSC_USE_COMPLEX)
2741   mumps->sym = 2;
2742 #else
2743   if (A->spd_set && A->spd) mumps->sym = 1;
2744   else                      mumps->sym = 2;
2745 #endif
2746 
2747   mumps->isAIJ    = PETSC_FALSE;
2748   mumps->Destroy  = B->ops->destroy;
2749   B->ops->destroy = MatDestroy_MUMPS;
2750   B->spptr        = (void*)mumps;
2751 
2752   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2753 
2754   *F = B;
2755   PetscFunctionReturn(0);
2756 }
2757 
2758 #undef __FUNCT__
2759 #define __FUNCT__ "MatGetFactor_baij_mumps"
2760 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
2761 {
2762   Mat            B;
2763   PetscErrorCode ierr;
2764   Mat_MUMPS      *mumps;
2765   PetscBool      isSeqBAIJ;
2766 
2767   PetscFunctionBegin;
2768   /* Create the factorization matrix */
2769   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
2770   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2771   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2772   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
2773   if (isSeqBAIJ) {
2774     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr);
2775   } else {
2776     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
2777   }
2778 
2779   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2780   if (ftype == MAT_FACTOR_LU) {
2781     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
2782     B->factortype            = MAT_FACTOR_LU;
2783     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
2784     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
2785     mumps->sym = 0;
2786   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
2787 
2788   B->ops->view        = MatView_MUMPS;
2789   B->ops->getdiagonal = MatGetDiagonal_MUMPS;
2790 
2791   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
2792   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2793   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2794   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2795   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2796 
2797   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2798   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2799   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2800   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2801 
2802   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
2803   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
2804   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2805   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
2806   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
2807   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
2808   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);
2809 
2810   mumps->isAIJ    = PETSC_TRUE;
2811   mumps->Destroy  = B->ops->destroy;
2812   B->ops->destroy = MatDestroy_MUMPS;
2813   B->spptr        = (void*)mumps;
2814 
2815   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2816 
2817   *F = B;
2818   PetscFunctionReturn(0);
2819 }
2820 
2821 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*);
2822 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
2823 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat,MatFactorType,Mat*);
2824 
2825 #undef __FUNCT__
2826 #define __FUNCT__ "MatSolverPackageRegister_MUMPS"
2827 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void)
2828 {
2829   PetscErrorCode ierr;
2830 
2831   PetscFunctionBegin;
2832   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2833   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2834   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2835   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2836   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2837   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2838   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2839   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2840   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2841   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2842   PetscFunctionReturn(0);
2843 }
2844 
2845