
/*
    Provides an interface to the MUMPS sparse solver
*/

#include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
#include <../src/mat/impls/sell/mpi/mpisell.h>

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#include <cmumps_c.h>
#else
#include <zmumps_c.h>
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
#include <smumps_c.h>
#else
#include <dmumps_c.h>
#endif
#endif
EXTERN_C_END
#define JOB_INIT -1
#define JOB_FACTSYMBOLIC 1
#define JOB_FACTNUMERIC 2
#define JOB_SOLVE 3
#define JOB_END -2

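/*
   For orientation, a minimal sketch (error checking omitted) of how these job codes
   drive a MUMPS instance through MUMPS_c/PetscMUMPS_c (both defined below):

     mumps->id.job = JOB_INIT;         MUMPS_c(&mumps->id);   initialize the instance
     mumps->id.job = JOB_FACTSYMBOLIC; MUMPS_c(&mumps->id);   analysis (symbolic factorization) phase
     mumps->id.job = JOB_FACTNUMERIC;  MUMPS_c(&mumps->id);   numerical factorization phase
     mumps->id.job = JOB_SOLVE;        MUMPS_c(&mumps->id);   solve phase (may be repeated)
     mumps->id.job = JOB_END;          MUMPS_c(&mumps->id);   free MUMPS-internal data
*/
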
/* calls to MUMPS */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define MUMPS_c cmumps_c
#else
#define MUMPS_c zmumps_c
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
#define MUMPS_c smumps_c
#else
#define MUMPS_c dmumps_c
#endif
#endif

/* if using PETSc's OpenMP support, we call MUMPS only on the master ranks. Before/after the call, we change/restore the set of CPUs the master ranks may run on */
#if defined(PETSC_HAVE_OPENMP_SUPPORT)
#define PetscMUMPS_c(mumps) \
  do { \
    if (mumps->use_petsc_omp_support) { \
      if (mumps->is_omp_master) { \
        ierr = PetscOmpCtrlOmpRegionOnMasterBegin(mumps->omp_ctrl);CHKERRQ(ierr); \
        MUMPS_c(&mumps->id); \
        ierr = PetscOmpCtrlOmpRegionOnMasterEnd(mumps->omp_ctrl);CHKERRQ(ierr); \
      } \
      ierr = PetscOmpCtrlBarrier(mumps->omp_ctrl);CHKERRQ(ierr); \
    } else { \
      MUMPS_c(&mumps->id); \
    } \
  } while (0)
#else
#define PetscMUMPS_c(mumps) \
  do { MUMPS_c(&mumps->id); } while (0)
#endif
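
/*
   All MUMPS calls in this file should go through PetscMUMPS_c(mumps) rather than
   MUMPS_c(&mumps->id) directly, so the OpenMP code path above is taken transparently.
   Note that when PETSc OpenMP support is enabled the macro uses a PetscErrorCode
   variable named 'ierr', which therefore must be in scope at every call site.
*/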

/* declare MumpsScalar */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define MumpsScalar mumps_complex
#else
#define MumpsScalar mumps_double_complex
#endif
#else
#define MumpsScalar PetscScalar
#endif

/* macros s.t. indices match MUMPS documentation */
#define ICNTL(I) icntl[(I)-1]
#define CNTL(I) cntl[(I)-1]
#define INFOG(I) infog[(I)-1]
#define INFO(I) info[(I)-1]
#define RINFOG(I) rinfog[(I)-1]
#define RINFO(I) rinfo[(I)-1]
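
/*
   Example: the MUMPS documentation indexes the control arrays from 1, so with the
   macros above mumps->id.ICNTL(7) expands to mumps->id.icntl[6], i.e. the entry the
   MUMPS manual calls ICNTL(7); likewise for CNTL, INFO(G) and RINFO(G).
*/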

typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;
#else
  ZMUMPS_STRUC_C id;
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;
#else
  DMUMPS_STRUC_C id;
#endif
#endif

  MatStructure matstruc;
  PetscMPIInt  myid,petsc_size;
  PetscInt     *irn,*jcn,nz,sym;
  PetscScalar  *val;
  MPI_Comm     mumps_comm;
  PetscInt     ICNTL9_pre;           /* check if ICNTL(9) is changed from previous MatSolve */
  VecScatter   scat_rhs, scat_sol;   /* used by MatSolve() */
  Vec          b_seq,x_seq;
  PetscInt     ninfo,*info;          /* display INFO */
  PetscInt     sizeredrhs;
  PetscScalar  *schur_sol;
  PetscInt     schur_sizesol;

  PetscBool    use_petsc_omp_support;
  PetscOmpCtrl omp_ctrl;             /* an OpenMP controller; blocked processes release their CPUs while waiting on it (MPI_Barrier does not give this guarantee) */
  MPI_Comm     petsc_comm,omp_comm;  /* petsc_comm is the PETSc matrix's comm */
  PetscMPIInt  mpinz;                /* on the master rank, nz = sum(mpinz) over omp_comm; on other ranks, mpinz = nz */
  PetscMPIInt  omp_comm_size;
  PetscBool    is_omp_master;        /* is this rank the master of omp_comm? */
  PetscMPIInt  *recvcount,*displs;

  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);
} Mat_MUMPS;

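/*
   ConvertToTriples points to one of the MatConvertToTriples_<intype>_<outtype>()
   routines below (selected when the factor is created, according to the input matrix
   type and whether MUMPS expects an unsymmetric or symmetric matrix); it is invoked
   again with MAT_REUSE_MATRIX in MatFactorNumeric_MUMPS() on repeated factorizations.
*/
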
extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);

static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
  ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
  ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
  mumps->id.size_schur = 0;
  mumps->id.schur_lld  = 0;
  mumps->id.ICNTL(19)  = 0;
  PetscFunctionReturn(0);
}

/* solve with rhs in mumps->id.redrhs and return in the same location */
static PetscErrorCode MatMumpsSolveSchur_Private(Mat F)
{
  Mat_MUMPS            *mumps=(Mat_MUMPS*)F->data;
  Mat                  S,B,X;
  MatFactorSchurStatus schurstatus;
  PetscInt             sizesol;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr);
  ierr = MatFactorGetSchurComplement(F,&S,&schurstatus);CHKERRQ(ierr);
  ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&B);CHKERRQ(ierr);
  switch (schurstatus) {
  case MAT_FACTOR_SCHUR_FACTORED:
    ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&X);CHKERRQ(ierr);
    if (!mumps->id.ICNTL(9)) { /* transpose solve */
      ierr = MatMatSolveTranspose(S,B,X);CHKERRQ(ierr);
    } else {
      ierr = MatMatSolve(S,B,X);CHKERRQ(ierr);
    }
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    sizesol = mumps->id.nrhs*mumps->id.size_schur;
    if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
      ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
      ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
      mumps->schur_sizesol = sizesol;
    }
    ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,mumps->schur_sol,&X);CHKERRQ(ierr);
    if (!mumps->id.ICNTL(9)) { /* transpose solve */
      ierr = MatTransposeMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);CHKERRQ(ierr);
    } else {
      ierr = MatMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);CHKERRQ(ierr);
    }
    ierr = MatCopy(X,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    break;
  default:
    SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status);
    break;
  }
  ierr = MatFactorRestoreSchurComplement(F,&S,schurstatus);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = MatDestroy(&X);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
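
/*
   Note the two Schur states handled above: with MAT_FACTOR_SCHUR_FACTORED the reduced
   system is solved via MatMatSolve()/MatMatSolveTranspose() on the factored Schur
   complement, while with MAT_FACTOR_SCHUR_INVERTED the explicitly inverted Schur
   complement is applied via MatMatMult()/MatTransposeMatMult() and the result is
   copied back into id.redrhs.
*/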

static PetscErrorCode MatMumpsHandleSchur_Private(Mat F, PetscBool expansion)
{
  Mat_MUMPS     *mumps=(Mat_MUMPS*)F->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) { /* do nothing when the Schur complement has not been computed */
    PetscFunctionReturn(0);
  }
  if (!expansion) { /* prepare for the condensation step */
    PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur;
    /* allocate the MUMPS internal array to store reduced right-hand sides */
    if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) {
      ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
      mumps->id.lredrhs = mumps->id.size_schur;
      ierr = PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);CHKERRQ(ierr);
      mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs;
    }
    mumps->id.ICNTL(26) = 1; /* condensation phase */
  } else { /* prepare for the expansion step */
    /* solve the Schur complement (this has to be done by the MUMPS user, so basically us) */
    ierr = MatMumpsSolveSchur_Private(F);CHKERRQ(ierr);
    mumps->id.ICNTL(26) = 2; /* expansion phase */
    PetscMUMPS_c(mumps);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    /* restore defaults */
    mumps->id.ICNTL(26) = -1;
    /* free the MUMPS internal array for redrhs when we have solved for multiple rhs, to save memory */
    if (mumps->id.nrhs > 1) {
      ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
      mumps->id.lredrhs = 0;
      mumps->sizeredrhs = 0;
    }
  }
  PetscFunctionReturn(0);
}
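
/*
   Summary of the reduction/expansion protocol implemented above: with ICNTL(26)=1 the
   MUMPS solve phase returns the reduced right-hand side in id.redrhs (condensation);
   the Schur system is then solved on the PETSc side by MatMumpsSolveSchur_Private(),
   and a second solve with ICNTL(26)=2 expands that Schur solution to the full solution.
   ICNTL(26) is reset to -1 afterwards so that subsequent solves treat the full system.
*/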

/*
  MatConvertToTriples_A_B - convert a PETSc matrix to triples: row[nz], col[nz], val[nz]

  input:
    A       - matrix in aij, baij or sbaij (bs=1) format
    shift   - 0: C style output triples; 1: Fortran style output triples.
    reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triples
              MAT_REUSE_MATRIX:   only the values in the v array are updated
  output:
    nnz     - dim of r, c, and v (number of local nonzero entries of A)
    r, c, v - row and col indices, matrix values (matrix triples)

  The returned values r, c, and sometimes v are obtained in a single PetscMalloc(); in MatDestroy_MUMPS() they are
  then freed with PetscFree(mumps->irn). This is not ideal code: the fact that v is ONLY sometimes part of mumps->irn
  means that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().

 */
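
/*
  A small worked example (not from the code): with shift=1 (Fortran-style indexing) the 2x2 matrix

      [2 0]
      [3 4]

  stored as AIJ gives nnz=3 and the triples r=(1,2,2), c=(1,1,2), v=(2,3,4).
*/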

PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
  PetscInt       nz,rnz,i,j;
  PetscErrorCode ierr;
  PetscInt       *row,*col;
  Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;

  PetscFunctionBegin;
  *v=aa->a;
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz;
    ai   = aa->i;
    aj   = aa->j;
    *nnz = nz;
    ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
    col  = row + nz;

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - ai[i];
      ajj = aj + ai[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz++] = ajj[j] + shift;
      }
    }
    *r = row; *c = col;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_seqsell_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_SeqSELL *a=(Mat_SeqSELL*)A->data;
  PetscInt    *ptr;

  PetscFunctionBegin;
  *v = a->val;
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscInt       nz,i,j,row;
    PetscErrorCode ierr;

    nz   = a->sliidx[a->totalslices];
    *nnz = nz;
    ierr = PetscMalloc1(2*nz, &ptr);CHKERRQ(ierr);
    *r   = ptr;
    *c   = ptr + nz;

    for (i=0; i<a->totalslices; i++) {
      for (j=a->sliidx[i],row=0; j<a->sliidx[i+1]; j++,row=((row+1)&0x07)) {
        *ptr++ = 8*i + row + shift;
      }
    }
    for (i=0; i<nz; i++) *ptr++ = a->colidx[i] + shift;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
  const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
  PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
  PetscErrorCode ierr;
  PetscInt       *row,*col;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  M = A->rmap->N/bs;
  *v = aa->a;
  if (reuse == MAT_INITIAL_MATRIX) {
    ai   = aa->i; aj = aa->j;
    nz   = bs2*aa->nz;
    *nnz = nz;
    ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
    col  = row + nz;

    for (i=0; i<M; i++) {
      ajj = aj + ai[i];
      rnz = ai[i+1] - ai[i];
      for (k=0; k<rnz; k++) {
        for (j=0; j<bs; j++) {
          for (m=0; m<bs; m++) {
            row[idx]   = i*bs + m + shift;
            col[idx++] = bs*(ajj[k]) + j + shift;
          }
        }
      }
    }
    *r = row; *c = col;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
  PetscInt       nz,rnz,i,j;
  PetscErrorCode ierr;
  PetscInt       *row,*col;
  Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;

  PetscFunctionBegin;
  *v = aa->a;
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz;
    ai   = aa->i;
    aj   = aa->j;
    *nnz = nz;
    ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
    col  = row + nz;

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - ai[i];
      ajj = aj + ai[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz++] = ajj[j] + shift;
      }
    }
    *r = row; *c = col;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
  PetscInt          nz,rnz,i,j;
  const PetscScalar *av,*v1;
  PetscScalar       *val;
  PetscErrorCode    ierr;
  PetscInt          *row,*col;
  Mat_SeqAIJ        *aa=(Mat_SeqAIJ*)A->data;
  PetscBool         missing;

  PetscFunctionBegin;
  ai    = aa->i; aj = aa->j; av = aa->a;
  adiag = aa->diag;
  ierr  = MatMissingDiagonal_SeqAIJ(A,&missing,&i);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    /* count nz in the upper triangular part of A */
    nz = 0;
    if (missing) {
      for (i=0; i<M; i++) {
        if (PetscUnlikely(adiag[i] >= ai[i+1])) {
          for (j=ai[i];j<ai[i+1];j++) {
            if (aj[j] < i) continue;
            nz++;
          }
        } else {
          nz += ai[i+1] - adiag[i];
        }
      }
    } else {
      for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
    }
    *nnz = nz;

    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    nz = 0;
    if (missing) {
      for (i=0; i<M; i++) {
        if (PetscUnlikely(adiag[i] >= ai[i+1])) {
          for (j=ai[i];j<ai[i+1];j++) {
            if (aj[j] < i) continue;
            row[nz] = i+shift;
            col[nz] = aj[j]+shift;
            val[nz] = av[j];
            nz++;
          }
        } else {
          rnz = ai[i+1] - adiag[i];
          ajj = aj + adiag[i];
          v1  = av + adiag[i];
          for (j=0; j<rnz; j++) {
            row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
          }
        }
      }
    } else {
      for (i=0; i<M; i++) {
        rnz = ai[i+1] - adiag[i];
        ajj = aj + adiag[i];
        v1  = av + adiag[i];
        for (j=0; j<rnz; j++) {
          row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
        }
      }
    }
    *r = row; *c = col; *v = val;
  } else {
    nz = 0; val = *v;
    if (missing) {
      for (i=0; i<M; i++) {
        if (PetscUnlikely(adiag[i] >= ai[i+1])) {
          for (j=ai[i];j<ai[i+1];j++) {
            if (aj[j] < i) continue;
            val[nz++] = av[j];
          }
        } else {
          rnz = ai[i+1] - adiag[i];
          v1  = av + adiag[i];
          for (j=0; j<rnz; j++) {
            val[nz++] = v1[j];
          }
        }
      }
    } else {
      for (i=0; i<M; i++) {
        rnz = ai[i+1] - adiag[i];
        v1  = av + adiag[i];
        for (j=0; j<rnz; j++) {
          val[nz++] = v1[j];
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart = A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part */
    for (j=0; j<countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart = A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part */
    for (j=0; j<countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz);
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i];
    v2     = bv + bs2*bi[i];

    idx = 0;
    /* A-part */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of the upper triangular part of mat */
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part */
    for (j=0; j<countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
  ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
  ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
  ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
  ierr = PetscFree(mumps->irn);CHKERRQ(ierr);
  ierr = PetscFree(mumps->info);CHKERRQ(ierr);
  ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
  mumps->id.job = JOB_END;
  PetscMUMPS_c(mumps);
#if defined(PETSC_HAVE_OPENMP_SUPPORT)
  if (mumps->use_petsc_omp_support) { ierr = PetscOmpCtrlDestroy(&mumps->omp_ctrl);CHKERRQ(ierr); }
#endif
  ierr = PetscFree2(mumps->recvcount,mumps->displs);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorCreateSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInverse_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInverseTranspose_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS        *mumps=(Mat_MUMPS*)A->data;
  PetscScalar      *array;
  Vec              b_seq;
  IS               is_iden,is_petsc;
  PetscErrorCode   ierr;
  PetscInt         i;
  PetscBool        second_solve = PETSC_FALSE;
  static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
  ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);

  if (A->factorerrortype) {
    ierr = PetscInfo2(A,"MatSolve is called with singular matrix factor, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
    ierr = VecSetInf(x);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  mumps->id.ICNTL(20) = 0; /* dense RHS */
  mumps->id.nrhs      = 1;
  b_seq = mumps->b_seq;
  if (mumps->petsc_size > 1) {
    /* MUMPS only supports a centralized rhs. Scatter b into a sequential rhs vector */
    ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
  } else {  /* petsc_size == 1 */
    ierr = VecCopy(b,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&array);CHKERRQ(ierr);
  }
  if (!mumps->myid) { /* define rhs on the host */
    mumps->id.nrhs = 1;
    mumps->id.rhs = (MumpsScalar*)array;
  }

  /*
     handle condensation step of Schur complement (if any)
     We set by default ICNTL(26) == -1 when Schur indices have been provided by the user.
     According to the MUMPS (5.0.0) manual, any value would be harmful during the factorization phase.
     Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve solve the full system.
     This requires an extra call to PetscMUMPS_c and the computation of the factors for S.
  */
  if (mumps->id.size_schur > 0 && (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2)) {
    if (mumps->petsc_size > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported from PETSc\n");
    second_solve = PETSC_TRUE;
    ierr = MatMumpsHandleSchur_Private(A,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* solve phase */
  /*-------------*/
  mumps->id.job = JOB_SOLVE;
  PetscMUMPS_c(mumps);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  /* handle expansion step of Schur complement (if any) */
  if (second_solve) {
    ierr = MatMumpsHandleSchur_Private(A,PETSC_TRUE);CHKERRQ(ierr);
  }

  if (mumps->petsc_size > 1) { /* convert the MUMPS distributed solution to the PETSc mpi x */
    if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
      /* when id.ICNTL(9) changes, the contents of isol_loc may change (though its size, lsol_loc, does not), so recreate scat_sol */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    }
    if (!mumps->scat_sol) { /* create scatter scat_sol */
      ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
      for (i=0; i<mumps->id.lsol_loc; i++) {
        mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
      ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
      ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);

      mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
    }

    ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }

  if (mumps->petsc_size > 1) {if (!mumps->myid) {ierr = VecRestoreArray(b_seq,&array);CHKERRQ(ierr);}}
  else {ierr = VecRestoreArray(x,&array);CHKERRQ(ierr);}

  ierr = PetscLogFlops(2.0*mumps->id.RINFO(3));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  mumps->id.ICNTL(9) = 0; /* ICNTL(9)=0: solve with A^T instead of A */
  ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1; /* restore the default: solve with A */
  PetscFunctionReturn(0);
}

PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode    ierr;
  Mat               Bt = NULL;
  PetscBool         flg, flgT;
  Mat_MUMPS         *mumps=(Mat_MUMPS*)A->data;
  PetscInt          i,nrhs,M;
  PetscScalar       *array;
  const PetscScalar *rbray;
  PetscInt          lsol_loc,nlsol_loc,*isol_loc,*idxx,*isol_loc_save,iidx = 0;
  PetscScalar       *bray,*sol_loc,*sol_loc_save;
  IS                is_to,is_from;
  PetscInt          k,proc,j,m,myrstart;
  const PetscInt    *rstart;
  Vec               v_mpi,b_seq,msol_loc;
  VecScatter        scat_rhs,scat_sol;
  PetscScalar       *aa;
  PetscInt          spnr,*ia,*ja;
  Mat_MPIAIJ        *b = NULL;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (flg) { /* dense B */
    if (B->rmap->n != X->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");
    mumps->id.ICNTL(20) = 0; /* dense RHS */
  } else { /* sparse B */
    ierr = PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT,&flgT);CHKERRQ(ierr);
    if (flgT) { /* input B is the transpose of the actual RHS matrix,
                   because MUMPS requires sparse compressed COLUMN storage! See MatMatTransposeSolve_MUMPS() */
      ierr = MatTransposeGetMat(B,&Bt);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATTRANSPOSEMAT matrix");
    mumps->id.ICNTL(20) = 1; /* sparse RHS */
  }

  ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr);
  mumps->id.nrhs = nrhs;
  mumps->id.lrhs = M;
  mumps->id.rhs  = NULL;

  if (mumps->petsc_size == 1) {
    PetscScalar *aa;
    PetscInt    spnr,*ia,*ja;
    PetscBool   second_solve = PETSC_FALSE;

    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    mumps->id.rhs = (MumpsScalar*)array;

    if (!Bt) { /* dense B */
      /* copy B to X */
      ierr = MatDenseGetArrayRead(B,&rbray);CHKERRQ(ierr);
      ierr = PetscArraycpy(array,rbray,M*nrhs);CHKERRQ(ierr);
      ierr = MatDenseRestoreArrayRead(B,&rbray);CHKERRQ(ierr);
    } else { /* sparse B */
      ierr = MatSeqAIJGetArray(Bt,&aa);CHKERRQ(ierr);
      ierr = MatGetRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr);
      if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure");
      /* MUMPS requires ia and ja to start at 1! */
      mumps->id.irhs_ptr    = ia;
      mumps->id.irhs_sparse = ja;
      mumps->id.nz_rhs      = ia[spnr] - 1;
      mumps->id.rhs_sparse  = (MumpsScalar*)aa;
    }
    /* handle condensation step of Schur complement (if any) */
    if (mumps->id.size_schur > 0 && (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2)) {
      second_solve = PETSC_TRUE;
      ierr = MatMumpsHandleSchur_Private(A,PETSC_FALSE);CHKERRQ(ierr);
    }
    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(mumps);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* handle expansion step of Schur complement (if any) */
    if (second_solve) {
      ierr = MatMumpsHandleSchur_Private(A,PETSC_TRUE);CHKERRQ(ierr);
    }
    if (Bt) { /* sparse B */
      ierr = MatSeqAIJRestoreArray(Bt,&aa);CHKERRQ(ierr);
      ierr = MatRestoreRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr);
      if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot restore IJ structure");
    }
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /*--------- parallel case: MUMPS requires rhs B to be centralized on the host! --------*/
  if (mumps->petsc_size > 1 && mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported from PETSc\n");

  /* create msol_loc to hold the MUMPS local solution */
  isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
  sol_loc_save  = (PetscScalar*)mumps->id.sol_loc;

  lsol_loc  = mumps->id.lsol_loc;
  nlsol_loc = nrhs*lsol_loc;     /* length of sol_loc */
  ierr = PetscMalloc2(nlsol_loc,&sol_loc,lsol_loc,&isol_loc);CHKERRQ(ierr);
  mumps->id.sol_loc  = (MumpsScalar*)sol_loc;
  mumps->id.isol_loc = isol_loc;

  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&msol_loc);CHKERRQ(ierr);

  if (!Bt) { /* dense B */
    /* scatter v_mpi to b_seq because MUMPS only supports a centralized rhs */
    /* wrap the dense rhs matrix B into a vector v_mpi */
    ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);

    /* scatter v_mpi to b_seq in proc[0]. MUMPS requires rhs to be centralized on the host! */
    if (!mumps->myid) {
      PetscInt *idx;
      /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B */
      ierr = PetscMalloc1(nrhs*M,&idx);CHKERRQ(ierr);
      ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr);
      k = 0;
      for (proc=0; proc<mumps->petsc_size; proc++){
        for (j=0; j<nrhs; j++){
          for (i=rstart[proc]; i<rstart[proc+1]; i++) idx[k++] = j*M + i;
        }
      }

      ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_OWN_POINTER,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr);
    }
    ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    if (!mumps->myid) { /* define rhs on the host */
      ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr);
      mumps->id.rhs = (MumpsScalar*)bray;
      ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr);
    }

  } else { /* sparse B */
    b = (Mat_MPIAIJ*)Bt->data;

    /* wrap dense X into a vector v_mpi */
    ierr = MatGetLocalSize(X,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)X),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&bray);CHKERRQ(ierr);

    if (!mumps->myid) {
      ierr = MatSeqAIJGetArray(b->A,&aa);CHKERRQ(ierr);
      ierr = MatGetRowIJ(b->A,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr);
      if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure");
      /* MUMPS requires ia and ja to start at 1! */
      mumps->id.irhs_ptr    = ia;
      mumps->id.irhs_sparse = ja;
      mumps->id.nz_rhs      = ia[spnr] - 1;
      mumps->id.rhs_sparse  = (MumpsScalar*)aa;
    } else {
      mumps->id.irhs_ptr    = NULL;
      mumps->id.irhs_sparse = NULL;
      mumps->id.nz_rhs      = 0;
      mumps->id.rhs_sparse  = NULL;
    }
  }

  /* solve phase */
  /*-------------*/
  mumps->id.job = JOB_SOLVE;
  PetscMUMPS_c(mumps);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  /* scatter the MUMPS distributed solution to the PETSc vector v_mpi, which shares local arrays with the solution matrix X */
  ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr);

  /* create scatter scat_sol */
  ierr = MatGetOwnershipRanges(X,&rstart);CHKERRQ(ierr);
  /* iidx: index for scattering the MUMPS solution to PETSc X */

  ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr);
  ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr);
  for (i=0; i<lsol_loc; i++) {
    isol_loc[i] -= 1; /* change Fortran style to C style. isol_loc[i+j*lsol_loc] contains x[isol_loc[i]] in the j-th vector */

    for (proc=0; proc<mumps->petsc_size; proc++){
      if (isol_loc[i] >= rstart[proc] && isol_loc[i] < rstart[proc+1]) {
        myrstart = rstart[proc];
        k        = isol_loc[i] - myrstart;        /* local index on 1st column of petsc vector X */
        iidx     = k + myrstart*nrhs;             /* maps mumps isol_loc[i] to petsc index in X */
        m        = rstart[proc+1] - rstart[proc]; /* rows of X for this proc */
        break;
      }
    }

    for (j=0; j<nrhs; j++) idxx[i+j*lsol_loc] = iidx + j*m;
  }
  ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
  ierr = VecScatterCreate(msol_loc,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr);
  ierr = VecScatterBegin(scat_sol,msol_loc,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = ISDestroy(&is_from);CHKERRQ(ierr);
  ierr = ISDestroy(&is_to);CHKERRQ(ierr);
  ierr = VecScatterEnd(scat_sol,msol_loc,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);

  /* free spaces */
  mumps->id.sol_loc  = (MumpsScalar*)sol_loc_save;
  mumps->id.isol_loc = isol_loc_save;

  ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr);
  ierr = PetscFree(idxx);CHKERRQ(ierr);
  ierr = VecDestroy(&msol_loc);CHKERRQ(ierr);
  ierr = VecDestroy(&v_mpi);CHKERRQ(ierr);
  if (Bt) {
    if (!mumps->myid) {
      b = (Mat_MPIAIJ*)Bt->data;
      ierr = MatSeqAIJRestoreArray(b->A,&aa);CHKERRQ(ierr);
      ierr = MatRestoreRowIJ(b->A,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr);
      if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot restore IJ structure");
    }
  } else {
    ierr = VecDestroy(&b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr);
  }
  ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr);
  ierr = PetscLogFlops(2.0*nrhs*mumps->id.RINFO(3));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatMatTransposeSolve_MUMPS(Mat A,Mat Bt,Mat X)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  Mat            B;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompareAny((PetscObject)Bt,&flg,MATSEQAIJ,MATMPIAIJ,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)Bt),PETSC_ERR_ARG_WRONG,"Matrix Bt must be MATAIJ matrix");

  /* Create B=Bt^T that uses Bt's data structure */
  ierr = MatCreateTranspose(Bt,&B);CHKERRQ(ierr);

  ierr = MatMatSolve_MUMPS(A,B,X);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#if !defined(PETSC_USE_COMPLEX)
/*
  input:
   F:        numeric factor
  output:
   nneg:     total number of negative pivots
   nzero:    total number of zero pivots
   npos:     (global dimension of F) - nneg - nzero
*/
PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
  /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
  if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.ICNTL(13));

  if (nneg) *nneg = mumps->id.INFOG(12);
  if (nzero || npos) {
    if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
    if (nzero) *nzero = mumps->id.INFOG(28);
    if (npos) *npos   = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
  }
  PetscFunctionReturn(0);
}
#endif
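
/*
   A sketch of typical usage (option names are the ones registered in
   PetscSetMUMPSFromOptions() below): run with -mat_mumps_icntl_13 1 -mat_mumps_icntl_24 1
   so that ScaLAPACK is bypassed on the root node and null pivots are detected, then call

     PetscInt nneg,nzero,npos;
     ierr = MatGetInertia(F,&nneg,&nzero,&npos);CHKERRQ(ierr);

   on the factored matrix F to retrieve the inertia.
*/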

PetscErrorCode MatMumpsGatherNonzerosOnMaster(MatReuse reuse,Mat_MUMPS *mumps)
{
  PetscErrorCode ierr;
  PetscInt       i,nz=0,*irn=NULL,*jcn=NULL;
  PetscScalar    *val=NULL;
  PetscMPIInt    mpinz,*recvcount=NULL,*displs=NULL;

  PetscFunctionBegin;
  if (mumps->omp_comm_size > 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      /* master first gathers counts of nonzeros to receive */
      if (mumps->is_omp_master) { ierr = PetscMalloc2(mumps->omp_comm_size,&recvcount,mumps->omp_comm_size,&displs);CHKERRQ(ierr); }
      ierr = PetscMPIIntCast(mumps->nz,&mpinz);CHKERRQ(ierr);
      ierr = MPI_Gather(&mpinz,1,MPI_INT,recvcount,1,MPI_INT,0/*root*/,mumps->omp_comm);CHKERRQ(ierr);

      /* master allocates memory to receive nonzeros */
      if (mumps->is_omp_master) {
        displs[0] = 0;
        for (i=1; i<mumps->omp_comm_size; i++) displs[i] = displs[i-1] + recvcount[i-1];
        nz   = displs[mumps->omp_comm_size-1] + recvcount[mumps->omp_comm_size-1];
        ierr = PetscMalloc(2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar),&irn);CHKERRQ(ierr);
        jcn  = irn + nz;
        val  = (PetscScalar*)(jcn + nz);
      }

      /* save the gatherv plan */
      mumps->mpinz     = mpinz; /* used as send count */
      mumps->recvcount = recvcount;
      mumps->displs    = displs;

      /* master gathers nonzeros */
      ierr = MPI_Gatherv(mumps->irn,mpinz,MPIU_INT,irn,mumps->recvcount,mumps->displs,MPIU_INT,0/*root*/,mumps->omp_comm);CHKERRQ(ierr);
      ierr = MPI_Gatherv(mumps->jcn,mpinz,MPIU_INT,jcn,mumps->recvcount,mumps->displs,MPIU_INT,0/*root*/,mumps->omp_comm);CHKERRQ(ierr);
      ierr = MPI_Gatherv(mumps->val,mpinz,MPIU_SCALAR,val,mumps->recvcount,mumps->displs,MPIU_SCALAR,0/*root*/,mumps->omp_comm);CHKERRQ(ierr);

      /* master frees its row/col/val and replaces them with the bigger arrays */
      if (mumps->is_omp_master) {
        ierr = PetscFree(mumps->irn);CHKERRQ(ierr); /* irn/jcn/val are allocated together so free only irn */
        mumps->nz  = nz; /* it is the sum of mpinz over omp_comm */
        mumps->irn = irn;
        mumps->jcn = jcn;
        mumps->val = val;
      }
    } else {
      ierr = MPI_Gatherv((mumps->is_omp_master?MPI_IN_PLACE:mumps->val),mumps->mpinz,MPIU_SCALAR,mumps->val,mumps->recvcount,mumps->displs,MPIU_SCALAR,0/*root*/,mumps->omp_comm);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->data;
  PetscErrorCode ierr;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  if (mumps->id.INFOG(1) < 0) {
    if (mumps->id.INFOG(1) == -6) {
      ierr = PetscInfo2(A,"MatFactorNumeric is called with singular matrix structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
    }
    ierr = PetscInfo2(A,"MatFactorNumeric is called after the analysis phase failed, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);
  ierr = MatMumpsGatherNonzerosOnMaster(MAT_REUSE_MATRIX,mumps);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) { /* A is centralized */
    if (!mumps->myid) {
      mumps->id.a = (MumpsScalar*)mumps->val;
    }
  } else {
    mumps->id.a_loc = (MumpsScalar*)mumps->val;
  }
  PetscMUMPS_c(mumps);
  if (mumps->id.INFOG(1) < 0) {
    if (A->erroriffailure) {
      SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
    } else {
      if (mumps->id.INFOG(1) == -10) { /* numerically singular matrix */
        ierr = PetscInfo2(F,"matrix is numerically singular, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
        F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      } else if (mumps->id.INFOG(1) == -13) {
        ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, cannot allocate required memory %d megabytes\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
      } else if (mumps->id.INFOG(1) == -8 || mumps->id.INFOG(1) == -9 || (-16 < mumps->id.INFOG(1) && mumps->id.INFOG(1) < -10)) {
        ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d, problem with work array\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
      } else {
        ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
        F->factorerrortype = MAT_FACTOR_OTHER;
      }
    }
  }
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.ICNTL(16));

  F->assembled    = PETSC_TRUE;
  mumps->matstruc = SAME_NONZERO_PATTERN;
  if (F->schur) { /* reset the Schur status to unfactored */
    if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
      mumps->id.ICNTL(19) = 2;
      ierr = MatTranspose(F->schur,MAT_INPLACE_MATRIX,&F->schur);CHKERRQ(ierr);
    }
    ierr = MatFactorRestoreSchurComplement(F,NULL,MAT_FACTOR_SCHUR_UNFACTORED);CHKERRQ(ierr);
  }

  /* just to be sure that the ICNTL(19) value returned by a call to MatMumpsGetIcntl is always consistent */
  if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3;

  if (!mumps->is_omp_master) mumps->id.INFO(23) = 0;
  if (mumps->petsc_size > 1) {
    PetscInt    lsol_loc;
    PetscScalar *sol_loc;

    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);

    /* distributed solution; create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  ierr = PetscLogFlops(mumps->id.RINFO(2));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1286 
1287 /* Sets MUMPS options from the options database */
1288 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
1289 {
1290   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
1291   PetscErrorCode ierr;
1292   PetscInt       icntl,info[80],i,ninfo=80;
1293   PetscBool      flg;
1294 
1295   PetscFunctionBegin;
1296   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
1297   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
1298   if (flg) mumps->id.ICNTL(1) = icntl;
1299   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
1300   if (flg) mumps->id.ICNTL(2) = icntl;
1301   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
1302   if (flg) mumps->id.ICNTL(3) = icntl;
1303 
1304   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
1305   if (flg) mumps->id.ICNTL(4) = icntl;
1306   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
1307 
1308   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
1309   if (flg) mumps->id.ICNTL(6) = icntl;
1310 
1311   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
1312   if (flg) {
1313     if (icntl== 1 && mumps->petsc_size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
1314     else mumps->id.ICNTL(7) = icntl;
1315   }
1316 
1317   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
1318   /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */
1319   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
1320   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
1321   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
1322   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
1323   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
1324   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
1325   if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any existing Schur data */
1326     ierr = MatDestroy(&F->schur);CHKERRQ(ierr);
1327     ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
1328   }
1329   /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr); -- sparse rhs is not supported in PETSc API */
1330   /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */
1331 
1332   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
1333   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size (in MB) of the working memory that MUMPS can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
1334   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
1335   if (mumps->id.ICNTL(24)) {
1336     mumps->id.ICNTL(13) = 1; /* turn off ScaLAPACK to help with the correct detection of null pivots */
1337   }
1338 
1339   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computes a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
1340   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix has been computed","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
1341   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
1342   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
1343   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
1344   /* ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr); */ /* call MatMumpsGetInverse() directly */
1345   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
1346   /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elimination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr);  -- not supported by PETSc API */
1347   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
1348   ierr = PetscOptionsInt("-mat_mumps_icntl_35","ICNTL(35): activates Block Low Rank (BLR) based factorization","None",mumps->id.ICNTL(35),&mumps->id.ICNTL(35),NULL);CHKERRQ(ierr);
1349   ierr = PetscOptionsInt("-mat_mumps_icntl_36","ICNTL(36): choice of BLR factorization variant","None",mumps->id.ICNTL(36),&mumps->id.ICNTL(36),NULL);CHKERRQ(ierr);
1350   ierr = PetscOptionsInt("-mat_mumps_icntl_38","ICNTL(38): estimated compression rate of LU factors with BLR","None",mumps->id.ICNTL(38),&mumps->id.ICNTL(38),NULL);CHKERRQ(ierr);
1351 
1352   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
1353   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
1354   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
1355   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
1356   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
1357   ierr = PetscOptionsReal("-mat_mumps_cntl_7","CNTL(7): dropping parameter used during BLR","None",mumps->id.CNTL(7),&mumps->id.CNTL(7),NULL);CHKERRQ(ierr);
1358 
1359   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);CHKERRQ(ierr);
1360 
1361   ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr);
1362   if (ninfo) {
1363     if (ninfo > 80) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must be <= 80\n",ninfo);
1364     ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr);
1365     mumps->ninfo = ninfo;
1366     for (i=0; i<ninfo; i++) {
1367       if (info[i] < 1 || info[i] > 80) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must be between 1 and 80\n",info[i]);
1368       else mumps->info[i] = info[i];
1369     }
1370   }
1371 
1372   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1373   PetscFunctionReturn(0);
1374 }
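
/*
   A minimal example of driving the options handled above from the command
   line (a hypothetical executable name; any subset of the options may be
   given, and they take effect at the next symbolic factorization):

       ./ex1 -pc_type lu -pc_factor_mat_solver_type mumps \
             -mat_mumps_icntl_4 2 -mat_mumps_icntl_14 30 -mat_mumps_cntl_1 0.01
*/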
1375 
1376 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
1377 {
1378   PetscErrorCode ierr;
1379   PetscInt       nthreads=0;
1380 
1381   PetscFunctionBegin;
1382   mumps->petsc_comm = PetscObjectComm((PetscObject)A);
1383   ierr = MPI_Comm_size(mumps->petsc_comm,&mumps->petsc_size);CHKERRQ(ierr);
1384   ierr = MPI_Comm_rank(mumps->petsc_comm,&mumps->myid);CHKERRQ(ierr); /* so that code like "if (!myid)" still works even if mumps_comm is different */
1385 
1386   ierr = PetscOptionsHasName(NULL,NULL,"-mat_mumps_use_omp_threads",&mumps->use_petsc_omp_support);CHKERRQ(ierr);
1387   if (mumps->use_petsc_omp_support) nthreads = -1; /* -1 will let PetscOmpCtrlCreate() guess a proper value when user did not supply one */
1388   ierr = PetscOptionsGetInt(NULL,NULL,"-mat_mumps_use_omp_threads",&nthreads,NULL);CHKERRQ(ierr);
1389   if (mumps->use_petsc_omp_support) {
1390 #if defined(PETSC_HAVE_OPENMP_SUPPORT)
1391     ierr = PetscOmpCtrlCreate(mumps->petsc_comm,nthreads,&mumps->omp_ctrl);CHKERRQ(ierr);
1392     ierr = PetscOmpCtrlGetOmpComms(mumps->omp_ctrl,&mumps->omp_comm,&mumps->mumps_comm,&mumps->is_omp_master);CHKERRQ(ierr);
1393 #else
1394     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"The system does not have PETSc OpenMP support, but the -mat_mumps_use_omp_threads option was given. Configure PETSc with --with-openmp --download-hwloc (or --with-hwloc) to enable it; see the MATSOLVERMUMPS manual page for details\n");
1395 #endif
1396   } else {
1397     mumps->omp_comm      = PETSC_COMM_SELF;
1398     mumps->mumps_comm    = mumps->petsc_comm;
1399     mumps->is_omp_master = PETSC_TRUE;
1400   }
1401   ierr = MPI_Comm_size(mumps->omp_comm,&mumps->omp_comm_size);CHKERRQ(ierr);
1402 
1403   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->mumps_comm);
1404   mumps->id.job = JOB_INIT;
1405   mumps->id.par = 1;  /* host participates in factorization and solve */
1406   mumps->id.sym = mumps->sym;
1407 
1408   PetscMUMPS_c(mumps);
1409 
1410   /* copy MUMPS default control values from master to slaves. Although slaves do not call MUMPS, they may access these values in code.
1411      For example, ICNTL(9) is initialized to 1 by MUMPS and slaves check ICNTL(9) in MatSolve_MUMPS.
1412    */
1413   ierr = MPI_Bcast(mumps->id.icntl,60,MPIU_INT, 0,mumps->omp_comm);CHKERRQ(ierr); /* see MUMPS-5.1.2 Manual Section 9 */
1414   ierr = MPI_Bcast(mumps->id.cntl, 15,MPIU_REAL,0,mumps->omp_comm);CHKERRQ(ierr);
1415 
1416   mumps->scat_rhs     = NULL;
1417   mumps->scat_sol     = NULL;
1418 
1419   /* set PETSc-MUMPS default options - override MUMPS default */
1420   mumps->id.ICNTL(3) = 0;
1421   mumps->id.ICNTL(4) = 0;
1422   if (mumps->petsc_size == 1) {
1423     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
1424   } else {
1425     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
1426     mumps->id.ICNTL(20) = 0;   /* rhs is in dense format */
1427     mumps->id.ICNTL(21) = 1;   /* distributed solution */
1428   }
1429 
1430   /* schur */
1431   mumps->id.size_schur      = 0;
1432   mumps->id.listvar_schur   = NULL;
1433   mumps->id.schur           = NULL;
1434   mumps->sizeredrhs         = 0;
1435   mumps->schur_sol          = NULL;
1436   mumps->schur_sizesol      = 0;
1437   PetscFunctionReturn(0);
1438 }
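
/*
   A hypothetical launch exercising the MPI+OpenMP path set up above (requires
   PETSc configured with --with-openmp --download-hwloc or --with-hwloc):

       mpirun -n 16 ./ex1 -mat_mumps_use_omp_threads 4

   PetscOmpCtrlCreate() then groups the 16 MPI ranks so that MUMPS runs only
   on the master ranks, each driving 4 OpenMP threads, while the rest of PETSc
   remains flat-MPI on all 16 ranks.
*/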
1439 
1440 PetscErrorCode MatFactorSymbolic_MUMPS_ReportIfError(Mat F,Mat A,const MatFactorInfo *info,Mat_MUMPS *mumps)
1441 {
1442   PetscErrorCode ierr;
1443 
1444   PetscFunctionBegin;
1445   ierr = MPI_Bcast(mumps->id.infog, 80,MPIU_INT, 0,mumps->omp_comm);CHKERRQ(ierr); /* see MUMPS-5.1.2 manual p82 */
1446   ierr = MPI_Bcast(mumps->id.rinfog,20,MPIU_REAL,0,mumps->omp_comm);CHKERRQ(ierr);
1447   if (mumps->id.INFOG(1) < 0) {
1448     if (A->erroriffailure) {
1449       SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
1450     } else {
1451       if (mumps->id.INFOG(1) == -6) {
1452         ierr = PetscInfo2(F,"matrix is singular in structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
1453         F->factorerrortype = MAT_FACTOR_STRUCT_ZEROPIVOT;
1454       } else if (mumps->id.INFOG(1) == -5 || mumps->id.INFOG(1) == -7) {
1455         ierr = PetscInfo2(F,"problem of workspace, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
1456         F->factorerrortype = MAT_FACTOR_OUTMEMORY;
1457       } else {
1458         ierr = PetscInfo2(F,"Error reported by MUMPS in analysis phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
1459         F->factorerrortype = MAT_FACTOR_OTHER;
1460       }
1461     }
1462   }
1463   PetscFunctionReturn(0);
1464 }
1465 
1466 /* Note: the PETSc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
1467 PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1468 {
1469   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
1470   PetscErrorCode ierr;
1471   Vec            b;
1472   IS             is_iden;
1473   const PetscInt M = A->rmap->N;
1474 
1475   PetscFunctionBegin;
1476   mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1477 
1478   /* Set MUMPS options from the options database */
1479   ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);
1480 
1481   ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);
1482   ierr = MatMumpsGatherNonzerosOnMaster(MAT_INITIAL_MATRIX,mumps);CHKERRQ(ierr);
1483 
1484   /* analysis phase */
1485   /*----------------*/
1486   mumps->id.job = JOB_FACTSYMBOLIC;
1487   mumps->id.n   = M;
1488   switch (mumps->id.ICNTL(18)) {
1489   case 0:  /* centralized assembled matrix input */
1490     if (!mumps->myid) {
1491       mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1492       if (mumps->id.ICNTL(6)>1) {
1493         mumps->id.a = (MumpsScalar*)mumps->val;
1494       }
1495       if (mumps->id.ICNTL(7) == 1) { /* use the user-provided matrix ordering - assuming r = c ordering */
1496         /*
1497         PetscBool      flag;
1498         ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
1499         if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
1500         ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
1501          */
1502         if (!mumps->myid) {
1503           const PetscInt *idx;
1504           PetscInt       i,*perm_in;
1505 
1506           ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
1507           ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);
1508 
1509           mumps->id.perm_in = perm_in;
1510           for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
1511           ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
1512         }
1513       }
1514     }
1515     break;
1516   case 3:  /* distributed assembled matrix input (size>1) */
1517     mumps->id.nz_loc = mumps->nz;
1518     mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1519     if (mumps->id.ICNTL(6)>1) {
1520       mumps->id.a_loc = (MumpsScalar*)mumps->val;
1521     }
1522     /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1523     if (!mumps->myid) {
1524       ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr);
1525       ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr);
1526     } else {
1527       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
1528       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
1529     }
1530     ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
1531     ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
1532     ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
1533     ierr = VecDestroy(&b);CHKERRQ(ierr);
1534     break;
1535   }
1536   PetscMUMPS_c(mumps);
1537   ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr);
1538 
1539   F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1540   F->ops->solve           = MatSolve_MUMPS;
1541   F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
1542   F->ops->matsolve        = MatMatSolve_MUMPS;
1543   F->ops->mattransposesolve = MatMatTransposeSolve_MUMPS;
1544   PetscFunctionReturn(0);
1545 }
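
/*
   The usual calling sequence that reaches this routine, as a minimal sketch
   (assumes an assembled MATAIJ matrix A and vectors b,x; the orderings are
   ignored by MUMPS here unless ICNTL(7)=1, per the note above):

       Mat           F;
       IS            rperm,cperm;
       MatFactorInfo finfo;
       ierr = MatFactorInfoInitialize(&finfo);CHKERRQ(ierr);
       ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
       ierr = MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);CHKERRQ(ierr);
       ierr = MatLUFactorSymbolic(F,A,rperm,cperm,&finfo);CHKERRQ(ierr);
       ierr = MatLUFactorNumeric(F,A,&finfo);CHKERRQ(ierr);
       ierr = MatSolve(F,b,x);CHKERRQ(ierr);
*/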
1546 
1547 /* Note the Petsc r and c permutations are ignored */
1548 PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1549 {
1550   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
1551   PetscErrorCode ierr;
1552   Vec            b;
1553   IS             is_iden;
1554   const PetscInt M = A->rmap->N;
1555 
1556   PetscFunctionBegin;
1557   mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1558 
1559   /* Set MUMPS options from the options database */
1560   ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);
1561 
1562   ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);
1563   ierr = MatMumpsGatherNonzerosOnMaster(MAT_INITIAL_MATRIX,mumps);CHKERRQ(ierr);
1564 
1565   /* analysis phase */
1566   /*----------------*/
1567   mumps->id.job = JOB_FACTSYMBOLIC;
1568   mumps->id.n   = M;
1569   switch (mumps->id.ICNTL(18)) {
1570   case 0:  /* centralized assembled matrix input */
1571     if (!mumps->myid) {
1572       mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1573       if (mumps->id.ICNTL(6)>1) {
1574         mumps->id.a = (MumpsScalar*)mumps->val;
1575       }
1576     }
1577     break;
1578   case 3:  /* distributed assembled matrix input (size>1) */
1579     mumps->id.nz_loc = mumps->nz;
1580     mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1581     if (mumps->id.ICNTL(6)>1) {
1582       mumps->id.a_loc = (MumpsScalar*)mumps->val;
1583     }
1584     /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1585     if (!mumps->myid) {
1586       ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
1587       ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
1588     } else {
1589       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
1590       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
1591     }
1592     ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
1593     ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
1594     ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
1595     ierr = VecDestroy(&b);CHKERRQ(ierr);
1596     break;
1597   }
1598   PetscMUMPS_c(mumps);
1599   ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr);
1600 
1601   F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1602   F->ops->solve           = MatSolve_MUMPS;
1603   F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
1604   PetscFunctionReturn(0);
1605 }
1606 
1607 /* Note the Petsc r permutation and factor info are ignored */
1608 PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
1609 {
1610   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
1611   PetscErrorCode ierr;
1612   Vec            b;
1613   IS             is_iden;
1614   const PetscInt M = A->rmap->N;
1615 
1616   PetscFunctionBegin;
1617   mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1618 
1619   /* Set MUMPS options from the options database */
1620   ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);
1621 
1622   ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);
1623   ierr = MatMumpsGatherNonzerosOnMaster(MAT_INITIAL_MATRIX,mumps);CHKERRQ(ierr);
1624 
1625   /* analysis phase */
1626   /*----------------*/
1627   mumps->id.job = JOB_FACTSYMBOLIC;
1628   mumps->id.n   = M;
1629   switch (mumps->id.ICNTL(18)) {
1630   case 0:  /* centralized assembled matrix input */
1631     if (!mumps->myid) {
1632       mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1633       if (mumps->id.ICNTL(6)>1) {
1634         mumps->id.a = (MumpsScalar*)mumps->val;
1635       }
1636     }
1637     break;
1638   case 3:  /* distributed assembled matrix input (size>1) */
1639     mumps->id.nz_loc = mumps->nz;
1640     mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1641     if (mumps->id.ICNTL(6)>1) {
1642       mumps->id.a_loc = (MumpsScalar*)mumps->val;
1643     }
1644     /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1645     if (!mumps->myid) {
1646       ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
1647       ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
1648     } else {
1649       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
1650       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
1651     }
1652     ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
1653     ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
1654     ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
1655     ierr = VecDestroy(&b);CHKERRQ(ierr);
1656     break;
1657   }
1658   PetscMUMPS_c(mumps);
1659   ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr);
1660 
1661   F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
1662   F->ops->solve                 = MatSolve_MUMPS;
1663   F->ops->solvetranspose        = MatSolve_MUMPS;
1664   F->ops->matsolve              = MatMatSolve_MUMPS;
1665   F->ops->mattransposesolve     = MatMatTransposeSolve_MUMPS;
1666 #if defined(PETSC_USE_COMPLEX)
1667   F->ops->getinertia = NULL;
1668 #else
1669   F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
1670 #endif
1671   PetscFunctionReturn(0);
1672 }
1673 
1674 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1675 {
1676   PetscErrorCode    ierr;
1677   PetscBool         iascii;
1678   PetscViewerFormat format;
1679   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->data;
1680 
1681   PetscFunctionBegin;
1682   /* check if matrix is mumps type */
1683   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1684 
1685   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1686   if (iascii) {
1687     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1688     if (format == PETSC_VIEWER_ASCII_INFO) {
1689       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1690       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1691       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1692       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1693       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1694       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1695       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1696       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1697       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1698       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequential matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1699       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scaling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1700       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1701       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1702       if (mumps->id.ICNTL(11)>0) {
1703         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1704         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1705         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1706         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1707         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1708         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1709       }
1710       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1711       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1712       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1713       /* ICNTL(15-17) not used */
1714       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1715       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Schur complement info):                      %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1716       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1717       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1718       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1719       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1720 
1721       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1722       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1723       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1724       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1725       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1726       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1727 
1728       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1729       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1730       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1731       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(35) (activate BLR based factorization):           %d \n",mumps->id.ICNTL(35));CHKERRQ(ierr);
1732       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(36) (choice of BLR factorization variant):        %d \n",mumps->id.ICNTL(36));CHKERRQ(ierr);
1733       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(38) (estimated compression rate of LU factors):   %d \n",mumps->id.ICNTL(38));CHKERRQ(ierr);
1734 
1735       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1736       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1737       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1738       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1739       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1740       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(7) (dropping parameter for BLR):       %g \n",mumps->id.CNTL(7));CHKERRQ(ierr);
1741 
1742       /* information local to each processor */
1743       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1744       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1745       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1746       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1747       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1748       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1749       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1750       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1751       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1752       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1753 
1754       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1755       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1756       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1757 
1758       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1759       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1760       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1761 
1762       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1763       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1764       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1765 
1766       if (mumps->ninfo && mumps->ninfo <= 80){
1767         PetscInt i;
1768         for (i=0; i<mumps->ninfo; i++){
1769           ierr = PetscViewerASCIIPrintf(viewer, "  INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr);
1770           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr);
1771           ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1772         }
1773       }
1774       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1775 
1776       if (!mumps->myid) { /* information from the host */
1777         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1778         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1779         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1780         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1781 
1782         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1783         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1784         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1785         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1786         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1787         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1788         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1789         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1790         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1791         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1792         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1793         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1794         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1795         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1796         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1797         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1798         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1799         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1800         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1801         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1802         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1803         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1804         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1805         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1806         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1807         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1808         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1809         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1810         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1811         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(35) (after factorization: number of entries taking into account BLR factor compression - sum over all processors): %d\n",mumps->id.INFOG(35));CHKERRQ(ierr);
1812         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(36) (after analysis: estimated size of all MUMPS internal data for running BLR in-core - value on the most memory consuming processor): %d \n",mumps->id.INFOG(36));CHKERRQ(ierr);
1813         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(37) (after analysis: estimated size of all MUMPS internal data for running BLR in-core - sum over all processors): %d \n",mumps->id.INFOG(37));CHKERRQ(ierr);
1814         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(38) (after analysis: estimated size of all MUMPS internal data for running BLR out-of-core - value on the most memory consuming processor): %d \n",mumps->id.INFOG(38));CHKERRQ(ierr);
1815         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(39) (after analysis: estimated size of all MUMPS internal data for running BLR out-of-core - sum over all processors): %d \n",mumps->id.INFOG(39));CHKERRQ(ierr);
1816       }
1817     }
1818   }
1819   PetscFunctionReturn(0);
1820 }
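
/*
   The report above is produced for the PETSC_VIEWER_ASCII_INFO format, which
   is what -ksp_view uses (see also the ICNTL(11) option text earlier). A
   hypothetical invocation that prints it:

       ./ex1 -pc_type lu -pc_factor_mat_solver_type mumps -ksp_view
*/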
1821 
1822 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1823 {
1824   Mat_MUMPS *mumps =(Mat_MUMPS*)A->data;
1825 
1826   PetscFunctionBegin;
1827   info->block_size        = 1.0;
1828   info->nz_allocated      = mumps->id.INFOG(20);
1829   info->nz_used           = mumps->id.INFOG(20);
1830   info->nz_unneeded       = 0.0;
1831   info->assemblies        = 0.0;
1832   info->mallocs           = 0.0;
1833   info->memory            = 0.0;
1834   info->fill_ratio_given  = 0;
1835   info->fill_ratio_needed = 0;
1836   info->factor_mallocs    = 0;
1837   PetscFunctionReturn(0);
1838 }
1839 
1840 /* -------------------------------------------------------------------------------------------*/
1841 PetscErrorCode MatFactorSetSchurIS_MUMPS(Mat F, IS is)
1842 {
1843   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
1844   const PetscInt *idxs;
1845   PetscInt       size,i;
1846   PetscErrorCode ierr;
1847 
1848   PetscFunctionBegin;
1849   ierr = ISGetLocalSize(is,&size);CHKERRQ(ierr);
1850   if (mumps->petsc_size > 1) {
1851     PetscBool ls,gs; /* gs is false if any rank other than root has non-empty IS */
1852 
1853     ls   = mumps->myid ? (size ? PETSC_FALSE : PETSC_TRUE) : PETSC_TRUE; /* always true on root; false on others if their size != 0 */
1854     ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,mumps->petsc_comm);CHKERRQ(ierr);
1855     if (!gs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS distributed parallel Schur complements not yet supported from PETSc\n");
1856   }
1857   if (mumps->id.size_schur != size) {
1858     ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
1859     mumps->id.size_schur = size;
1860     mumps->id.schur_lld  = size;
1861     ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr);
1862   }
1863 
1864   /* Schur complement matrix */
1865   ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&F->schur);CHKERRQ(ierr);
1866   if (mumps->sym == 1) {
1867     ierr = MatSetOption(F->schur,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr);
1868   }
1869 
1870   /* MUMPS expects Fortran style indices */
1871   ierr = ISGetIndices(is,&idxs);CHKERRQ(ierr);
1872   ierr = PetscArraycpy(mumps->id.listvar_schur,idxs,size);CHKERRQ(ierr);
1873   for (i=0;i<size;i++) mumps->id.listvar_schur[i]++;
1874   ierr = ISRestoreIndices(is,&idxs);CHKERRQ(ierr);
1875   if (mumps->petsc_size > 1) {
1876     mumps->id.ICNTL(19) = 1; /* MUMPS returns Schur centralized on the host */
1877   } else {
1878     if (F->factortype == MAT_FACTOR_LU) {
1879       mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */
1880     } else {
1881       mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */
1882     }
1883   }
1884   /* set a special value of ICNTL (not handled by MUMPS) to be used in the solve phase by PETSc */
1885   mumps->id.ICNTL(26) = -1;
1886   PetscFunctionReturn(0);
1887 }
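
/*
   A sketch of requesting a Schur complement on a set of variables (assumes
   the caller created "is" listing the Schur variables, e.g. with
   ISCreateGeneral(); the call must precede the symbolic factorization):

       ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
       ierr = MatFactorSetSchurIS(F,is);CHKERRQ(ierr);
       ... symbolic and numeric factorization of F ...

   after which the Schur matrix can be obtained with
   MatFactorCreateSchurComplement() (see the next routine).
*/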
1888 
1889 /* -------------------------------------------------------------------------------------------*/
1890 PetscErrorCode MatFactorCreateSchurComplement_MUMPS(Mat F,Mat* S)
1891 {
1892   Mat            St;
1893   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
1894   PetscScalar    *array;
1895 #if defined(PETSC_USE_COMPLEX)
1896   PetscScalar    im = PetscSqrtScalar((PetscScalar)-1.0);
1897 #endif
1898   PetscErrorCode ierr;
1899 
1900   PetscFunctionBegin;
1901   if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it");
1902   ierr = MatCreate(PETSC_COMM_SELF,&St);CHKERRQ(ierr);
1903   ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr);
1904   ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr);
1905   ierr = MatSetUp(St);CHKERRQ(ierr);
1906   ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr);
1907   if (!mumps->sym) { /* MUMPS always returns a full matrix */
1908     if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
1909       PetscInt i,j,N=mumps->id.size_schur;
1910       for (i=0;i<N;i++) {
1911         for (j=0;j<N;j++) {
1912 #if !defined(PETSC_USE_COMPLEX)
1913           PetscScalar val = mumps->id.schur[i*N+j];
1914 #else
1915           PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1916 #endif
1917           array[j*N+i] = val;
1918         }
1919       }
1920     } else { /* stored by columns */
1921       ierr = PetscArraycpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur);CHKERRQ(ierr);
1922     }
1923   } else { /* either full or lower-triangular (not packed) */
1924     if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */
1925       PetscInt i,j,N=mumps->id.size_schur;
1926       for (i=0;i<N;i++) {
1927         for (j=i;j<N;j++) {
1928 #if !defined(PETSC_USE_COMPLEX)
1929           PetscScalar val = mumps->id.schur[i*N+j];
1930 #else
1931           PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1932 #endif
1933           array[i*N+j] = val;
1934           array[j*N+i] = val;
1935         }
1936       }
1937     } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */
1938       ierr = PetscArraycpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur);CHKERRQ(ierr);
1939     } else { /* ICNTL(19) == 1 lower triangular stored by rows */
1940       PetscInt i,j,N=mumps->id.size_schur;
1941       for (i=0;i<N;i++) {
1942         for (j=0;j<i+1;j++) {
1943 #if !defined(PETSC_USE_COMPLEX)
1944           PetscScalar val = mumps->id.schur[i*N+j];
1945 #else
1946           PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1947 #endif
1948           array[i*N+j] = val;
1949           array[j*N+i] = val;
1950         }
1951       }
1952     }
1953   }
1954   ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr);
1955   *S   = St;
1956   PetscFunctionReturn(0);
1957 }
1958 
1959 /* -------------------------------------------------------------------------------------------*/
1960 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
1961 {
1962   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1963 
1964   PetscFunctionBegin;
1965   mumps->id.ICNTL(icntl) = ival;
1966   PetscFunctionReturn(0);
1967 }
1968 
1969 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
1970 {
1971   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1972 
1973   PetscFunctionBegin;
1974   *ival = mumps->id.ICNTL(icntl);
1975   PetscFunctionReturn(0);
1976 }
1977 
1978 /*@
1979   MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
1980 
1981    Logically Collective on Mat
1982 
1983    Input Parameters:
1984 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1985 .  icntl - index of MUMPS parameter array ICNTL()
1986 -  ival - value of MUMPS ICNTL(icntl)
1987 
1988   Options Database:
1989 .   -mat_mumps_icntl_<icntl> <ival>
1990 
1991    Level: beginner
1992 
1993    References:
1994 .     MUMPS Users' Guide
1995 
1996 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
1997  @*/
1998 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
1999 {
2000   PetscErrorCode ierr;
2001 
2002   PetscFunctionBegin;
2003   PetscValidType(F,1);
2004   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2005   PetscValidLogicalCollectiveInt(F,icntl,2);
2006   PetscValidLogicalCollectiveInt(F,ival,3);
2007   ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
2008   PetscFunctionReturn(0);
2009 }
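
/*
   Example: the two equivalent ways of setting ICNTL(14), programmatically or
   via the options database (a sketch; F comes from MatGetFactor() as
   documented above):

       ierr = MatMumpsSetIcntl(F,14,30);CHKERRQ(ierr);   increases the estimated workspace by 30 percent
   or
       -mat_mumps_icntl_14 30
*/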
2010 
2011 /*@
2012   MatMumpsGetIcntl - Get MUMPS parameter ICNTL()
2013 
2014    Logically Collective on Mat
2015 
2016    Input Parameters:
2017 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2018 -  icntl - index of MUMPS parameter array ICNTL()
2019 
2020   Output Parameter:
2021 .  ival - value of MUMPS ICNTL(icntl)
2022 
2023    Level: beginner
2024 
2025    References:
2026 .     MUMPS Users' Guide
2027 
2028 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2029 @*/
2030 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
2031 {
2032   PetscErrorCode ierr;
2033 
2034   PetscFunctionBegin;
2035   PetscValidType(F,1);
2036   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2037   PetscValidLogicalCollectiveInt(F,icntl,2);
2038   PetscValidIntPointer(ival,3);
2039   ierr = PetscUseMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2040   PetscFunctionReturn(0);
2041 }
2042 
2043 /* -------------------------------------------------------------------------------------------*/
2044 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
2045 {
2046   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
2047 
2048   PetscFunctionBegin;
2049   mumps->id.CNTL(icntl) = val;
2050   PetscFunctionReturn(0);
2051 }
2052 
2053 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
2054 {
2055   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
2056 
2057   PetscFunctionBegin;
2058   *val = mumps->id.CNTL(icntl);
2059   PetscFunctionReturn(0);
2060 }
2061 
2062 /*@
2063   MatMumpsSetCntl - Set MUMPS parameter CNTL()
2064 
2065    Logically Collective on Mat
2066 
2067    Input Parameters:
2068 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2069 .  icntl - index of MUMPS parameter array CNTL()
2070 -  val - value of MUMPS CNTL(icntl)
2071 
2072   Options Database:
2073 .   -mat_mumps_cntl_<icntl> <val>
2074 
2075    Level: beginner
2076 
2077    References:
2078 .     MUMPS Users' Guide
2079 
2080 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2081 @*/
2082 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
2083 {
2084   PetscErrorCode ierr;
2085 
2086   PetscFunctionBegin;
2087   PetscValidType(F,1);
2088   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2089   PetscValidLogicalCollectiveInt(F,icntl,2);
2090   PetscValidLogicalCollectiveReal(F,val,3);
2091   ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
2092   PetscFunctionReturn(0);
2093 }
2094 
2095 /*@
2096   MatMumpsGetCntl - Get MUMPS parameter CNTL()
2097 
2098    Logically Collective on Mat
2099 
2100    Input Parameters:
2101 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2102 -  icntl - index of MUMPS parameter array CNTL()
2103 
2104   Output Parameter:
2105 .  val - value of MUMPS CNTL(icntl)
2106 
2107    Level: beginner
2108 
2109    References:
2110 .      MUMPS Users' Guide
2111 
2112 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2113 @*/
2114 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
2115 {
2116   PetscErrorCode ierr;
2117 
2118   PetscFunctionBegin;
2119   PetscValidType(F,1);
2120   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2121   PetscValidLogicalCollectiveInt(F,icntl,2);
2122   PetscValidRealPointer(val,3);
2123   ierr = PetscUseMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2124   PetscFunctionReturn(0);
2125 }
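
/*
   Example: setting and reading back CNTL(1), the relative pivoting threshold
   (a sketch; F is a factor matrix as above):

       PetscReal tau;
       ierr = MatMumpsSetCntl(F,1,0.01);CHKERRQ(ierr);
       ierr = MatMumpsGetCntl(F,1,&tau);CHKERRQ(ierr);
*/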
2126 
2127 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
2128 {
2129   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
2130 
2131   PetscFunctionBegin;
2132   *info = mumps->id.INFO(icntl);
2133   PetscFunctionReturn(0);
2134 }
2135 
2136 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
2137 {
2138   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
2139 
2140   PetscFunctionBegin;
2141   *infog = mumps->id.INFOG(icntl);
2142   PetscFunctionReturn(0);
2143 }
2144 
2145 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
2146 {
2147   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
2148 
2149   PetscFunctionBegin;
2150   *rinfo = mumps->id.RINFO(icntl);
2151   PetscFunctionReturn(0);
2152 }
2153 
2154 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
2155 {
2156   Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
2157 
2158   PetscFunctionBegin;
2159   *rinfog = mumps->id.RINFOG(icntl);
2160   PetscFunctionReturn(0);
2161 }
2162 
2163 PetscErrorCode MatMumpsGetInverse_MUMPS(Mat F,Mat spRHS)
2164 {
2165   PetscErrorCode ierr;
2166   Mat            Bt = NULL,Btseq = NULL;
2167   PetscBool      flg;
2168   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
2169   PetscScalar    *aa;
2170   PetscInt       spnr,*ia,*ja;
2171 
2172   PetscFunctionBegin;
2173   PetscValidHeaderSpecific(spRHS,MAT_CLASSID,2);
2174   ierr = PetscObjectTypeCompare((PetscObject)spRHS,MATTRANSPOSEMAT,&flg);CHKERRQ(ierr);
2175   if (flg) {
2176     ierr = MatTransposeGetMat(spRHS,&Bt);CHKERRQ(ierr);
2177   } else SETERRQ(PetscObjectComm((PetscObject)spRHS),PETSC_ERR_ARG_WRONG,"Matrix spRHS must be of type MATTRANSPOSEMAT");
2178 
2179   ierr = MatMumpsSetIcntl(F,30,1);CHKERRQ(ierr);
2180 
2181   if (mumps->petsc_size > 1) {
2182     Mat_MPIAIJ *b = (Mat_MPIAIJ*)Bt->data;
2183     Btseq = b->A;
2184   } else {
2185     Btseq = Bt;
2186   }
2187 
2188   if (!mumps->myid) {
2189     ierr = MatSeqAIJGetArray(Btseq,&aa);CHKERRQ(ierr);
2190     ierr = MatGetRowIJ(Btseq,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr);
2191     if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure");
2192 
2193     mumps->id.irhs_ptr    = ia;
2194     mumps->id.irhs_sparse = ja;
2195     mumps->id.nz_rhs      = ia[spnr] - 1;
2196     mumps->id.rhs_sparse  = (MumpsScalar*)aa;
2197   } else {
2198     mumps->id.irhs_ptr    = NULL;
2199     mumps->id.irhs_sparse = NULL;
2200     mumps->id.nz_rhs      = 0;
2201     mumps->id.rhs_sparse  = NULL;
2202   }
2203   mumps->id.ICNTL(20)   = 1; /* rhs is sparse */
2204   mumps->id.ICNTL(21)   = 0; /* solution is in assembled centralized format */
2205 
2206   /* solve phase */
2207   /*-------------*/
2208   mumps->id.job = JOB_SOLVE;
2209   PetscMUMPS_c(mumps);
2210   if (mumps->id.INFOG(1) < 0)
2211     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
2212 
2213   if (!mumps->myid) {
2214     ierr = MatSeqAIJRestoreArray(Btseq,&aa);CHKERRQ(ierr);
2215     ierr = MatRestoreRowIJ(Btseq,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr);
2216     if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure");
2217   }
2218   PetscFunctionReturn(0);
2219 }
2220 
2221 /*@
2222   MatMumpsGetInverse - Get user-specified set of entries in inverse of A
2223 
2224    Logically Collective on Mat
2225 
2226    Input Parameters:
2227 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2228 -  spRHS - sequential sparse matrix in MATTRANSPOSEMAT format holding the specified indices, stored on process 0
2229 
2230   Output Parameter:
2231 . spRHS - requested entries of inverse of A
2232 
2233    Level: beginner
2234 
2235    References:
2236 .      MUMPS Users' Guide
2237 
2238 .seealso: MatGetFactor(), MatCreateTranspose()
2239 @*/
2240 PetscErrorCode MatMumpsGetInverse(Mat F,Mat spRHS)
2241 {
2242   PetscErrorCode ierr;
2243 
2244   PetscFunctionBegin;
2245   PetscValidType(F,1);
2246   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2247   ierr = PetscUseMethod(F,"MatMumpsGetInverse_C",(Mat,Mat),(F,spRHS));CHKERRQ(ierr);
2248   PetscFunctionReturn(0);
2249 }
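
/*
   A sketch of extracting selected entries of inv(A), mirroring
   MatMumpsGetInverseTranspose_MUMPS() below (spRHST is a sequential AIJ
   matrix on process 0 whose nonzero pattern marks the wanted entries of
   inv(A)^T):

       Mat spRHS;
       ierr = MatCreateTranspose(spRHST,&spRHS);CHKERRQ(ierr);
       ierr = MatMumpsGetInverse(F,spRHS);CHKERRQ(ierr);
       ierr = MatDestroy(&spRHS);CHKERRQ(ierr);
*/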
2250 
2251 PetscErrorCode MatMumpsGetInverseTranspose_MUMPS(Mat F,Mat spRHST)
2252 {
2253   PetscErrorCode ierr;
2254   Mat            spRHS;
2255 
2256   PetscFunctionBegin;
2257   ierr = MatCreateTranspose(spRHST,&spRHS);CHKERRQ(ierr);
2258   ierr = MatMumpsGetInverse_MUMPS(F,spRHS);CHKERRQ(ierr);
2259   ierr = MatDestroy(&spRHS);CHKERRQ(ierr);
2260   PetscFunctionReturn(0);
2261 }
2262 
2263 /*@
2264   MatMumpsGetInverseTranspose - Get user-specified set of entries in inverse of matrix A^T
2265 
2266    Logically Collective on Mat
2267 
2268    Input Parameters:
2269 +  F - the factored matrix of A obtained by calling MatGetFactor() from PETSc-MUMPS interface
2270 -  spRHST - sequential sparse matrix in MATAIJ format holding the specified indices of A^T, stored on process 0
2271 
2272   Output Parameter:
2273 . spRHST - requested entries of inverse of A^T
2274 
2275    Level: beginner
2276 
2277    References:
2278 .      MUMPS Users' Guide
2279 
2280 .seealso: MatGetFactor(), MatCreateTranspose(), MatMumpsGetInverse()
2281 @*/
2282 PetscErrorCode MatMumpsGetInverseTranspose(Mat F,Mat spRHST)
2283 {
2284   PetscErrorCode ierr;
2285   PetscBool      flg;
2286 
2287   PetscFunctionBegin;
2288   PetscValidType(F,1);
2289   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2290   ierr = PetscObjectTypeCompareAny((PetscObject)spRHST,&flg,MATSEQAIJ,MATMPIAIJ,NULL);CHKERRQ(ierr);
2291   if (!flg) SETERRQ(PetscObjectComm((PetscObject)spRHST),PETSC_ERR_ARG_WRONG,"Matrix spRHST must be a MATAIJ matrix");
2292 
2293   ierr = PetscUseMethod(F,"MatMumpsGetInverseTranspose_C",(Mat,Mat),(F,spRHST));CHKERRQ(ierr);
2294   PetscFunctionReturn(0);
2295 }
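
/*
   Equivalent one-call form of the sketch given after MatMumpsGetInverse()
   above, without wrapping spRHST in a MATTRANSPOSEMAT:

       ierr = MatMumpsGetInverseTranspose(F,spRHST);CHKERRQ(ierr);
*/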
2296 
2297 /*@
2298   MatMumpsGetInfo - Get MUMPS parameter INFO()
2299 
2300    Logically Collective on Mat
2301 
2302    Input Parameters:
2303 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2304 -  icntl - index of MUMPS parameter array INFO()
2305 
2306   Output Parameter:
2307 .  ival - value of MUMPS INFO(icntl)
2308 
2309    Level: beginner
2310 
2311    References:
2312 .      MUMPS Users' Guide
2313 
2314 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2315 @*/
2316 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
2317 {
2318   PetscErrorCode ierr;
2319 
2320   PetscFunctionBegin;
2321   PetscValidType(F,1);
2322   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2323   PetscValidIntPointer(ival,3);
2324   ierr = PetscUseMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2325   PetscFunctionReturn(0);
2326 }
2327 
2328 /*@
2329   MatMumpsGetInfog - Get MUMPS parameter INFOG()
2330 
2331    Logically Collective on Mat
2332 
2333    Input Parameters:
2334 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2335 -  icntl - index of MUMPS parameter array INFOG()
2336 
2337   Output Parameter:
2338 .  ival - value of MUMPS INFOG(icntl)
2339 
2340    Level: beginner
2341 
2342    References:
2343 .      MUMPS Users' Guide
2344 
2345 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2346 @*/
2347 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
2348 {
2349   PetscErrorCode ierr;
2350 
2351   PetscFunctionBegin;
2352   PetscValidType(F,1);
2353   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2354   PetscValidIntPointer(ival,3);
2355   ierr = PetscUseMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2356   PetscFunctionReturn(0);
2357 }
2358 
2359 /*@
2360   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
2361 
2362    Logically Collective on Mat
2363 
2364    Input Parameters:
2365 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2366 -  icntl - index of MUMPS parameter array RINFO()
2367 
2368   Output Parameter:
2369 .  val - value of MUMPS RINFO(icntl)
2370 
2371    Level: beginner
2372 
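   Example (a sketch; F is an assumed MUMPS factor): per the MUMPS Users' Guide, RINFO(1) holds
   the estimated floating-point operations of the elimination process on the calling process.
$      PetscReal val;
$      MatMumpsGetRinfo(F,1,&val);
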
2373    References:
2374 .       MUMPS Users' Guide
2375 
2376 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2377 @*/
2378 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
2379 {
2380   PetscErrorCode ierr;
2381 
2382   PetscFunctionBegin;
2383   PetscValidType(F,1);
2384   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2385   PetscValidRealPointer(val,3);
2386   ierr = PetscUseMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2387   PetscFunctionReturn(0);
2388 }
2389 
2390 /*@
2391   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
2392 
2393    Logically Collective on Mat
2394 
2395    Input Parameters:
2396 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2397 -  icntl - index of MUMPS parameter array RINFOG()
2398 
2399   Output Parameter:
2400 .  val - value of MUMPS RINFOG(icntl)
2401 
2402    Level: beginner
2403 
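   Example (a sketch; F is an assumed MUMPS factor): when the determinant is requested with
   -mat_mumps_icntl_33 1, per the MUMPS Users' Guide it is returned (in the real case) as
   RINFOG(12)*2^INFOG(34).
$      PetscReal mantissa;
$      PetscInt  exponent;
$      MatMumpsGetRinfog(F,12,&mantissa);
$      MatMumpsGetInfog(F,34,&exponent);   /* determinant = mantissa * 2^exponent */
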
2404    References:
2405 .      MUMPS Users' Guide
2406 
2407 .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2408 @*/
2409 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
2410 {
2411   PetscErrorCode ierr;
2412 
2413   PetscFunctionBegin;
2414   PetscValidType(F,1);
2415   if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2416   PetscValidRealPointer(val,3);
2417   ierr = PetscUseMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2418   PetscFunctionReturn(0);
2419 }
2420 
2421 /*MC
2422   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
2423   distributed and sequential matrices via the external package MUMPS.
2424 
2425   Works with MATAIJ (LU and Cholesky), MATBAIJ (LU only), MATSBAIJ (Cholesky only), and MATSEQSELL (LU only) matrices
2426 
2427   Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch  to have PETSc installed with MUMPS
2428 
2429   Use ./configure --with-openmp --download-hwloc (or --with-hwloc) to enable running MUMPS in MPI+OpenMP hybrid mode and the non-MUMPS part of the code in flat-MPI mode. See details below.
2430 
2431   Use -pc_type lu or -pc_type cholesky together with -pc_factor_mat_solver_type mumps to use this direct solver
2432 
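   For example, a sketch of a command line (the executable name ./ex is a placeholder):
$     mpirun -n 4 ./ex -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_4 2
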
2433   Options Database Keys:
2434 +  -mat_mumps_icntl_1 - ICNTL(1): output stream for error messages
2435 .  -mat_mumps_icntl_2 - ICNTL(2): output stream for diagnostic printing, statistics, and warnings
2436 .  -mat_mumps_icntl_3 -  ICNTL(3): output stream for global information, collected on the host
2437 .  -mat_mumps_icntl_4 -  ICNTL(4): level of printing (0 to 4)
2438 .  -mat_mumps_icntl_6 - ICNTL(6): permutes the matrix to a zero-free diagonal and/or scales it (0 to 7)
2439 .  -mat_mumps_icntl_7 - ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis
2440 .  -mat_mumps_icntl_8  - ICNTL(8): scaling strategy (-2 to 8 or 77)
2441 .  -mat_mumps_icntl_10  - ICNTL(10): maximum number of steps of iterative refinement
2442 .  -mat_mumps_icntl_11  - ICNTL(11): statistics related to an error analysis (via -ksp_view)
2443 .  -mat_mumps_icntl_12  - ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)
2444 .  -mat_mumps_icntl_13  - ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting
2445 .  -mat_mumps_icntl_14  - ICNTL(14): percentage increase in the estimated working space
2446 .  -mat_mumps_icntl_19  - ICNTL(19): computes the Schur complement
2447 .  -mat_mumps_icntl_22  - ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)
2448 .  -mat_mumps_icntl_23  - ICNTL(23): maximum size of the working memory (MB) that MUMPS can allocate per processor
2449 .  -mat_mumps_icntl_24  - ICNTL(24): detection of null pivot rows (0 or 1)
2450 .  -mat_mumps_icntl_25  - ICNTL(25): compute a solution of a deficient matrix and a null space basis
2451 .  -mat_mumps_icntl_26  - ICNTL(26): drives the solution phase when a Schur complement matrix is used
2452 .  -mat_mumps_icntl_28  - ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering
2453 .  -mat_mumps_icntl_29 - ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis
2454 .  -mat_mumps_icntl_30 - ICNTL(30): compute user-specified set of entries in inv(A)
2455 .  -mat_mumps_icntl_31 - ICNTL(31): indicates which factors may be discarded during factorization
2456 .  -mat_mumps_icntl_33 - ICNTL(33): compute determinant
2457 .  -mat_mumps_icntl_35 - ICNTL(35): level of activation of BLR (Block Low-Rank) feature
2458 .  -mat_mumps_icntl_36 - ICNTL(36): controls the choice of BLR factorization variant
2459 .  -mat_mumps_icntl_38 - ICNTL(38): sets the estimated compression rate of LU factors with BLR
2460 .  -mat_mumps_cntl_1  - CNTL(1): relative pivoting threshold
2461 .  -mat_mumps_cntl_2  -  CNTL(2): stopping criterion of refinement
2462 .  -mat_mumps_cntl_3 - CNTL(3): absolute pivoting threshold
2463 .  -mat_mumps_cntl_4 - CNTL(4): value for static pivoting
2464 .  -mat_mumps_cntl_5 - CNTL(5): fixation for null pivots
2465 .  -mat_mumps_cntl_7 - CNTL(7): precision of the dropping parameter used during BLR factorization
2466 -  -mat_mumps_use_omp_threads [m] - run MUMPS in MPI+OpenMP hybrid mode as if omp_set_num_threads(m) were called before calling MUMPS.
2467                                    m defaults to the number of cores per CPU socket (package) reported by hwloc, or the number of PETSc MPI processes on a node, whichever is smaller; this default follows the MUMPS manual's suggestion.
2468 
2469   Level: beginner
2470 
2471     Notes:
2472     When a MUMPS factorization fails inside a KSP solve, for example with KSP_DIVERGED_PC_FAILED, one can find the MUMPS information about the failure by calling
2473 $          KSPGetPC(ksp,&pc);
2474 $          PCFactorGetMatrix(pc,&mat);
2475 $          MatMumpsGetInfo(mat,....);
2476 $          MatMumpsGetInfog(mat,....); etc.
2477            Alternatively, you can run with -ksp_error_if_not_converged, in which case the program stops and the information is printed in the error message.
2478 
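   To select MUMPS and set its options from code rather than from the options database, a sketch
   (assuming ksp is the KSP for your linear system; PCFactorSetUpMatSolverType() creates the factor
   matrix so that its options can be set before the factorization):
$          KSPGetPC(ksp,&pc);
$          PCSetType(pc,PCLU);
$          PCFactorSetMatSolverType(pc,MATSOLVERMUMPS);
$          PCFactorSetUpMatSolverType(pc);
$          PCFactorGetMatrix(pc,&mat);
$          MatMumpsSetIcntl(mat,14,30);  /* e.g., increase the working-space percentage ICNTL(14) */
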
2479    If you want to run MUMPS in MPI+OpenMP hybrid mode (i.e., enable multithreading in MUMPS) but still want to run the non-MUMPS part
2480    (i.e., the PETSc part) of your code in so-called flat-MPI (aka pure-MPI) mode, you need to configure PETSc with --with-openmp --download-hwloc
2481    (or --with-hwloc) and have an MPI that supports MPI-3.0's process shared memory (which is usually available). Since MUMPS calls BLAS
2482    libraries, to really get performance you should have a multithreaded BLAS library such as Intel MKL, AMD ACML, Cray libSci or the open-source
2483    OpenBLAS (PETSc has configure options to install/specify them). With these conditions met, you can run your program as before but with
2484    an extra option -mat_mumps_use_omp_threads [m]. It works as if OMP_NUM_THREADS=m were set for MUMPS, where m defaults to the number of cores
2485    per CPU socket (or package, in hwloc terms), or the number of PETSc MPI processes on a node, whichever is smaller.
2486 
2487    Flat-MPI (or pure-MPI) mode means you run your code with as many MPI ranks as the number of cores. For example,
2488    if a compute node has 32 cores and you run on two nodes, you may use "mpirun -n 64 ./test". To run MPI+OpenMP hybrid MUMPS,
2489    the traditional way is to set OMP_NUM_THREADS and run with fewer MPI ranks than cores. For example, if you want to have 16 OpenMP
2490    threads per rank, then you may use "export OMP_NUM_THREADS=16 && mpirun -n 4 ./test". The problem with this approach is that the non-MUMPS
2491    part of your code runs with fewer cores and CPUs are wasted. "-mat_mumps_use_omp_threads [m]" provides an alternative such that
2492    you can still run your code with as many MPI ranks as the number of cores, but have MUMPS run in MPI+OpenMP hybrid mode. In our example,
2493    you can use "mpirun -n 64 ./test -mat_mumps_use_omp_threads 16".
2494 
2495    If you run your code through a job submission system, there are caveats in MPI rank mapping. We use MPI_Comm_split_type to get the MPI
2496    processes on each compute node. Listing the processes in ascending rank order, we split the processes on a node into consecutive groups of
2497    size m and create a communicator called omp_comm for each group. Rank 0 in an omp_comm is called the master rank, and the others in the
2498    omp_comm are called slave ranks (or slaves). Only master ranks are visible to MUMPS; slaves are not. We free the CPUs assigned to slaves
2499    (which might be set by CPU binding policies in job scripts) and make those CPUs available to the master so that the OpenMP threads spawned
2500    by MUMPS can run on them. In a multi-socket compute node, MPI rank mapping is an issue. Continuing the above example, suppose your compute
2501    node has two sockets. If you interleave MPI ranks across the two sockets, in other words even ranks are placed on socket 0 and odd ranks
2502    on socket 1, and bind MPI ranks to cores, then with -mat_mumps_use_omp_threads 16 a master rank (and the threads it spawns) will use half
2503    the cores of socket 0 and half the cores of socket 1, which definitely hurts locality. On the other hand, if you map MPI ranks consecutively
2504    onto the two sockets, the problem will not happen. Therefore, when you use -mat_mumps_use_omp_threads, you need to keep an eye on your MPI
2505    rank mapping and CPU binding. For example, with the Slurm job scheduler, one can use srun --cpu-bind=verbose -m block:block to map
2506    consecutive MPI ranks to sockets and examine the mapping result.
2507 
2508    PETSc does not control thread binding in MUMPS. So to get the best performance, one still has to set OMP_PROC_BIND and OMP_PLACES in job
2509    scripts, for example, export OMP_PLACES=threads and export OMP_PROC_BIND=spread. One does not need to export OMP_NUM_THREADS=m in job
2510    scripts, as PETSc calls omp_set_num_threads(m) internally before calling MUMPS.
2511 
2512    References:
2513 +   1. - Heroux, Michael A., R. Brightwell, and Michael M. Wolf. "Bi-modal MPI and MPI+ threads computing on scalable multicore systems." IJHPCA (Submitted) (2011).
2514 -   2. - Gutierrez, Samuel K., et al. "Accommodating Thread-Level Heterogeneity in Coupled Parallel Applications." Parallel and Distributed Processing Symposium (IPDPS), 2017 IEEE International. IEEE, 2017.
2515 
2516 .seealso: PCFactorSetMatSolverType(), MatSolverType, MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog(), KSPGetPC(), PCFactorGetMatrix()
2517 
2518 M*/
2519 
2520 static PetscErrorCode MatFactorGetSolverType_mumps(Mat A,MatSolverType *type)
2521 {
2522   PetscFunctionBegin;
2523   *type = MATSOLVERMUMPS;
2524   PetscFunctionReturn(0);
2525 }
2526 
2527 /* MatGetFactor for Seq and MPI AIJ matrices */
2528 static PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
2529 {
2530   Mat            B;
2531   PetscErrorCode ierr;
2532   Mat_MUMPS      *mumps;
2533   PetscBool      isSeqAIJ;
2534 
2535   PetscFunctionBegin;
2536   /* Create the factorization matrix */
2537   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
2538   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2539   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2540   ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr);
2541   ierr = MatSetUp(B);CHKERRQ(ierr);
2542 
2543   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2544 
2545   B->ops->view        = MatView_MUMPS;
2546   B->ops->getinfo     = MatGetInfo_MUMPS;
2547 
2548   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr);
2549   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr);
2550   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2551   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2552   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2553   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2554   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2555   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2556   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2557   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2558   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2559   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr);
2560   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverseTranspose_C",MatMumpsGetInverseTranspose_MUMPS);CHKERRQ(ierr);
2561 
2562   if (ftype == MAT_FACTOR_LU) {
2563     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
2564     B->factortype            = MAT_FACTOR_LU;
2565     if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
2566     else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
2567     mumps->sym = 0; /* MUMPS SYM=0: unsymmetric */
2568   } else {
2569     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2570     B->factortype                  = MAT_FACTOR_CHOLESKY;
2571     if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
2572     else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
2573 #if defined(PETSC_USE_COMPLEX)
2574     mumps->sym = 2;
2575 #else
2576     if (A->spd_set && A->spd) mumps->sym = 1; /* MUMPS SYM=1: symmetric positive definite */
2577     else                      mumps->sym = 2; /* MUMPS SYM=2: general symmetric */
2578 #endif
2579   }
2580 
2581   /* set solvertype */
2582   ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
2583   ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr);
2584 
2585   B->ops->destroy = MatDestroy_MUMPS;
2586   B->data         = (void*)mumps;
2587 
2588   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2589 
2590   *F = B;
2591   PetscFunctionReturn(0);
2592 }
2593 
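/*
   Example (a sketch, error checking elided): obtaining and using a MUMPS LU factor directly
   through this interface, without going through KSP/PC. Here A is an assumed assembled AIJ
   matrix, and b and x are conforming vectors.

     Mat           F;
     MatFactorInfo info;

     MatFactorInfoInitialize(&info);
     MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);
     MatLUFactorSymbolic(F,A,NULL,NULL,&info);  // NULL orderings: MUMPS computes its own ordering
     MatLUFactorNumeric(F,A,&info);
     MatSolve(F,b,x);
     MatDestroy(&F);
*/
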
2594 /* MatGetFactor for Seq and MPI SBAIJ matrices */
2595 static PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
2596 {
2597   Mat            B;
2598   PetscErrorCode ierr;
2599   Mat_MUMPS      *mumps;
2600   PetscBool      isSeqSBAIJ;
2601 
2602   PetscFunctionBegin;
2603   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
2604   if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
2605   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
2606   /* Create the factorization matrix */
2607   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2608   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2609   ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr);
2610   ierr = MatSetUp(B);CHKERRQ(ierr);
2611 
2612   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2613   if (isSeqSBAIJ) {
2614     mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
2615   } else {
2616     mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
2617   }
2618 
2619   B->ops->getinfo                = MatGetInfo_External;
2620   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2621   B->ops->view                   = MatView_MUMPS;
2622 
2623   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr);
2624   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr);
2625   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2626   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2627   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2628   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2629   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2630   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2631   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2632   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2633   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2634   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr);
2635   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverseTranspose_C",MatMumpsGetInverseTranspose_MUMPS);CHKERRQ(ierr);
2636 
2637   B->factortype = MAT_FACTOR_CHOLESKY;
2638 #if defined(PETSC_USE_COMPLEX)
2639   mumps->sym = 2;
2640 #else
2641   if (A->spd_set && A->spd) mumps->sym = 1;
2642   else                      mumps->sym = 2;
2643 #endif
2644 
2645   /* set solvertype */
2646   ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
2647   ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr);
2648 
2649   B->ops->destroy = MatDestroy_MUMPS;
2650   B->data         = (void*)mumps;
2651 
2652   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2653 
2654   *F = B;
2655   PetscFunctionReturn(0);
2656 }
2657 
2658 static PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
2659 {
2660   Mat            B;
2661   PetscErrorCode ierr;
2662   Mat_MUMPS      *mumps;
2663   PetscBool      isSeqBAIJ;
2664 
2665   PetscFunctionBegin;
2666   /* Create the factorization matrix */
2667   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
2668   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2669   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2670   ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr);
2671   ierr = MatSetUp(B);CHKERRQ(ierr);
2672 
2673   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2674   if (ftype == MAT_FACTOR_LU) {
2675     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
2676     B->factortype            = MAT_FACTOR_LU;
2677     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
2678     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
2679     mumps->sym = 0;
2680   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead");
2681 
2682   B->ops->getinfo     = MatGetInfo_External;
2683   B->ops->view        = MatView_MUMPS;
2684 
2685   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr);
2686   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr);
2687   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2688   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2689   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2690   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2691   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2692   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2693   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2694   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2695   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2696   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr);
2697   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverseTranspose_C",MatMumpsGetInverseTranspose_MUMPS);CHKERRQ(ierr);
2698 
2699   /* set solvertype */
2700   ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
2701   ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr);
2702 
2703   B->ops->destroy = MatDestroy_MUMPS;
2704   B->data         = (void*)mumps;
2705 
2706   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2707 
2708   *F = B;
2709   PetscFunctionReturn(0);
2710 }
2711 
2712 /* MatGetFactor for Seq and MPI SELL matrices */
2713 static PetscErrorCode MatGetFactor_sell_mumps(Mat A,MatFactorType ftype,Mat *F)
2714 {
2715   Mat            B;
2716   PetscErrorCode ierr;
2717   Mat_MUMPS      *mumps;
2718   PetscBool      isSeqSELL;
2719 
2720   PetscFunctionBegin;
2721   /* Create the factorization matrix */
2722   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSELL,&isSeqSELL);CHKERRQ(ierr);
2723   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2724   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2725   ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr);
2726   ierr = MatSetUp(B);CHKERRQ(ierr);
2727 
2728   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2729 
2730   B->ops->view        = MatView_MUMPS;
2731   B->ops->getinfo     = MatGetInfo_MUMPS;
2732 
2733   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr);
2734   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr);
2735   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2736   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2737   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2738   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2739   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2740   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2741   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2742   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2743   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2744 
2745   if (ftype == MAT_FACTOR_LU) {
2746     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
2747     B->factortype            = MAT_FACTOR_LU;
2748     if (isSeqSELL) mumps->ConvertToTriples = MatConvertToTriples_seqsell_seqaij;
2749     else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
2750     mumps->sym = 0;
2751   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
2752 
2753   /* set solvertype */
2754   ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
2755   ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr);
2756 
2757   B->ops->destroy = MatDestroy_MUMPS;
2758   B->data         = (void*)mumps;
2759 
2760   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2761 
2762   *F = B;
2763   PetscFunctionReturn(0);
2764 }
2765 
2766 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MUMPS(void)
2767 {
2768   PetscErrorCode ierr;
2769 
2770   PetscFunctionBegin;
2771   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2772   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2773   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2774   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2775   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2776   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2777   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2778   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2779   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2780   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2781   ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQSELL,MAT_FACTOR_LU,MatGetFactor_sell_mumps);CHKERRQ(ierr);
2782   PetscFunctionReturn(0);
2783 }
2784 
2785