xref: /petsc/src/mat/impls/aij/seq/mkl_pardiso/mkl_pardiso.c (revision 37eeb8152ec6a2cf24186d3591c2c5de5dfd8fa5)
1 #if defined(PETSC_HAVE_LIBMKL_INTEL_ILP64)
2 #define MKL_ILP64
3 #endif
4 
5 #include <../src/mat/impls/aij/seq/aij.h>        /*I "petscmat.h" I*/
6 #include <../src/mat/impls/sbaij/seq/sbaij.h>
7 #include <../src/mat/impls/dense/seq/dense.h>
8 
9 #include <stdio.h>
10 #include <stdlib.h>
11 #include <math.h>
12 #if defined(PETSC_HAVE_LIBMKL_INTEL_ILP64)
13 #define MKL_ILP64
14 #endif
15 #include <mkl_pardiso.h>
16 
17 PETSC_EXTERN void PetscSetMKL_PARDISOThreads(int);
18 
/*
 *  Possible mkl_pardiso phases that control the execution of the solver.
 *  For more information, check the mkl_pardiso manual.
 */
23 #define JOB_ANALYSIS 11
24 #define JOB_ANALYSIS_NUMERICAL_FACTORIZATION 12
25 #define JOB_ANALYSIS_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 13
26 #define JOB_NUMERICAL_FACTORIZATION 22
27 #define JOB_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 23
28 #define JOB_SOLVE_ITERATIVE_REFINEMENT 33
29 #define JOB_SOLVE_FORWARD_SUBSTITUTION 331
30 #define JOB_SOLVE_DIAGONAL_SUBSTITUTION 332
31 #define JOB_SOLVE_BACKWARD_SUBSTITUTION 333
32 #define JOB_RELEASE_OF_LU_MEMORY 0
33 #define JOB_RELEASE_OF_ALL_MEMORY -1
34 
35 #define IPARM_SIZE 64
36 
37 #if defined(PETSC_USE_64BIT_INDICES)
38  #if defined(PETSC_HAVE_LIBMKL_INTEL_ILP64)
39   #define INT_TYPE long long int
40   #define MKL_PARDISO pardiso
41   #define MKL_PARDISO_INIT pardisoinit
42  #else
  /* this is the case where the MKL BLAS/LAPACK are 32 bit integers but the 64 bit integer
     version of the Pardiso code is used; hence the need for the 64 below */
45   #define INT_TYPE long long int
46   #define MKL_PARDISO pardiso_64
47   #define MKL_PARDISO_INIT pardiso_64init
/* Shim for the mixed-width build: PETSc uses 64 bit indices while the MKL
   BLAS/LAPACK use 32 bit integers, so pardiso_64() is called for the solves
   but initialization still goes through the 32 bit pardisoinit().
   NOTE(review): presumably MKL provides no pardiso_64init() entry point,
   which is why this wrapper exists -- confirm against the MKL reference. */
void pardiso_64init(void *pt, INT_TYPE *mtype, INT_TYPE iparm [])
{
  int iparm_copy[IPARM_SIZE], mtype_copy, i;

  mtype_copy = *mtype; /* narrow the matrix type to int for pardisoinit() */
  pardisoinit(pt, &mtype_copy, iparm_copy);
  /* widen the int-sized defaults into the 64 bit iparm array */
  for(i = 0; i < IPARM_SIZE; i++){
    iparm[i] = iparm_copy[i];
  }
}
58  #endif
59 #else
60  #define INT_TYPE int
61  #define MKL_PARDISO pardiso
62  #define MKL_PARDISO_INIT pardisoinit
63 #endif
64 
65 
66 /*
67  *  Internal data structure.
68  *  For more information check mkl_pardiso manual.
69  */
/*
 *  Internal data structure holding all state for one MKL PARDISO factorization.
 *  For more information check the mkl_pardiso manual.
 */
typedef struct {

  /* PARDISO control parameters; entry meanings follow the (1-based) iparm
     numbering of the MKL manual */
  INT_TYPE     iparm[IPARM_SIZE];

  /*
   * Internal mkl_pardiso memory location.
   * After the first call to mkl_pardiso do not modify pt, as that could cause a serious memory leak.
   */
  void         *pt[IPARM_SIZE];

  /* Basic mkl_pardiso info: solver phase, max number of factors, which factor
     to use, matrix type, matrix size, number of right-hand sides, verbosity,
     and the error code returned by the last PARDISO call */
  INT_TYPE     phase, maxfct, mnum, mtype, n, nrhs, msglvl, err;

  /* Matrix in CSR form: values (a), row pointers (ia), column indices (ja) */
  void         *a;
  INT_TYPE     *ia, *ja;

  /* Number of non-zero elements */
  INT_TYPE     nz;

  /* Row permutation vector; also doubles as the 0/1 marker array that selects
     the Schur rows when the Schur switch (iparm[36-1]) is enabled */
  INT_TYPE     *perm;

  /* Define if matrix preserves sparse structure. */
  MatStructure matstruc;

  PetscBool    needsym;  /* PARDISO wants only the upper triangular part (symmetric storage) */
  PetscBool    freeaij;  /* ia/ja/a were allocated by the Convert callback and must be freed on destroy */

  /* Schur complement */
  PetscScalar  *schur;           /* dense schur_size x schur_size Schur complement */
  PetscInt     schur_size;       /* number of Schur indices */
  PetscInt     *schur_idxs;      /* global indices defining the Schur block */
  PetscScalar  *schur_work;      /* scratch space for the Schur solves */
  PetscBLASInt schur_work_size;  /* length of schur_work */
  PetscBool    solve_interior;   /* solve only for the interior (non-Schur) unknowns */

  /* True if mkl_pardiso functions have been used (internal memory must be released on destroy). */
  PetscBool CleanUp;

  /* Conversion to a format suitable for MKL */
  PetscErrorCode (*Convert)(Mat, PetscBool, MatReuse, PetscBool*, INT_TYPE*, INT_TYPE**, INT_TYPE**, PetscScalar**);
} Mat_MKL_PARDISO;
114 
115 PetscErrorCode MatMKLPardiso_Convert_seqsbaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
116 {
117   Mat_SeqSBAIJ   *aa = (Mat_SeqSBAIJ*)A->data;
118   PetscInt       bs  = A->rmap->bs;
119 
120   PetscFunctionBegin;
121   if (!sym) {
122     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"This should not happen");
123   }
124   if (bs == 1) { /* already in the correct format */
125     *v    = aa->a;
126     /* though PetscInt and INT_TYPE are of the same size since they are defined differently the Intel compiler requires a cast */
127     *r    = (INT_TYPE*)aa->i;
128     *c    = (INT_TYPE*)aa->j;
129     *nnz  = (INT_TYPE)aa->nz;
130     *free = PETSC_FALSE;
131   } else {
132     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Conversion from SeqSBAIJ to MKL Pardiso format still need to be implemented");
133   }
134   PetscFunctionReturn(0);
135 }
136 
/* Placeholder Convert callback for SeqBAIJ matrices: block formats are not
   supported yet, so every call raises PETSC_ERR_SUP. */
PetscErrorCode MatMKLPardiso_Convert_seqbaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Conversion from SeqBAIJ to MKL Pardiso format still need to be implemented");
  PetscFunctionReturn(0);
}
143 
144 PetscErrorCode MatMKLPardiso_Convert_seqaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
145 {
146   Mat_SeqAIJ     *aa = (Mat_SeqAIJ*)A->data;
147   PetscErrorCode ierr;
148 
149   PetscFunctionBegin;
150   if (!sym) { /* already in the correct format */
151     *v    = aa->a;
152     *r    = (INT_TYPE*)aa->i;
153     *c    = (INT_TYPE*)aa->j;
154     *nnz  = (INT_TYPE)aa->nz;
155     *free = PETSC_FALSE;
156     PetscFunctionReturn(0);
157   }
158   /* need to get the triangular part */
159   if (reuse == MAT_INITIAL_MATRIX) {
160     PetscScalar *vals,*vv;
161     PetscInt    *row,*col,*jj;
162     PetscInt    m = A->rmap->n,nz,i;
163 
164     nz = 0;
165     for (i=0; i<m; i++) {
166       nz += aa->i[i+1] - aa->diag[i];
167     }
168     ierr = PetscMalloc2(m+1,&row,nz,&col);CHKERRQ(ierr);
169     ierr = PetscMalloc1(nz,&vals);CHKERRQ(ierr);
170     jj = col;
171     vv = vals;
172 
173     row[0] = 0;
174     for (i=0; i<m; i++) {
175       PetscInt    *aj = aa->j + aa->diag[i];
176       PetscScalar *av = aa->a + aa->diag[i];
177       PetscInt    rl = aa->i[i+1] - aa->diag[i],j;
178       for (j=0; j<rl; j++) {
179         *jj = *aj; jj++; aj++;
180         *vv = *av; vv++; av++;
181       }
182       row[i+1]    = row[i] + rl;
183     }
184     *v    = vals;
185     *r    = (INT_TYPE*)row;
186     *c    = (INT_TYPE*)col;
187     *nnz  = (INT_TYPE)nz;
188     *free = PETSC_TRUE;
189   } else {
190     PetscScalar *vv;
191     PetscInt    m = A->rmap->n,i;
192 
193     vv = *v;
194     for (i=0; i<m; i++) {
195       PetscScalar *av = aa->a + aa->diag[i];
196       PetscInt    rl = aa->i[i+1] - aa->diag[i],j;
197       for (j=0; j<rl; j++) {
198         *vv = *av; vv++; av++;
199       }
200     }
201     *free = PETSC_TRUE;
202   }
203   PetscFunctionReturn(0);
204 }
205 
206 
/* Solve with the Schur complement held in F (factoring it first if needed).
   B and X are raw dense column-major arrays of size schur_size x nrhs.
   X == B (in-place) is allowed only when the Schur complement is factored;
   the inverted case multiplies by the explicit inverse, which cannot alias. */
static PetscErrorCode MatMKLPardisoSolveSchur_Private(Mat F, PetscScalar *B, PetscScalar *X)
{
  Mat_MKL_PARDISO      *mpardiso =(Mat_MKL_PARDISO*)F->data;
  Mat                  S,Xmat,Bmat;
  MatFactorSchurStatus schurstatus;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr);
  ierr = MatFactorGetSchurComplement(F,&S,&schurstatus);CHKERRQ(ierr);
  if (X == B && schurstatus == MAT_FACTOR_SCHUR_INVERTED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"X and B cannot point to the same address");
  /* wrap the raw arrays as dense Mats so the Mat-level solve/mult APIs apply */
  ierr = MatCreateSeqDense(PETSC_COMM_SELF,mpardiso->schur_size,mpardiso->nrhs,B,&Bmat);CHKERRQ(ierr);
  ierr = MatCreateSeqDense(PETSC_COMM_SELF,mpardiso->schur_size,mpardiso->nrhs,X,&Xmat);CHKERRQ(ierr);
  if (X != B) { /* using MatMatSolve */
    ierr = MatCopy(Bmat,Xmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_COMPLEX)
  if (mpardiso->iparm[12-1] == 1) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Hermitian solve not implemented yet");
#endif

  /* nonzero iparm[12-1] means PARDISO is in transpose-solve mode: mirror it here */
  switch (schurstatus) {
  case MAT_FACTOR_SCHUR_FACTORED:
    if (!mpardiso->iparm[12-1]) {
      ierr = MatMatSolve(S,Bmat,Xmat);CHKERRQ(ierr);
    } else { /* transpose solve */
      ierr = MatMatSolveTranspose(S,Bmat,Xmat);CHKERRQ(ierr);
    }
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    /* inverse is explicit: solving reduces to a matrix-matrix multiply */
    if (!mpardiso->iparm[12-1]) {
      ierr = MatMatMult(S,Bmat,MAT_REUSE_MATRIX,PETSC_DEFAULT,&Xmat);CHKERRQ(ierr);
    } else { /* transpose solve */
      ierr = MatTransposeMatMult(S,Bmat,MAT_REUSE_MATRIX,PETSC_DEFAULT,&Xmat);CHKERRQ(ierr);
    }
    break;
  default:
    SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status);
    break;
  }
  ierr = MatFactorRestoreSchurComplement(F,&S,schurstatus);CHKERRQ(ierr);
  ierr = MatDestroy(&Bmat);CHKERRQ(ierr);
  ierr = MatDestroy(&Xmat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
252 
/* Register the index set that defines the Schur complement block.
   Requirements: sequential factor, sorted indices. Dense Schur storage and
   workspace are (re)allocated only when the number of indices changes, and
   the selected rows are flagged in perm[] so PARDISO computes the Schur
   complement during factorization. */
PetscErrorCode MatFactorSetSchurIS_MKL_PARDISO(Mat F, IS is)
{
  Mat_MKL_PARDISO *mpardiso =(Mat_MKL_PARDISO*)F->data;
  const PetscInt  *idxs;
  PetscInt        size,i;
  PetscMPIInt     csize;
  PetscBool       sorted;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&csize);CHKERRQ(ierr);
  if (csize > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MKL_PARDISO parallel Schur complements not yet supported from PETSc");
  ierr = ISSorted(is,&sorted);CHKERRQ(ierr);
  if (!sorted) {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS for MKL_PARDISO Schur complements needs to be sorted");
  }
  ierr = ISGetLocalSize(is,&size);CHKERRQ(ierr);
  if (mpardiso->schur_size != size) { /* reallocate only when the Schur size changed */
    mpardiso->schur_size = size;
    ierr = PetscFree2(mpardiso->schur,mpardiso->schur_work);CHKERRQ(ierr);
    ierr = PetscFree(mpardiso->schur_idxs);CHKERRQ(ierr);
    /* workspace big enough for one full-length (n) vector or two Schur-sized blocks */
    ierr = PetscBLASIntCast(PetscMax(mpardiso->n,2*size),&mpardiso->schur_work_size);CHKERRQ(ierr);
    ierr = PetscMalloc2(size*size,&mpardiso->schur,mpardiso->schur_work_size,&mpardiso->schur_work);CHKERRQ(ierr);
    ierr = PetscMalloc1(size,&mpardiso->schur_idxs);CHKERRQ(ierr);
  }
  /* wrap the raw Schur storage in a dense Mat attached to the factor */
  ierr = MatCreateSeqDense(PETSC_COMM_SSELF,mpardiso->schur_size,mpardiso->schur_size,mpardiso->schur,&F->schur);CHKERRQ(ierr);
  if (mpardiso->mtype == 2) { /* PARDISO mtype 2 = real symmetric positive definite */
    ierr = MatSetOption(F->schur,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr);
  }

  /* flag Schur rows in perm[] (1 = Schur row, 0 = interior row) */
  ierr = PetscArrayzero(mpardiso->perm,mpardiso->n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxs);CHKERRQ(ierr);
  ierr = PetscArraycpy(mpardiso->schur_idxs,idxs,size);CHKERRQ(ierr);
  for (i=0;i<size;i++) mpardiso->perm[idxs[i]] = 1;
  ierr = ISRestoreIndices(is,&idxs);CHKERRQ(ierr);
  if (size) { /* turn on Schur switch if the set of indices is not empty */
    mpardiso->iparm[36-1] = 2;
  }
  PetscFunctionReturn(0);
}
293 
/* Destroy the factor: release PARDISO's internal memory (phase -1) if any
   PARDISO call was made, then free the permutation, Schur storage, any CSR
   copy owned by the Convert callback, and the context itself. */
PetscErrorCode MatDestroy_MKL_PARDISO(Mat A)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  if (mat_mkl_pardiso->CleanUp) {
    /* ask PARDISO to release all internal memory associated with this handle */
    mat_mkl_pardiso->phase = JOB_RELEASE_OF_ALL_MEMORY;

    /* NOTE(review): err is not checked after this release call */
    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      NULL,
      NULL,
      NULL,
      NULL,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      NULL,
      NULL,
      &mat_mkl_pardiso->err);
  }
  ierr = PetscFree(mat_mkl_pardiso->perm);CHKERRQ(ierr);
  ierr = PetscFree2(mat_mkl_pardiso->schur,mat_mkl_pardiso->schur_work);CHKERRQ(ierr);
  ierr = PetscFree(mat_mkl_pardiso->schur_idxs);CHKERRQ(ierr);
  if (mat_mkl_pardiso->freeaij) { /* CSR copy was built by the Convert callback */
    ierr = PetscFree2(mat_mkl_pardiso->ia,mat_mkl_pardiso->ja);CHKERRQ(ierr);
    ierr = PetscFree(mat_mkl_pardiso->a);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->data);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMkl_PardisoSetCntl_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
335 
336 static PetscErrorCode MatMKLPardisoScatterSchur_Private(Mat_MKL_PARDISO *mpardiso, PetscScalar *whole, PetscScalar *schur, PetscBool reduce)
337 {
338   PetscFunctionBegin;
339   if (reduce) { /* data given for the whole matrix */
340     PetscInt i,m=0,p=0;
341     for (i=0;i<mpardiso->nrhs;i++) {
342       PetscInt j;
343       for (j=0;j<mpardiso->schur_size;j++) {
344         schur[p+j] = whole[m+mpardiso->schur_idxs[j]];
345       }
346       m += mpardiso->n;
347       p += mpardiso->schur_size;
348     }
349   } else { /* from Schur to whole */
350     PetscInt i,m=0,p=0;
351     for (i=0;i<mpardiso->nrhs;i++) {
352       PetscInt j;
353       for (j=0;j<mpardiso->schur_size;j++) {
354         whole[m+mpardiso->schur_idxs[j]] = schur[p+j];
355       }
356       m += mpardiso->n;
357       p += mpardiso->schur_size;
358     }
359   }
360   PetscFunctionReturn(0);
361 }
362 
/* Solve A x = b using the PARDISO factorization stored in A->data.
   Without a Schur complement this is a single phase-33 solve; with one, a
   forward substitution (phase 331), a dense Schur solve, and a backward
   substitution (phase 333) are chained. When b and x share memory the
   in-place option iparm[6-1]=1 is used with a scratch output buffer. */
PetscErrorCode MatSolve_MKL_PARDISO(Mat A,Vec b,Vec x)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
  PetscErrorCode    ierr;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  mat_mkl_pardiso->nrhs = 1;
  ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArrayRead(b,&barray);CHKERRQ(ierr);

  /* with a Schur complement only the forward substitution runs here */
  if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
  else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

  if (barray == xarray) { /* if the two vectors share the same memory */
    PetscScalar *work;
    if (!mat_mkl_pardiso->schur_work) {
      ierr = PetscMalloc1(mat_mkl_pardiso->n,&work);CHKERRQ(ierr);
    } else {
      work = mat_mkl_pardiso->schur_work; /* reuse the Schur scratch space */
    }
    /* iparm[6-1]=1: write the solution back into the rhs array; `work` is the
       required output slot that PARDISO still insists on being passed */
    mat_mkl_pardiso->iparm[6-1] = 1;
    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      NULL,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)xarray,
      (void*)work,
      &mat_mkl_pardiso->err);
    if (!mat_mkl_pardiso->schur_work) {
      ierr = PetscFree(work);CHKERRQ(ierr);
    }
  } else {
    mat_mkl_pardiso->iparm[6-1] = 0; /* solution goes into xarray */
    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      mat_mkl_pardiso->perm,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)barray,
      (void*)xarray,
      &mat_mkl_pardiso->err);
  }
  ierr = VecRestoreArrayRead(b,&barray);CHKERRQ(ierr);

  if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

  if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
    PetscInt shift = mat_mkl_pardiso->schur_size;

    /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
    if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) {
      shift = 0; /* in-place solve is fine when the Schur factor is triangular */
    }

    if (!mat_mkl_pardiso->solve_interior) {
      /* solve Schur complement: compact the Schur rows, solve, expand back */
      ierr = MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work,PETSC_TRUE);CHKERRQ(ierr);
      ierr = MatMKLPardisoSolveSchur_Private(A,mat_mkl_pardiso->schur_work,mat_mkl_pardiso->schur_work+shift);CHKERRQ(ierr);
      ierr = MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work+shift,PETSC_FALSE);CHKERRQ(ierr);
    } else { /* if we are solving for the interior problem, any value in barray[schur] forward-substituted to xarray[schur] will be neglected */
      PetscInt i;
      for (i=0;i<mat_mkl_pardiso->schur_size;i++) {
        xarray[mat_mkl_pardiso->schur_idxs[i]] = 0.;
      }
    }

    /* expansion phase: backward substitution, done in place on xarray */
    mat_mkl_pardiso->iparm[6-1] = 1;
    mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      mat_mkl_pardiso->perm,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)xarray,
      (void*)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
      &mat_mkl_pardiso->err);

    if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);
    mat_mkl_pardiso->iparm[6-1] = 0; /* restore the default (out-of-place) mode */
  }
  ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(0);
}
475 
476 PetscErrorCode MatSolveTranspose_MKL_PARDISO(Mat A,Vec b,Vec x)
477 {
478   Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
479   PetscInt        oiparm12;
480   PetscErrorCode  ierr;
481 
482   PetscFunctionBegin;
483   oiparm12 = mat_mkl_pardiso->iparm[12 - 1];
484   mat_mkl_pardiso->iparm[12 - 1] = 2;
485   ierr = MatSolve_MKL_PARDISO(A,b,x);CHKERRQ(ierr);
486   mat_mkl_pardiso->iparm[12 - 1] = oiparm12;
487   PetscFunctionReturn(0);
488 }
489 
/* Solve A X = B for multiple right-hand sides. B and X must be SeqDense and
   may not share memory. Mirrors MatSolve_MKL_PARDISO: a plain phase-33 solve
   without a Schur complement, otherwise forward substitution / Schur solve /
   backward substitution, growing the Schur workspace temporarily if needed. */
PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A,Mat B,Mat X)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)(A)->data;
  PetscErrorCode    ierr;
  const PetscScalar *barray;
  PetscScalar       *xarray;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATSEQDENSE,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATSEQDENSE matrix");
  ierr = PetscObjectTypeCompare((PetscObject)X,MATSEQDENSE,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATSEQDENSE matrix");

  /* NOTE(review): nrhs is INT_TYPE; the PetscInt* cast assumes PetscInt and
     INT_TYPE have the same size in this build -- confirm for ILP64 configs */
  ierr = MatGetSize(B,NULL,(PetscInt*)&mat_mkl_pardiso->nrhs);CHKERRQ(ierr);

  if (mat_mkl_pardiso->nrhs > 0) {
    ierr = MatDenseGetArrayRead(B,&barray);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&xarray);CHKERRQ(ierr);

    if (barray == xarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"B and X cannot share the same memory location");
    /* with a Schur complement only the forward substitution runs here */
    if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
    else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;
    mat_mkl_pardiso->iparm[6-1] = 0; /* out-of-place: solution goes into xarray */

    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      mat_mkl_pardiso->perm,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)barray,
      (void*)xarray,
      &mat_mkl_pardiso->err);
    if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

    ierr = MatDenseRestoreArrayRead(B,&barray);CHKERRQ(ierr);
    if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
      PetscScalar *o_schur_work = NULL;
      PetscInt    shift = mat_mkl_pardiso->schur_size*mat_mkl_pardiso->nrhs,scale;
      PetscInt    mem = mat_mkl_pardiso->n*mat_mkl_pardiso->nrhs;

      /* allocate extra memory if it is needed */
      scale = 1;
      if (A->schur_status == MAT_FACTOR_SCHUR_INVERTED) scale = 2; /* need separate in/out blocks */

      mem *= scale;
      if (mem > mat_mkl_pardiso->schur_work_size) {
        /* temporarily swap in a bigger workspace; the original is restored below */
        o_schur_work = mat_mkl_pardiso->schur_work;
        ierr = PetscMalloc1(mem,&mat_mkl_pardiso->schur_work);CHKERRQ(ierr);
      }

      /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
      if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;

      /* solve Schur complement */
      if (!mat_mkl_pardiso->solve_interior) {
        ierr = MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work,PETSC_TRUE);CHKERRQ(ierr);
        ierr = MatMKLPardisoSolveSchur_Private(A,mat_mkl_pardiso->schur_work,mat_mkl_pardiso->schur_work+shift);CHKERRQ(ierr);
        ierr = MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work+shift,PETSC_FALSE);CHKERRQ(ierr);
      } else { /* if we are solving for the interior problem, any value in barray[schur,n] forward-substituted to xarray[schur,n] will be neglected */
        PetscInt i,n,m=0;
        for (n=0;n<mat_mkl_pardiso->nrhs;n++) {
          for (i=0;i<mat_mkl_pardiso->schur_size;i++) {
            xarray[mat_mkl_pardiso->schur_idxs[i]+m] = 0.;
          }
          m += mat_mkl_pardiso->n;
        }
      }

      /* expansion phase: backward substitution, done in place on xarray */
      mat_mkl_pardiso->iparm[6-1] = 1;
      mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
      MKL_PARDISO (mat_mkl_pardiso->pt,
        &mat_mkl_pardiso->maxfct,
        &mat_mkl_pardiso->mnum,
        &mat_mkl_pardiso->mtype,
        &mat_mkl_pardiso->phase,
        &mat_mkl_pardiso->n,
        mat_mkl_pardiso->a,
        mat_mkl_pardiso->ia,
        mat_mkl_pardiso->ja,
        mat_mkl_pardiso->perm,
        &mat_mkl_pardiso->nrhs,
        mat_mkl_pardiso->iparm,
        &mat_mkl_pardiso->msglvl,
        (void*)xarray,
        (void*)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
        &mat_mkl_pardiso->err);
      if (o_schur_work) { /* restore original schur_work (minimal size) */
        ierr = PetscFree(mat_mkl_pardiso->schur_work);CHKERRQ(ierr);
        mat_mkl_pardiso->schur_work = o_schur_work;
      }
      if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);
      mat_mkl_pardiso->iparm[6-1] = 0; /* restore the default (out-of-place) mode */
    }
  }
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(0);
}
597 
/* Numerical factorization: refresh the CSR values through the Convert
   callback (sparsity pattern assumed unchanged) and run PARDISO phase 22.
   If a Schur complement was requested, PARDISO writes it (row-major) into
   mat_mkl_pardiso->schur and it is transposed to PETSc's column-major order. */
PetscErrorCode MatFactorNumeric_MKL_PARDISO(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)(F)->data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  /* MAT_REUSE_MATRIX: only the numerical values are refreshed */
  ierr = (*mat_mkl_pardiso->Convert)(A,mat_mkl_pardiso->needsym,MAT_REUSE_MATRIX,&mat_mkl_pardiso->freeaij,&mat_mkl_pardiso->nz,&mat_mkl_pardiso->ia,&mat_mkl_pardiso->ja,(PetscScalar**)&mat_mkl_pardiso->a);CHKERRQ(ierr);

  mat_mkl_pardiso->phase = JOB_NUMERICAL_FACTORIZATION;
  MKL_PARDISO (mat_mkl_pardiso->pt,
    &mat_mkl_pardiso->maxfct,
    &mat_mkl_pardiso->mnum,
    &mat_mkl_pardiso->mtype,
    &mat_mkl_pardiso->phase,
    &mat_mkl_pardiso->n,
    mat_mkl_pardiso->a,
    mat_mkl_pardiso->ia,
    mat_mkl_pardiso->ja,
    mat_mkl_pardiso->perm,
    &mat_mkl_pardiso->nrhs,
    mat_mkl_pardiso->iparm,
    &mat_mkl_pardiso->msglvl,
    NULL,
    (void*)mat_mkl_pardiso->schur, /* receives the Schur complement when iparm[36-1] is set */
    &mat_mkl_pardiso->err);
  if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

  if (F->schur) { /* schur output from pardiso is in row major format */
    ierr = MatFactorRestoreSchurComplement(F,NULL,MAT_FACTOR_SCHUR_UNFACTORED);CHKERRQ(ierr);
    ierr = MatTranspose(F->schur,MAT_INPLACE_MATRIX,&F->schur);CHKERRQ(ierr);
  }
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  mat_mkl_pardiso->CleanUp  = PETSC_TRUE;
  PetscFunctionReturn(0);
}
634 
/* Read -mat_mkl_pardiso_<num> runtime options into the PARDISO context.
   Option numbers 1..60 mirror the (1-based) iparm indices of the MKL manual;
   65..69 are PETSc extensions for threads, maxfct, mnum, msglvl and mtype. */
PetscErrorCode PetscSetMKL_PARDISOFromOptions(Mat F, Mat A)
{
  Mat_MKL_PARDISO     *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;
  PetscErrorCode      ierr;
  PetscInt            icntl,threads=1;
  PetscBool           flg;

  PetscFunctionBegin;
  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MKL_PARDISO Options","Mat");CHKERRQ(ierr);

  ierr = PetscOptionsInt("-mat_mkl_pardiso_65","Number of threads to use within PARDISO","None",threads,&threads,&flg);CHKERRQ(ierr);
  if (flg) PetscSetMKL_PARDISOThreads((int)threads);

  ierr = PetscOptionsInt("-mat_mkl_pardiso_66","Maximum number of factors with identical sparsity structure that must be kept in memory at the same time","None",mat_mkl_pardiso->maxfct,&icntl,&flg);CHKERRQ(ierr);
  if (flg) mat_mkl_pardiso->maxfct = icntl;

  ierr = PetscOptionsInt("-mat_mkl_pardiso_67","Indicates the actual matrix for the solution phase","None",mat_mkl_pardiso->mnum,&icntl,&flg);CHKERRQ(ierr);
  if (flg) mat_mkl_pardiso->mnum = icntl;

  ierr = PetscOptionsInt("-mat_mkl_pardiso_68","Message level information","None",mat_mkl_pardiso->msglvl,&icntl,&flg);CHKERRQ(ierr);
  if (flg) mat_mkl_pardiso->msglvl = icntl;

  ierr = PetscOptionsInt("-mat_mkl_pardiso_69","Defines the matrix type","None",mat_mkl_pardiso->mtype,&icntl,&flg);CHKERRQ(ierr);
  if(flg){
    /* a new matrix type invalidates the current defaults: re-run pardisoinit */
    void *pt[IPARM_SIZE];
    mat_mkl_pardiso->mtype = icntl;
    MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
#if defined(PETSC_USE_REAL_SINGLE)
    mat_mkl_pardiso->iparm[27] = 1; /* single precision input data */
#else
    mat_mkl_pardiso->iparm[27] = 0; /* double precision input data */
#endif
    mat_mkl_pardiso->iparm[34] = 1; /* use 0-based indexing */
  }
  /* NOTE(review): -mat_mkl_pardiso_1 only gates the options below;
     iparm[0] itself is never updated here -- confirm this is intended */
  ierr = PetscOptionsInt("-mat_mkl_pardiso_1","Use default values","None",mat_mkl_pardiso->iparm[0],&icntl,&flg);CHKERRQ(ierr);

  if (flg && icntl != 0) {
    ierr = PetscOptionsInt("-mat_mkl_pardiso_2","Fill-in reducing ordering for the input matrix","None",mat_mkl_pardiso->iparm[1],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[1] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_4","Preconditioned CGS/CG","None",mat_mkl_pardiso->iparm[3],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[3] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_5","User permutation","None",mat_mkl_pardiso->iparm[4],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[4] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_6","Write solution on x","None",mat_mkl_pardiso->iparm[5],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[5] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_8","Iterative refinement step","None",mat_mkl_pardiso->iparm[7],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[7] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_10","Pivoting perturbation","None",mat_mkl_pardiso->iparm[9],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[9] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_11","Scaling vectors","None",mat_mkl_pardiso->iparm[10],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[10] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_12","Solve with transposed or conjugate transposed matrix A","None",mat_mkl_pardiso->iparm[11],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[11] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_13","Improved accuracy using (non-) symmetric weighted matching","None",mat_mkl_pardiso->iparm[12],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[12] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_18","Numbers of non-zero elements","None",mat_mkl_pardiso->iparm[17],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[17] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_19","Report number of floating point operations","None",mat_mkl_pardiso->iparm[18],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[18] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_21","Pivoting for symmetric indefinite matrices","None",mat_mkl_pardiso->iparm[20],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[20] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_24","Parallel factorization control","None",mat_mkl_pardiso->iparm[23],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[23] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_25","Parallel forward/backward solve control","None",mat_mkl_pardiso->iparm[24],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[24] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_27","Matrix checker","None",mat_mkl_pardiso->iparm[26],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[26] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_31","Partial solve and computing selected components of the solution vectors","None",mat_mkl_pardiso->iparm[30],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[30] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_34","Optimal number of threads for conditional numerical reproducibility (CNR) mode","None",mat_mkl_pardiso->iparm[33],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[33] = icntl;

    ierr = PetscOptionsInt("-mat_mkl_pardiso_60","Intel MKL_PARDISO mode","None",mat_mkl_pardiso->iparm[59],&icntl,&flg);CHKERRQ(ierr);
    if (flg) mat_mkl_pardiso->iparm[59] = icntl;
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
729 
730 PetscErrorCode MatFactorMKL_PARDISOInitialize_Private(Mat A, MatFactorType ftype, Mat_MKL_PARDISO *mat_mkl_pardiso)
731 {
732   PetscErrorCode ierr;
733   PetscInt       i;
734 
735   PetscFunctionBegin;
736   for ( i = 0; i < IPARM_SIZE; i++ ){
737     mat_mkl_pardiso->iparm[i] = 0;
738   }
739   for ( i = 0; i < IPARM_SIZE; i++ ){
740     mat_mkl_pardiso->pt[i] = 0;
741   }
742   /* Default options for both sym and unsym */
743   mat_mkl_pardiso->iparm[ 0] =  1; /* Solver default parameters overriden with provided by iparm */
744   mat_mkl_pardiso->iparm[ 1] =  2; /* Metis reordering */
745   mat_mkl_pardiso->iparm[ 5] =  0; /* Write solution into x */
746   mat_mkl_pardiso->iparm[ 7] =  0; /* Max number of iterative refinement steps */
747   mat_mkl_pardiso->iparm[17] = -1; /* Output: Number of nonzeros in the factor LU */
748   mat_mkl_pardiso->iparm[18] = -1; /* Output: Mflops for LU factorization */
749 #if 0
750   mat_mkl_pardiso->iparm[23] =  1; /* Parallel factorization control*/
751 #endif
752   mat_mkl_pardiso->iparm[34] =  1; /* Cluster Sparse Solver use C-style indexing for ia and ja arrays */
753   mat_mkl_pardiso->iparm[39] =  0; /* Input: matrix/rhs/solution stored on master */
754 
755   mat_mkl_pardiso->CleanUp   = PETSC_FALSE;
756   mat_mkl_pardiso->maxfct    = 1; /* Maximum number of numerical factorizations. */
757   mat_mkl_pardiso->mnum      = 1; /* Which factorization to use. */
758   mat_mkl_pardiso->msglvl    = 0; /* 0: do not print 1: Print statistical information in file */
759   mat_mkl_pardiso->phase     = -1;
760   mat_mkl_pardiso->err       = 0;
761 
762   mat_mkl_pardiso->n         = A->rmap->N;
763   mat_mkl_pardiso->nrhs      = 1;
764   mat_mkl_pardiso->err       = 0;
765   mat_mkl_pardiso->phase     = -1;
766 
767   if(ftype == MAT_FACTOR_LU){
768     mat_mkl_pardiso->iparm[ 9] = 13; /* Perturb the pivot elements with 1E-13 */
769     mat_mkl_pardiso->iparm[10] =  1; /* Use nonsymmetric permutation and scaling MPS */
770     mat_mkl_pardiso->iparm[12] =  1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
771 
772   } else {
773     mat_mkl_pardiso->iparm[ 9] = 13; /* Perturb the pivot elements with 1E-13 */
774     mat_mkl_pardiso->iparm[10] = 0; /* Use nonsymmetric permutation and scaling MPS */
775     mat_mkl_pardiso->iparm[12] = 1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
776 /*    mat_mkl_pardiso->iparm[20] =  1; */ /* Apply 1x1 and 2x2 Bunch-Kaufman pivoting during the factorization process */
777 #if defined(PETSC_USE_DEBUG)
778     mat_mkl_pardiso->iparm[26] = 1; /* Matrix checker */
779 #endif
780   }
781   ierr = PetscMalloc1(A->rmap->N*sizeof(INT_TYPE), &mat_mkl_pardiso->perm);CHKERRQ(ierr);
782   for(i = 0; i < A->rmap->N; i++){
783     mat_mkl_pardiso->perm[i] = 0;
784   }
785   mat_mkl_pardiso->schur_size = 0;
786   PetscFunctionReturn(0);
787 }
788 
789 PetscErrorCode MatFactorSymbolic_AIJMKL_PARDISO_Private(Mat F,Mat A,const MatFactorInfo *info)
790 {
791   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;
792   PetscErrorCode  ierr;
793 
794   PetscFunctionBegin;
795   mat_mkl_pardiso->matstruc = DIFFERENT_NONZERO_PATTERN;
796   ierr = PetscSetMKL_PARDISOFromOptions(F,A);CHKERRQ(ierr);
797 
798   /* throw away any previously computed structure */
799   if (mat_mkl_pardiso->freeaij) {
800     ierr = PetscFree2(mat_mkl_pardiso->ia,mat_mkl_pardiso->ja);CHKERRQ(ierr);
801     ierr = PetscFree(mat_mkl_pardiso->a);CHKERRQ(ierr);
802   }
803   ierr = (*mat_mkl_pardiso->Convert)(A,mat_mkl_pardiso->needsym,MAT_INITIAL_MATRIX,&mat_mkl_pardiso->freeaij,&mat_mkl_pardiso->nz,&mat_mkl_pardiso->ia,&mat_mkl_pardiso->ja,(PetscScalar**)&mat_mkl_pardiso->a);CHKERRQ(ierr);
804   mat_mkl_pardiso->n = A->rmap->N;
805 
806   mat_mkl_pardiso->phase = JOB_ANALYSIS;
807 
808   MKL_PARDISO (mat_mkl_pardiso->pt,
809     &mat_mkl_pardiso->maxfct,
810     &mat_mkl_pardiso->mnum,
811     &mat_mkl_pardiso->mtype,
812     &mat_mkl_pardiso->phase,
813     &mat_mkl_pardiso->n,
814     mat_mkl_pardiso->a,
815     mat_mkl_pardiso->ia,
816     mat_mkl_pardiso->ja,
817     mat_mkl_pardiso->perm,
818     &mat_mkl_pardiso->nrhs,
819     mat_mkl_pardiso->iparm,
820     &mat_mkl_pardiso->msglvl,
821     NULL,
822     NULL,
823     &mat_mkl_pardiso->err);
824   if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);
825 
826   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
827 
828   if (F->factortype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_MKL_PARDISO;
829   else F->ops->choleskyfactornumeric = MatFactorNumeric_MKL_PARDISO;
830 
831   F->ops->solve           = MatSolve_MKL_PARDISO;
832   F->ops->solvetranspose  = MatSolveTranspose_MKL_PARDISO;
833   F->ops->matsolve        = MatMatSolve_MKL_PARDISO;
834   PetscFunctionReturn(0);
835 }
836 
837 PetscErrorCode MatLUFactorSymbolic_AIJMKL_PARDISO(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
838 {
839   PetscErrorCode ierr;
840 
841   PetscFunctionBegin;
842   ierr = MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info);CHKERRQ(ierr);
843   PetscFunctionReturn(0);
844 }
845 
846 #if !defined(PETSC_USE_COMPLEX)
847 PetscErrorCode MatGetInertia_MKL_PARDISO(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
848 {
849   Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)F->data;
850 
851   PetscFunctionBegin;
852   if (nneg) *nneg = mat_mkl_pardiso->iparm[22];
853   if (npos) *npos = mat_mkl_pardiso->iparm[21];
854   if (nzero) *nzero = F->rmap->N -(mat_mkl_pardiso->iparm[22] + mat_mkl_pardiso->iparm[21]);
855   PetscFunctionReturn(0);
856 }
857 #endif
858 
859 PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_PARDISO(Mat F,Mat A,IS r,const MatFactorInfo *info)
860 {
861   PetscErrorCode ierr;
862 
863   PetscFunctionBegin;
864   ierr = MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info);CHKERRQ(ierr);
865 #if defined(PETSC_USE_COMPLEX)
866   F->ops->getinertia = NULL;
867 #else
868   F->ops->getinertia = MatGetInertia_MKL_PARDISO;
869 #endif
870   PetscFunctionReturn(0);
871 }
872 
873 PetscErrorCode MatView_MKL_PARDISO(Mat A, PetscViewer viewer)
874 {
875   PetscErrorCode    ierr;
876   PetscBool         iascii;
877   PetscViewerFormat format;
878   Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
879   PetscInt          i;
880 
881   PetscFunctionBegin;
882   if (A->ops->solve != MatSolve_MKL_PARDISO) PetscFunctionReturn(0);
883 
884   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
885   if (iascii) {
886     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
887     if (format == PETSC_VIEWER_ASCII_INFO) {
888       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO run parameters:\n");CHKERRQ(ierr);
889       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO phase:             %d \n",mat_mkl_pardiso->phase);CHKERRQ(ierr);
890       for(i = 1; i <= 64; i++){
891         ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO iparm[%d]:     %d \n",i, mat_mkl_pardiso->iparm[i - 1]);CHKERRQ(ierr);
892       }
893       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO maxfct:     %d \n", mat_mkl_pardiso->maxfct);CHKERRQ(ierr);
894       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO mnum:     %d \n", mat_mkl_pardiso->mnum);CHKERRQ(ierr);
895       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO mtype:     %d \n", mat_mkl_pardiso->mtype);CHKERRQ(ierr);
896       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO n:     %d \n", mat_mkl_pardiso->n);CHKERRQ(ierr);
897       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO nrhs:     %d \n", mat_mkl_pardiso->nrhs);CHKERRQ(ierr);
898       ierr = PetscViewerASCIIPrintf(viewer,"MKL_PARDISO msglvl:     %d \n", mat_mkl_pardiso->msglvl);CHKERRQ(ierr);
899     }
900   }
901   PetscFunctionReturn(0);
902 }
903 
904 
905 PetscErrorCode MatGetInfo_MKL_PARDISO(Mat A, MatInfoType flag, MatInfo *info)
906 {
907   Mat_MKL_PARDISO *mat_mkl_pardiso =(Mat_MKL_PARDISO*)A->data;
908 
909   PetscFunctionBegin;
910   info->block_size        = 1.0;
911   info->nz_used           = mat_mkl_pardiso->nz;
912   info->nz_allocated      = mat_mkl_pardiso->nz;
913   info->nz_unneeded       = 0.0;
914   info->assemblies        = 0.0;
915   info->mallocs           = 0.0;
916   info->memory            = 0.0;
917   info->fill_ratio_given  = 0;
918   info->fill_ratio_needed = 0;
919   info->factor_mallocs    = 0;
920   PetscFunctionReturn(0);
921 }
922 
923 PetscErrorCode MatMkl_PardisoSetCntl_MKL_PARDISO(Mat F,PetscInt icntl,PetscInt ival)
924 {
925   Mat_MKL_PARDISO *mat_mkl_pardiso =(Mat_MKL_PARDISO*)F->data;
926 
927   PetscFunctionBegin;
928   if(icntl <= 64){
929     mat_mkl_pardiso->iparm[icntl - 1] = ival;
930   } else {
931     if(icntl == 65) PetscSetMKL_PARDISOThreads(ival);
932     else if(icntl == 66) mat_mkl_pardiso->maxfct = ival;
933     else if(icntl == 67) mat_mkl_pardiso->mnum = ival;
934     else if(icntl == 68) mat_mkl_pardiso->msglvl = ival;
935     else if(icntl == 69){
936       void *pt[IPARM_SIZE];
937       mat_mkl_pardiso->mtype = ival;
938       MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
939 #if defined(PETSC_USE_REAL_SINGLE)
940       mat_mkl_pardiso->iparm[27] = 1;
941 #else
942       mat_mkl_pardiso->iparm[27] = 0;
943 #endif
944       mat_mkl_pardiso->iparm[34] = 1;
945     } else if(icntl==70) mat_mkl_pardiso->solve_interior = (PetscBool)!!ival;
946   }
947   PetscFunctionReturn(0);
948 }
949 
950 /*@
951   MatMkl_PardisoSetCntl - Set Mkl_Pardiso parameters
952 
953    Logically Collective on Mat
954 
955    Input Parameters:
956 +  F - the factored matrix obtained by calling MatGetFactor()
957 .  icntl - index of Mkl_Pardiso parameter
958 -  ival - value of Mkl_Pardiso parameter
959 
960   Options Database:
961 .   -mat_mkl_pardiso_<icntl> <ival>
962 
963    Level: beginner
964 
965    References:
966 .      Mkl_Pardiso Users' Guide
967 
968 .seealso: MatGetFactor()
969 @*/
970 PetscErrorCode MatMkl_PardisoSetCntl(Mat F,PetscInt icntl,PetscInt ival)
971 {
972   PetscErrorCode ierr;
973 
974   PetscFunctionBegin;
975   ierr = PetscTryMethod(F,"MatMkl_PardisoSetCntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
976   PetscFunctionReturn(0);
977 }
978 
979 /*MC
  MATSOLVERMKL_PARDISO -  A matrix type providing direct solvers (LU and Cholesky) for
  sequential matrices via the external package MKL_PARDISO.

  Works with MATSEQAIJ matrices (and with MATSEQSBAIJ matrices for Cholesky)
984 
985   Use -pc_type lu -pc_factor_mat_solver_type mkl_pardiso to use this direct solver
986 
987   Options Database Keys:
988 + -mat_mkl_pardiso_65 - Number of threads to use within MKL_PARDISO
989 . -mat_mkl_pardiso_66 - Maximum number of factors with identical sparsity structure that must be kept in memory at the same time
990 . -mat_mkl_pardiso_67 - Indicates the actual matrix for the solution phase
991 . -mat_mkl_pardiso_68 - Message level information
992 . -mat_mkl_pardiso_69 - Defines the matrix type. IMPORTANT: When you set this flag, iparm parameters are going to be set to the default ones for the matrix type
993 . -mat_mkl_pardiso_1  - Use default values
994 . -mat_mkl_pardiso_2  - Fill-in reducing ordering for the input matrix
995 . -mat_mkl_pardiso_4  - Preconditioned CGS/CG
996 . -mat_mkl_pardiso_5  - User permutation
997 . -mat_mkl_pardiso_6  - Write solution on x
998 . -mat_mkl_pardiso_8  - Iterative refinement step
999 . -mat_mkl_pardiso_10 - Pivoting perturbation
1000 . -mat_mkl_pardiso_11 - Scaling vectors
1001 . -mat_mkl_pardiso_12 - Solve with transposed or conjugate transposed matrix A
1002 . -mat_mkl_pardiso_13 - Improved accuracy using (non-) symmetric weighted matching
1003 . -mat_mkl_pardiso_18 - Numbers of non-zero elements
1004 . -mat_mkl_pardiso_19 - Report number of floating point operations
1005 . -mat_mkl_pardiso_21 - Pivoting for symmetric indefinite matrices
1006 . -mat_mkl_pardiso_24 - Parallel factorization control
1007 . -mat_mkl_pardiso_25 - Parallel forward/backward solve control
1008 . -mat_mkl_pardiso_27 - Matrix checker
1009 . -mat_mkl_pardiso_31 - Partial solve and computing selected components of the solution vectors
1010 . -mat_mkl_pardiso_34 - Optimal number of threads for conditional numerical reproducibility (CNR) mode
1011 - -mat_mkl_pardiso_60 - Intel MKL_PARDISO mode
1012 
1013   Level: beginner
1014 
1015   For more information please check  mkl_pardiso manual
1016 
1017 .seealso: PCFactorSetMatSolverType(), MatSolverType
1018 
1019 M*/
/* Reports the solver package name ("mkl_pardiso") for a factor matrix created by this package */
static PetscErrorCode MatFactorGetSolverType_mkl_pardiso(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMKL_PARDISO;
  PetscFunctionReturn(0);
}
1026 
1027 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mkl_pardiso(Mat A,MatFactorType ftype,Mat *F)
1028 {
1029   Mat             B;
1030   PetscErrorCode  ierr;
1031   Mat_MKL_PARDISO *mat_mkl_pardiso;
1032   PetscBool       isSeqAIJ,isSeqBAIJ,isSeqSBAIJ;
1033 
1034   PetscFunctionBegin;
1035   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
1036   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
1037   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
1038   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1039   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1040   ierr = PetscStrallocpy("mkl_pardiso",&((PetscObject)B)->type_name);CHKERRQ(ierr);
1041   ierr = MatSetUp(B);CHKERRQ(ierr);
1042 
1043   ierr = PetscNewLog(B,&mat_mkl_pardiso);CHKERRQ(ierr);
1044   B->data = mat_mkl_pardiso;
1045 
1046   ierr = MatFactorMKL_PARDISOInitialize_Private(A, ftype, mat_mkl_pardiso);CHKERRQ(ierr);
1047   if (ftype == MAT_FACTOR_LU) {
1048     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMKL_PARDISO;
1049     B->factortype            = MAT_FACTOR_LU;
1050     mat_mkl_pardiso->needsym = PETSC_FALSE;
1051     if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
1052     else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
1053     else if (isSeqSBAIJ) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO LU factor with SEQSBAIJ format! Use MAT_FACTOR_CHOLESKY instead");
1054     else SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO LU with %s format",((PetscObject)A)->type_name);
1055 #if defined(PETSC_USE_COMPLEX)
1056     mat_mkl_pardiso->mtype = 13;
1057 #else
1058     mat_mkl_pardiso->mtype = 11;
1059 #endif
1060   } else {
1061 #if defined(PETSC_USE_COMPLEX)
1062     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO CHOLESKY with complex scalars! Use MAT_FACTOR_LU instead",((PetscObject)A)->type_name);
1063 #endif
1064     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_AIJMKL_PARDISO;
1065     B->factortype                  = MAT_FACTOR_CHOLESKY;
1066     if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
1067     else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
1068     else if (isSeqSBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqsbaij;
1069     else SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO CHOLESKY with %s format",((PetscObject)A)->type_name);
1070 
1071     mat_mkl_pardiso->needsym = PETSC_TRUE;
1072     if (A->spd_set && A->spd) mat_mkl_pardiso->mtype = 2;
1073     else                      mat_mkl_pardiso->mtype = -2;
1074   }
1075   B->ops->destroy          = MatDestroy_MKL_PARDISO;
1076   B->ops->view             = MatView_MKL_PARDISO;
1077   B->factortype            = ftype;
1078   B->ops->getinfo          = MatGetInfo_MKL_PARDISO;
1079   B->assembled             = PETSC_TRUE;
1080 
1081   ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
1082   ierr = PetscStrallocpy(MATSOLVERMKL_PARDISO,&B->solvertype);CHKERRQ(ierr);
1083 
1084   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mkl_pardiso);CHKERRQ(ierr);
1085   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MKL_PARDISO);CHKERRQ(ierr);
1086   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMkl_PardisoSetCntl_C",MatMkl_PardisoSetCntl_MKL_PARDISO);CHKERRQ(ierr);
1087 
1088   *F = B;
1089   PetscFunctionReturn(0);
1090 }
1091 
1092 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void)
1093 {
1094   PetscErrorCode ierr;
1095 
1096   PetscFunctionBegin;
1097   ierr = MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mkl_pardiso);CHKERRQ(ierr);
1098   ierr = MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mkl_pardiso);CHKERRQ(ierr);
1099   ierr = MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mkl_pardiso);CHKERRQ(ierr);
1100   PetscFunctionReturn(0);
1101 }
1102 
1103