#include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/dense/seq/dense.h>

#if defined(PETSC_HAVE_MKL_INTEL_ILP64)
  #define MKL_ILP64
#endif
#include <mkl_pardiso.h>

PETSC_EXTERN void PetscSetMKL_PARDISOThreads(int);

/*
 *  Possible MKL PARDISO phases that control the execution of the solver.
 *  For more information, check the MKL PARDISO manual.
 */
#define JOB_ANALYSIS                                                    11
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION                            12
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 13
#define JOB_NUMERICAL_FACTORIZATION                                     22
#define JOB_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT          23
#define JOB_SOLVE_ITERATIVE_REFINEMENT                                  33
#define JOB_SOLVE_FORWARD_SUBSTITUTION                                  331
#define JOB_SOLVE_DIAGONAL_SUBSTITUTION                                 332
#define JOB_SOLVE_BACKWARD_SUBSTITUTION                                 333
#define JOB_RELEASE_OF_LU_MEMORY                                        0
#define JOB_RELEASE_OF_ALL_MEMORY                                       -1
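
/*
   The two-digit job codes follow the MKL convention that a value "ij" runs the solver from
   phase i through phase j (1 = analysis, 2 = numerical factorization, 3 = solve/iterative
   refinement); e.g. 13 performs analysis, factorization, and solve in one call, while the
   three-digit 33x codes select the individual substitution steps within the solve phase.
*/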

#define IPARM_SIZE 64

#if defined(PETSC_USE_64BIT_INDICES)
  #if defined(PETSC_HAVE_MKL_INTEL_ILP64)
    #define INT_TYPE         long long int
    #define MKL_PARDISO      pardiso
    #define MKL_PARDISO_INIT pardisoinit
  #else
    /* this is the case where the MKL BLAS/LAPACK use 32-bit integers but the 64-bit integer
       version of the PARDISO code is used; hence the need for the 64 below */
    #define INT_TYPE         long long int
    #define MKL_PARDISO      pardiso_64
    #define MKL_PARDISO_INIT pardiso_64init
void pardiso_64init(void *pt, INT_TYPE *mtype, INT_TYPE iparm[])
{
  int iparm_copy[IPARM_SIZE], mtype_copy, i;

  mtype_copy = *mtype;
  pardisoinit(pt, &mtype_copy, iparm_copy);
  for (i = 0; i < IPARM_SIZE; i++) iparm[i] = iparm_copy[i];
}
  #endif
#else
  #define INT_TYPE         int
  #define MKL_PARDISO      pardiso
  #define MKL_PARDISO_INIT pardisoinit
#endif

#define PetscCallPardiso(f) PetscStackCallExternalVoid("MKL_PARDISO", f);
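
/* Every raw call into the library below is wrapped as PetscCallPardiso(MKL_PARDISO(...)):
   PetscStackCallExternalVoid() pushes/pops the PETSc stack around the void-returning external
   call so that failures inside MKL are attributed correctly in tracebacks. */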

/*
   Internal data structure.
 */
typedef struct {
  /* Configuration vector */
  INT_TYPE iparm[IPARM_SIZE];

  /*
     Internal MKL PARDISO memory location.
     After the first call to MKL PARDISO do not modify pt, as that could cause a serious memory leak.
   */
  void *pt[IPARM_SIZE];

  /* Basic MKL PARDISO info */
  INT_TYPE phase, maxfct, mnum, mtype, n, nrhs, msglvl, err;

  /* Matrix structure */
  void     *a;
  INT_TYPE *ia, *ja;

  /* Number of non-zero elements */
  INT_TYPE nz;

  /* Row permutation vector */
  INT_TYPE *perm;

  /* Indicates whether the matrix preserves its sparse structure. */
  MatStructure matstruc;

  PetscBool needsym;
  PetscBool freeaij;

  /* Schur complement */
  PetscScalar *schur;
  PetscInt     schur_size;
  PetscInt    *schur_idxs;
  PetscScalar *schur_work;
  PetscBLASInt schur_work_size;
  PetscBool    solve_interior;

  /* True if MKL PARDISO functions have been used. */
  PetscBool CleanUp;

  /* Conversion to a format suitable for MKL */
  PetscErrorCode (*Convert)(Mat, PetscBool, MatReuse, PetscBool *, INT_TYPE *, INT_TYPE **, INT_TYPE **, PetscScalar **);
} Mat_MKL_PARDISO;
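
/*
   The Convert() callback produces (nz, ia, ja, a) in the layout PARDISO expects for the
   current matrix type; the PetscBool it returns through its fourth argument records whether
   the arrays were freshly allocated (and must be freed in MatDestroy_MKL_PARDISO()) or
   merely alias the PETSc matrix internals.
*/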

static PetscErrorCode MatMKLPardiso_Convert_seqsbaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v)
{
  Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ *)A->data;
  PetscInt      bs = A->rmap->bs, i;

  PetscFunctionBegin;
  PetscCheck(sym, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "This should not happen");
  *v = aa->a;
  if (bs == 1) { /* already in the correct format */
    /* though PetscInt and INT_TYPE are the same size, since they are defined differently the Intel compiler requires a cast */
    *r    = (INT_TYPE *)aa->i;
    *c    = (INT_TYPE *)aa->j;
    *nnz  = (INT_TYPE)aa->nz;
    *free = PETSC_FALSE;
  } else if (reuse == MAT_INITIAL_MATRIX) {
    PetscInt  m = A->rmap->n, nz = aa->nz;
    PetscInt *row, *col;
    PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
    for (i = 0; i < m + 1; i++) row[i] = aa->i[i] + 1;
    for (i = 0; i < nz; i++) col[i] = aa->j[i] + 1;
    *r    = (INT_TYPE *)row;
    *c    = (INT_TYPE *)col;
    *nnz  = (INT_TYPE)nz;
    *free = PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
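
/*
   Illustrative note: when bs > 1 the block-CSR arrays are shifted to the Fortran 1-based
   convention PARDISO uses when iparm[34] == 0, e.g. the 0-based row pointer {0,2,3} with
   column indices {0,1,1} becomes {1,3,4} and {1,2,2}.
*/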

static PetscErrorCode MatMKLPardiso_Convert_seqbaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v)
{
  Mat_SeqBAIJ *aa = (Mat_SeqBAIJ *)A->data;
  PetscInt     bs = A->rmap->bs, i;

  PetscFunctionBegin;
  if (!sym) {
    *v = aa->a;
    if (bs == 1) { /* already in the correct format */
      /* though PetscInt and INT_TYPE are the same size, since they are defined differently the Intel compiler requires a cast */
      *r    = (INT_TYPE *)aa->i;
      *c    = (INT_TYPE *)aa->j;
      *nnz  = (INT_TYPE)aa->nz;
      *free = PETSC_FALSE;
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscInt  m = A->rmap->n, nz = aa->nz;
      PetscInt *row, *col;
      PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
      for (i = 0; i < m + 1; i++) row[i] = aa->i[i] + 1;
      for (i = 0; i < nz; i++) col[i] = aa->j[i] + 1;
      *r   = (INT_TYPE *)row;
      *c   = (INT_TYPE *)col;
      *nnz = (INT_TYPE)nz;
    }
    *free = PETSC_TRUE;
  } else {
    SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "This should not happen");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMKLPardiso_Convert_seqaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v)
{
  Mat_SeqAIJ  *aa = (Mat_SeqAIJ *)A->data;
  PetscScalar *aav;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, (const PetscScalar **)&aav));
  if (!sym) { /* already in the correct format */
    *v    = aav;
    *r    = (INT_TYPE *)aa->i;
    *c    = (INT_TYPE *)aa->j;
    *nnz  = (INT_TYPE)aa->nz;
    *free = PETSC_FALSE;
  } else if (reuse == MAT_INITIAL_MATRIX) { /* need to get the triangular part */
    PetscScalar *vals, *vv;
    PetscInt    *row, *col, *jj;
    PetscInt     m = A->rmap->n, nz, i;

    nz = 0;
    for (i = 0; i < m; i++) nz += aa->i[i + 1] - aa->diag[i];
    PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
    PetscCall(PetscMalloc1(nz, &vals));
    jj = col;
    vv = vals;

    row[0] = 0;
    for (i = 0; i < m; i++) {
      PetscInt    *aj = aa->j + aa->diag[i];
      PetscScalar *av = aav + aa->diag[i];
      PetscInt     rl = aa->i[i + 1] - aa->diag[i], j;

      for (j = 0; j < rl; j++) {
        *jj = *aj;
        jj++;
        aj++;
        *vv = *av;
        vv++;
        av++;
      }
      row[i + 1] = row[i] + rl;
    }
    *v    = vals;
    *r    = (INT_TYPE *)row;
    *c    = (INT_TYPE *)col;
    *nnz  = (INT_TYPE)nz;
    *free = PETSC_TRUE;
  } else {
    PetscScalar *vv;
    PetscInt     m = A->rmap->n, i;

    vv = *v;
    for (i = 0; i < m; i++) {
      PetscScalar *av = aav + aa->diag[i];
      PetscInt     rl = aa->i[i + 1] - aa->diag[i], j;
      for (j = 0; j < rl; j++) {
        *vv = *av;
        vv++;
        av++;
      }
    }
    *free = PETSC_TRUE;
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A, (const PetscScalar **)&aav));
  PetscFunctionReturn(PETSC_SUCCESS);
}
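
/*
   The symmetric paths above exploit that aa->diag[i] indexes the diagonal entry of row i, so
   the slice [aa->diag[i], aa->i[i + 1]) of aa->j/aav is exactly the upper triangle (diagonal
   included) that PARDISO's symmetric storage expects.
*/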

static PetscErrorCode MatMKLPardisoSolveSchur_Private(Mat F, PetscScalar *B, PetscScalar *X)
{
  Mat_MKL_PARDISO     *mpardiso = (Mat_MKL_PARDISO *)F->data;
  Mat                  S, Xmat, Bmat;
  MatFactorSchurStatus schurstatus;

  PetscFunctionBegin;
  PetscCall(MatFactorGetSchurComplement(F, &S, &schurstatus));
  PetscCheck(X != B || schurstatus != MAT_FACTOR_SCHUR_INVERTED, PETSC_COMM_SELF, PETSC_ERR_SUP, "X and B cannot point to the same address");
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, mpardiso->schur_size, mpardiso->nrhs, B, &Bmat));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, mpardiso->schur_size, mpardiso->nrhs, X, &Xmat));
  PetscCall(MatSetType(Bmat, ((PetscObject)S)->type_name));
  PetscCall(MatSetType(Xmat, ((PetscObject)S)->type_name));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
  PetscCall(MatBindToCPU(Xmat, S->boundtocpu));
  PetscCall(MatBindToCPU(Bmat, S->boundtocpu));
#endif

#if defined(PETSC_USE_COMPLEX)
  PetscCheck(mpardiso->iparm[12 - 1] != 1, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Hermitian solve not implemented yet");
#endif

  switch (schurstatus) {
  case MAT_FACTOR_SCHUR_FACTORED:
    if (!mpardiso->iparm[12 - 1]) {
      PetscCall(MatMatSolve(S, Bmat, Xmat));
    } else { /* transpose solve */
      PetscCall(MatMatSolveTranspose(S, Bmat, Xmat));
    }
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    PetscCall(MatProductCreateWithMat(S, Bmat, NULL, Xmat));
    if (!mpardiso->iparm[12 - 1]) {
      PetscCall(MatProductSetType(Xmat, MATPRODUCT_AB));
    } else { /* transpose solve */
      PetscCall(MatProductSetType(Xmat, MATPRODUCT_AtB));
    }
    PetscCall(MatProductSetFromOptions(Xmat));
    PetscCall(MatProductSymbolic(Xmat));
    PetscCall(MatProductNumeric(Xmat));
    PetscCall(MatProductClear(Xmat));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %" PetscInt_FMT, F->schur_status);
    break;
  }
  PetscCall(MatFactorRestoreSchurComplement(F, &S, schurstatus));
  PetscCall(MatDestroy(&Bmat));
  PetscCall(MatDestroy(&Xmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorSetSchurIS_MKL_PARDISO(Mat F, IS is)
{
  Mat_MKL_PARDISO   *mpardiso = (Mat_MKL_PARDISO *)F->data;
  const PetscScalar *arr;
  const PetscInt    *idxs;
  PetscInt           size, i;
  PetscMPIInt        csize;
  PetscBool          sorted;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)F), &csize));
  PetscCheck(csize <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "MKL PARDISO parallel Schur complements not yet supported from PETSc");
  PetscCall(ISSorted(is, &sorted));
  PetscCheck(sorted, PETSC_COMM_SELF, PETSC_ERR_SUP, "IS for MKL PARDISO Schur complements needs to be sorted");
  PetscCall(ISGetLocalSize(is, &size));
  PetscCall(PetscFree(mpardiso->schur_work));
  PetscCall(PetscBLASIntCast(PetscMax(mpardiso->n, 2 * size), &mpardiso->schur_work_size));
  PetscCall(PetscMalloc1(mpardiso->schur_work_size, &mpardiso->schur_work));
  PetscCall(MatDestroy(&F->schur));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, size, size, NULL, &F->schur));
  PetscCall(MatDenseGetArrayRead(F->schur, &arr));
  mpardiso->schur      = (PetscScalar *)arr;
  mpardiso->schur_size = size;
  PetscCall(MatDenseRestoreArrayRead(F->schur, &arr));
  if (mpardiso->mtype == 2) PetscCall(MatSetOption(F->schur, MAT_SPD, PETSC_TRUE));

  PetscCall(PetscFree(mpardiso->schur_idxs));
  PetscCall(PetscMalloc1(size, &mpardiso->schur_idxs));
  PetscCall(PetscArrayzero(mpardiso->perm, mpardiso->n));
  PetscCall(ISGetIndices(is, &idxs));
  PetscCall(PetscArraycpy(mpardiso->schur_idxs, idxs, size));
  for (i = 0; i < size; i++) mpardiso->perm[idxs[i]] = 1;
  PetscCall(ISRestoreIndices(is, &idxs));
  if (size) { /* turn on Schur switch if the set of indices is not empty */
    mpardiso->iparm[36 - 1] = 2;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
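
/*
   Typical calling sequence from user code (a sketch; it assumes A is an assembled MATSEQAIJ
   matrix and `is` a sorted IS selecting the Schur rows/columns):

     Mat           F, S;
     MatFactorInfo info;
     PetscCall(MatFactorInfoInitialize(&info));
     PetscCall(MatGetFactor(A, MATSOLVERMKL_PARDISO, MAT_FACTOR_LU, &F));
     PetscCall(MatFactorSetSchurIS(F, is));
     PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatFactorGetSchurComplement(F, &S, NULL));
*/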

static PetscErrorCode MatDestroy_MKL_PARDISO(Mat A)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;

  PetscFunctionBegin;
  if (mat_mkl_pardiso->CleanUp) {
    mat_mkl_pardiso->phase = JOB_RELEASE_OF_ALL_MEMORY;

    PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, NULL, NULL, NULL, NULL, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm,
                                 &mat_mkl_pardiso->msglvl, NULL, NULL, &mat_mkl_pardiso->err));
  }
  PetscCall(PetscFree(mat_mkl_pardiso->perm));
  PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
  PetscCall(PetscFree(mat_mkl_pardiso->schur_idxs));
  if (mat_mkl_pardiso->freeaij) {
    PetscCall(PetscFree2(mat_mkl_pardiso->ia, mat_mkl_pardiso->ja));
    if (mat_mkl_pardiso->iparm[34] == 1) PetscCall(PetscFree(mat_mkl_pardiso->a));
  }
  PetscCall(PetscFree(A->data));

  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorSetSchurIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatMkl_PardisoSetCntl_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMKLPardisoScatterSchur_Private(Mat_MKL_PARDISO *mpardiso, PetscScalar *whole, PetscScalar *schur, PetscBool reduce)
{
  PetscFunctionBegin;
  if (reduce) { /* data given for the whole matrix */
    PetscInt i, m = 0, p = 0;
    for (i = 0; i < mpardiso->nrhs; i++) {
      PetscInt j;
      for (j = 0; j < mpardiso->schur_size; j++) schur[p + j] = whole[m + mpardiso->schur_idxs[j]];
      m += mpardiso->n;
      p += mpardiso->schur_size;
    }
  } else { /* from Schur to whole */
    PetscInt i, m = 0, p = 0;
    for (i = 0; i < mpardiso->nrhs; i++) {
      PetscInt j;
      for (j = 0; j < mpardiso->schur_size; j++) whole[m + mpardiso->schur_idxs[j]] = schur[p + j];
      m += mpardiso->n;
      p += mpardiso->schur_size;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
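
/*
   `whole` holds nrhs right-hand sides of length n stored back to back, while `schur` packs
   only the schur_idxs entries of each one; e.g. with n = 4, nrhs = 2 and schur_idxs = {1, 3},
   the reduce direction copies whole[1], whole[3], whole[5], whole[7] into schur[0..3].
*/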

static PetscErrorCode MatSolve_MKL_PARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  mat_mkl_pardiso->nrhs = 1;
  PetscCall(VecGetArrayWrite(x, &xarray));
  PetscCall(VecGetArrayRead(b, &barray));

  if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
  else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

  if (barray == xarray) { /* if the two vectors share the same memory */
    PetscScalar *work;
    if (!mat_mkl_pardiso->schur_work) {
      PetscCall(PetscMalloc1(mat_mkl_pardiso->n, &work));
    } else {
      work = mat_mkl_pardiso->schur_work;
    }
    mat_mkl_pardiso->iparm[6 - 1] = 1;
    PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, NULL,
                                 &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)work, &mat_mkl_pardiso->err));
    if (!mat_mkl_pardiso->schur_work) PetscCall(PetscFree(work));
  } else {
    mat_mkl_pardiso->iparm[6 - 1] = 0;
    PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja,
                                 mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err));
  }
  PetscCall(VecRestoreArrayRead(b, &barray));

  /* INT_TYPE may be long long, so cast to int for the %d format */
  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);

  if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
    if (!mat_mkl_pardiso->solve_interior) {
      PetscInt shift = mat_mkl_pardiso->schur_size;

      PetscCall(MatFactorFactorizeSchurComplement(A));
      /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
      if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;

      /* solve Schur complement */
      PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work, PETSC_TRUE));
      PetscCall(MatMKLPardisoSolveSchur_Private(A, mat_mkl_pardiso->schur_work, mat_mkl_pardiso->schur_work + shift));
      PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work + shift, PETSC_FALSE));
    } else { /* if we are solving for the interior problem, any value in barray[schur] forward-substituted to xarray[schur] will be neglected */
      PetscInt i;
      for (i = 0; i < mat_mkl_pardiso->schur_size; i++) xarray[mat_mkl_pardiso->schur_idxs[i]] = 0.;
    }

    /* expansion phase */
    mat_mkl_pardiso->iparm[6 - 1] = 1;
    mat_mkl_pardiso->phase        = JOB_SOLVE_BACKWARD_SUBSTITUTION;
    PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja,
                                 mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
                                 &mat_mkl_pardiso->err));
    PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);
    mat_mkl_pardiso->iparm[6 - 1] = 0;
  }
  PetscCall(VecRestoreArrayWrite(x, &xarray));
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatForwardSolve_MKL_PARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  PetscCheck(!mat_mkl_pardiso->schur, PETSC_COMM_SELF, PETSC_ERR_SUP, "Forward substitution not supported with Schur complement");

  mat_mkl_pardiso->nrhs = 1;
  PetscCall(VecGetArrayWrite(x, &xarray));
  PetscCall(VecGetArrayRead(b, &barray));

  mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

  PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err));
  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);

  PetscCall(VecRestoreArrayRead(b, &barray));
  PetscCall(VecRestoreArrayWrite(x, &xarray));
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatBackwardSolve_MKL_PARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  PetscCheck(!mat_mkl_pardiso->schur, PETSC_COMM_SELF, PETSC_ERR_SUP, "Backward substitution not supported with Schur complement");

  mat_mkl_pardiso->nrhs = 1;
  PetscCall(VecGetArrayWrite(x, &xarray));
  PetscCall(VecGetArrayRead(b, &barray));

  mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;

  PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err));
  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);

  PetscCall(VecRestoreArrayRead(b, &barray));
  PetscCall(VecRestoreArrayWrite(x, &xarray));
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSolveTranspose_MKL_PARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscInt         oiparm12;

  PetscFunctionBegin;
  oiparm12                       = mat_mkl_pardiso->iparm[12 - 1];
  mat_mkl_pardiso->iparm[12 - 1] = 2;
  PetscCall(MatSolve_MKL_PARDISO(A, b, x));
  mat_mkl_pardiso->iparm[12 - 1] = oiparm12;
  PetscFunctionReturn(PETSC_SUCCESS);
}
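
/*
   iparm[12 - 1] selects which system the solve phase targets: 0 solves A x = b, 2 solves
   A^T x = b, and 1 solves A^H x = b (per the MKL PARDISO documentation), so the transpose
   solve above just toggles the flag around a regular MatSolve_MKL_PARDISO() call.
*/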

static PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A, Mat B, Mat X)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  const PetscScalar *barray;
  PetscScalar       *xarray;
  PetscBool          flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATSEQDENSE, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATSEQDENSE matrix");
  if (X != B) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)X, MATSEQDENSE, &flg));
    PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATSEQDENSE matrix");
  }

  PetscCall(MatGetSize(B, NULL, (PetscInt *)&mat_mkl_pardiso->nrhs));

  if (mat_mkl_pardiso->nrhs > 0) {
    PetscCall(MatDenseGetArrayRead(B, &barray));
    PetscCall(MatDenseGetArrayWrite(X, &xarray));

    PetscCheck(barray != xarray, PETSC_COMM_SELF, PETSC_ERR_SUP, "B and X cannot share the same memory location");
    if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
    else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

    PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja,
                                 mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err));
    PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);

    PetscCall(MatDenseRestoreArrayRead(B, &barray));
    if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
      PetscScalar *o_schur_work = NULL;

      /* solve Schur complement */
      if (!mat_mkl_pardiso->solve_interior) {
        PetscInt shift = mat_mkl_pardiso->schur_size * mat_mkl_pardiso->nrhs, scale;
        PetscInt mem   = mat_mkl_pardiso->n * mat_mkl_pardiso->nrhs;

        PetscCall(MatFactorFactorizeSchurComplement(A));
        /* allocate extra memory if it is needed */
        scale = 1;
        if (A->schur_status == MAT_FACTOR_SCHUR_INVERTED) scale = 2;
        mem *= scale;
        if (mem > mat_mkl_pardiso->schur_work_size) {
          o_schur_work = mat_mkl_pardiso->schur_work;
          PetscCall(PetscMalloc1(mem, &mat_mkl_pardiso->schur_work));
        }
        /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
        if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;
        PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work, PETSC_TRUE));
        PetscCall(MatMKLPardisoSolveSchur_Private(A, mat_mkl_pardiso->schur_work, mat_mkl_pardiso->schur_work + shift));
        PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work + shift, PETSC_FALSE));
      } else { /* if we are solving for the interior problem, any value in barray[schur,n] forward-substituted to xarray[schur,n] will be neglected */
        PetscInt i, n, m = 0;
        for (n = 0; n < mat_mkl_pardiso->nrhs; n++) {
          for (i = 0; i < mat_mkl_pardiso->schur_size; i++) xarray[mat_mkl_pardiso->schur_idxs[i] + m] = 0.;
          m += mat_mkl_pardiso->n;
        }
      }

      /* expansion phase */
      mat_mkl_pardiso->iparm[6 - 1] = 1;
      mat_mkl_pardiso->phase        = JOB_SOLVE_BACKWARD_SUBSTITUTION;
      PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja,
                                   mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
                                   &mat_mkl_pardiso->err));
      if (o_schur_work) { /* restore original schur_work (minimal size) */
        PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
        mat_mkl_pardiso->schur_work = o_schur_work;
      }
      PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);
      mat_mkl_pardiso->iparm[6 - 1] = 0;
    }
    PetscCall(MatDenseRestoreArrayWrite(X, &xarray));
  }
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorNumeric_MKL_PARDISO(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  PetscCall((*mat_mkl_pardiso->Convert)(A, mat_mkl_pardiso->needsym, MAT_REUSE_MATRIX, &mat_mkl_pardiso->freeaij, &mat_mkl_pardiso->nz, &mat_mkl_pardiso->ia, &mat_mkl_pardiso->ja, (PetscScalar **)&mat_mkl_pardiso->a));

  mat_mkl_pardiso->phase = JOB_NUMERICAL_FACTORIZATION;
  PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, (void *)mat_mkl_pardiso->schur, &mat_mkl_pardiso->err));
  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);

  /* report flops */
  if (mat_mkl_pardiso->iparm[18] > 0) PetscCall(PetscLogFlops(PetscPowRealInt(10., 6) * mat_mkl_pardiso->iparm[18]));

  if (F->schur) { /* the Schur output from PARDISO is in row-major format */
#if defined(PETSC_HAVE_CUDA)
    F->schur->offloadmask = PETSC_OFFLOAD_CPU;
#endif
    PetscCall(MatFactorRestoreSchurComplement(F, NULL, MAT_FACTOR_SCHUR_UNFACTORED));
    PetscCall(MatTranspose(F->schur, MAT_INPLACE_MATRIX, &F->schur));
  }
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  mat_mkl_pardiso->CleanUp  = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetFromOptions_MKL_PARDISO(Mat F, Mat A)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;
  PetscInt         icntl, bs, threads = 1;
  PetscBool        flg;

  PetscFunctionBegin;
  PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "MKL_PARDISO Options", "Mat");

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_65", "Suggested number of threads to use within MKL PARDISO", "None", threads, &threads, &flg));
  if (flg) PetscSetMKL_PARDISOThreads((int)threads);

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_66", "Maximum number of factors with identical sparsity structure that must be kept in memory at the same time", "None", mat_mkl_pardiso->maxfct, &icntl, &flg));
  if (flg) mat_mkl_pardiso->maxfct = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_67", "Indicates the actual matrix for the solution phase", "None", mat_mkl_pardiso->mnum, &icntl, &flg));
  if (flg) mat_mkl_pardiso->mnum = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_68", "Message level information", "None", mat_mkl_pardiso->msglvl, &icntl, &flg));
  if (flg) mat_mkl_pardiso->msglvl = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_69", "Defines the matrix type", "None", mat_mkl_pardiso->mtype, &icntl, &flg));
  if (flg) {
    void *pt[IPARM_SIZE];
    mat_mkl_pardiso->mtype = icntl;
    icntl                  = mat_mkl_pardiso->iparm[34];
    bs                     = mat_mkl_pardiso->iparm[36];
    MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
#if defined(PETSC_USE_REAL_SINGLE)
    mat_mkl_pardiso->iparm[27] = 1;
#else
    mat_mkl_pardiso->iparm[27] = 0;
#endif
    mat_mkl_pardiso->iparm[34] = icntl;
    mat_mkl_pardiso->iparm[36] = bs;
  }

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_1", "Use default values (if 0)", "None", mat_mkl_pardiso->iparm[0], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[0] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_2", "Fill-in reducing ordering for the input matrix", "None", mat_mkl_pardiso->iparm[1], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[1] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_4", "Preconditioned CGS/CG", "None", mat_mkl_pardiso->iparm[3], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[3] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_5", "User permutation", "None", mat_mkl_pardiso->iparm[4], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[4] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_6", "Write solution on x", "None", mat_mkl_pardiso->iparm[5], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[5] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_8", "Iterative refinement step", "None", mat_mkl_pardiso->iparm[7], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[7] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_10", "Pivoting perturbation", "None", mat_mkl_pardiso->iparm[9], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[9] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_11", "Scaling vectors", "None", mat_mkl_pardiso->iparm[10], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[10] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_12", "Solve with transposed or conjugate transposed matrix A", "None", mat_mkl_pardiso->iparm[11], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[11] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_13", "Improved accuracy using (non-) symmetric weighted matching", "None", mat_mkl_pardiso->iparm[12], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[12] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_18", "Number of non-zero elements", "None", mat_mkl_pardiso->iparm[17], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[17] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_19", "Report number of floating point operations (0 to disable)", "None", mat_mkl_pardiso->iparm[18], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[18] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_21", "Pivoting for symmetric indefinite matrices", "None", mat_mkl_pardiso->iparm[20], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[20] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_24", "Parallel factorization control", "None", mat_mkl_pardiso->iparm[23], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[23] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_25", "Parallel forward/backward solve control", "None", mat_mkl_pardiso->iparm[24], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[24] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_27", "Matrix checker", "None", mat_mkl_pardiso->iparm[26], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[26] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_31", "Partial solve and computing selected components of the solution vectors", "None", mat_mkl_pardiso->iparm[30], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[30] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_34", "Optimal number of threads for conditional numerical reproducibility (CNR) mode", "None", mat_mkl_pardiso->iparm[33], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[33] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_60", "Intel MKL PARDISO mode", "None", mat_mkl_pardiso->iparm[59], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[59] = icntl;
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorMKL_PARDISOInitialize_Private(Mat A, MatFactorType ftype, Mat_MKL_PARDISO *mat_mkl_pardiso)
{
  PetscInt  i, bs;
  PetscBool match;

  PetscFunctionBegin;
  for (i = 0; i < IPARM_SIZE; i++) mat_mkl_pardiso->iparm[i] = 0;
  for (i = 0; i < IPARM_SIZE; i++) mat_mkl_pardiso->pt[i] = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  mat_mkl_pardiso->iparm[27] = 1;
#else
  mat_mkl_pardiso->iparm[27] = 0;
#endif
  /* Default options for both sym and unsym */
  mat_mkl_pardiso->iparm[0]  = 1;  /* Solver default parameters overridden with those provided in iparm */
  mat_mkl_pardiso->iparm[1]  = 2;  /* Metis reordering */
  mat_mkl_pardiso->iparm[5]  = 0;  /* Write solution into x */
  mat_mkl_pardiso->iparm[7]  = 0;  /* Max number of iterative refinement steps */
  mat_mkl_pardiso->iparm[17] = -1; /* Output: Number of nonzeros in the factor LU */
  mat_mkl_pardiso->iparm[18] = -1; /* Output: Mflops for LU factorization */
#if 0
  mat_mkl_pardiso->iparm[23] =  1; /* Parallel factorization control */
#endif
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &match, MATSEQBAIJ, MATSEQSBAIJ, ""));
  PetscCall(MatGetBlockSize(A, &bs));
  if (!match || bs == 1) {
    mat_mkl_pardiso->iparm[34] = 1; /* Use C-style (zero-based) indexing for the ia and ja arrays */
    mat_mkl_pardiso->n         = A->rmap->N;
  } else {
    mat_mkl_pardiso->iparm[34] = 0; /* Use Fortran-style (one-based) indexing for the ia and ja arrays */
    mat_mkl_pardiso->iparm[36] = bs;
    mat_mkl_pardiso->n         = A->rmap->N / bs;
  }
  mat_mkl_pardiso->iparm[39] = 0; /* Input: matrix/rhs/solution stored on rank-0 */

  mat_mkl_pardiso->CleanUp = PETSC_FALSE;
  mat_mkl_pardiso->maxfct  = 1; /* Maximum number of numerical factorizations. */
  mat_mkl_pardiso->mnum    = 1; /* Which factorization to use. */
  mat_mkl_pardiso->msglvl  = 0; /* 0: do not print; 1: print statistical information to file */
  mat_mkl_pardiso->phase   = -1;
  mat_mkl_pardiso->err     = 0;
  mat_mkl_pardiso->nrhs    = 1;

  if (ftype == MAT_FACTOR_LU) {
    mat_mkl_pardiso->iparm[9]  = 13; /* Perturb the pivot elements with 1E-13 */
    mat_mkl_pardiso->iparm[10] = 1;  /* Use nonsymmetric permutation and scaling MPS */
    mat_mkl_pardiso->iparm[12] = 1;  /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
  } else {
    mat_mkl_pardiso->iparm[9]  = 8; /* Perturb the pivot elements with 1E-8 */
    mat_mkl_pardiso->iparm[10] = 0; /* Do not use nonsymmetric permutation and scaling MPS */
    mat_mkl_pardiso->iparm[12] = 1; /* Switch on Maximum Weighted Matching algorithm (not the default for symmetric matrices) */
#if defined(PETSC_USE_DEBUG)
    mat_mkl_pardiso->iparm[26] = 1; /* Matrix checker */
#endif
  }
  PetscCall(PetscCalloc1(A->rmap->N, &mat_mkl_pardiso->perm)); /* PetscCalloc1() already scales the allocation by the element size */
  mat_mkl_pardiso->schur_size = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorSymbolic_AIJMKL_PARDISO_Private(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  mat_mkl_pardiso->matstruc = DIFFERENT_NONZERO_PATTERN;
  PetscCall(MatSetFromOptions_MKL_PARDISO(F, A));
  /* throw away any previously computed structure */
  if (mat_mkl_pardiso->freeaij) {
    PetscCall(PetscFree2(mat_mkl_pardiso->ia, mat_mkl_pardiso->ja));
    if (mat_mkl_pardiso->iparm[34] == 1) PetscCall(PetscFree(mat_mkl_pardiso->a));
  }
  PetscCall((*mat_mkl_pardiso->Convert)(A, mat_mkl_pardiso->needsym, MAT_INITIAL_MATRIX, &mat_mkl_pardiso->freeaij, &mat_mkl_pardiso->nz, &mat_mkl_pardiso->ia, &mat_mkl_pardiso->ja, (PetscScalar **)&mat_mkl_pardiso->a));
  if (mat_mkl_pardiso->iparm[34] == 1) mat_mkl_pardiso->n = A->rmap->N;
  else mat_mkl_pardiso->n = A->rmap->N / A->rmap->bs;

  mat_mkl_pardiso->phase = JOB_ANALYSIS;

  /* reset flops counting if requested */
  if (mat_mkl_pardiso->iparm[18]) mat_mkl_pardiso->iparm[18] = -1;

  PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL, &mat_mkl_pardiso->err));
  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", (int)mat_mkl_pardiso->err);

  mat_mkl_pardiso->CleanUp = PETSC_TRUE;

  if (F->factortype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_MKL_PARDISO;
  else F->ops->choleskyfactornumeric = MatFactorNumeric_MKL_PARDISO;

  F->ops->solve          = MatSolve_MKL_PARDISO;
  F->ops->solvetranspose = MatSolveTranspose_MKL_PARDISO;
  F->ops->matsolve       = MatMatSolve_MKL_PARDISO;
  if (F->factortype == MAT_FACTOR_LU || (!PetscDefined(USE_COMPLEX) && F->factortype == MAT_FACTOR_CHOLESKY && A->spd == PETSC_BOOL3_TRUE)) {
    F->ops->backwardsolve = MatBackwardSolve_MKL_PARDISO;
    F->ops->forwardsolve  = MatForwardSolve_MKL_PARDISO;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatLUFactorSymbolic_AIJMKL_PARDISO(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#if !defined(PETSC_USE_COMPLEX)
static PetscErrorCode MatGetInertia_MKL_PARDISO(Mat F, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  if (nneg) *nneg = mat_mkl_pardiso->iparm[22];
  if (npos) *npos = mat_mkl_pardiso->iparm[21];
  if (nzero) *nzero = F->rmap->N - (mat_mkl_pardiso->iparm[22] + mat_mkl_pardiso->iparm[21]);
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

static PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_PARDISO(Mat F, Mat A, IS r, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
  F->ops->getinertia = NULL;
#if !defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = MatGetInertia_MKL_PARDISO;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_MKL_PARDISO(Mat A, PetscViewer viewer)
{
  PetscBool         iascii;
  PetscViewerFormat format;
  Mat_MKL_PARDISO  *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscInt          i;

  PetscFunctionBegin;
  if (A->ops->solve != MatSolve_MKL_PARDISO) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO) {
      /* INT_TYPE may be long long, so cast the fields to int for the %d format */
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO run parameters:\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO phase:             %d \n", (int)mat_mkl_pardiso->phase));
      for (i = 1; i <= 64; i++) PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO iparm[%d]:     %d \n", (int)i, (int)mat_mkl_pardiso->iparm[i - 1]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO maxfct:     %d \n", (int)mat_mkl_pardiso->maxfct));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO mnum:     %d \n", (int)mat_mkl_pardiso->mnum));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO mtype:     %d \n", (int)mat_mkl_pardiso->mtype));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO n:     %d \n", (int)mat_mkl_pardiso->n));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO nrhs:     %d \n", (int)mat_mkl_pardiso->nrhs));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO msglvl:     %d \n", (int)mat_mkl_pardiso->msglvl));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetInfo_MKL_PARDISO(Mat A, MatInfoType flag, MatInfo *info)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_used           = mat_mkl_pardiso->iparm[17];
  info->nz_allocated      = mat_mkl_pardiso->iparm[17];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMkl_PardisoSetCntl_MKL_PARDISO(Mat F, PetscInt icntl, PetscInt ival)
{
  PetscInt         backup, bs;
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  if (icntl <= 64) {
    mat_mkl_pardiso->iparm[icntl - 1] = ival;
  } else {
    if (icntl == 65) PetscSetMKL_PARDISOThreads(ival);
    else if (icntl == 66) mat_mkl_pardiso->maxfct = ival;
    else if (icntl == 67) mat_mkl_pardiso->mnum = ival;
    else if (icntl == 68) mat_mkl_pardiso->msglvl = ival;
    else if (icntl == 69) {
      void *pt[IPARM_SIZE];
      backup                 = mat_mkl_pardiso->iparm[34];
      bs                     = mat_mkl_pardiso->iparm[36];
      mat_mkl_pardiso->mtype = ival;
      MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
#if defined(PETSC_USE_REAL_SINGLE)
      mat_mkl_pardiso->iparm[27] = 1;
#else
      mat_mkl_pardiso->iparm[27] = 0;
#endif
      mat_mkl_pardiso->iparm[34] = backup;
      mat_mkl_pardiso->iparm[36] = bs;
    } else if (icntl == 70) mat_mkl_pardiso->solve_interior = (PetscBool) !!ival;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMkl_PardisoSetCntl - Set MKL PARDISO <https://www.intel.com/content/www/us/en/docs/onemkl/developer-reference-c/2023-2/onemkl-pardiso-parallel-direct-sparse-solver-iface.html> parameters

  Logically Collective

  Input Parameters:
+ F     - the factored matrix obtained by calling `MatGetFactor()`
. icntl - index of MKL PARDISO parameter
- ival  - value of MKL PARDISO parameter

  Options Database Key:
. -mat_mkl_pardiso_<icntl> <ival> - change the option numbered icntl to the value ival

  Level: beginner

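  Example Usage:
  A minimal sketch (it assumes `A` is an assembled `MATSEQAIJ` matrix) that requests two iterative refinement steps by setting iparm(8) through icntl 8
.vb
  Mat F;
  PetscCall(MatGetFactor(A, MATSOLVERMKL_PARDISO, MAT_FACTOR_LU, &F));
  PetscCall(MatMkl_PardisoSetCntl(F, 8, 2));
.ve
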
.seealso: [](ch_matrices), `Mat`, `MATSOLVERMKL_PARDISO`, `MatGetFactor()`
@*/
PetscErrorCode MatMkl_PardisoSetCntl(Mat F, PetscInt icntl, PetscInt ival)
{
  PetscFunctionBegin;
  PetscTryMethod(F, "MatMkl_PardisoSetCntl_C", (Mat, PetscInt, PetscInt), (F, icntl, ival));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATSOLVERMKL_PARDISO - A matrix type providing direct solvers (LU and Cholesky) for
  `MATSEQAIJ`, `MATSEQBAIJ`, and `MATSEQSBAIJ` matrices via the external package MKL PARDISO
  <https://www.intel.com/content/www/us/en/docs/onemkl/developer-reference-c/2024-0/onemkl-pardiso-parallel-direct-sparse-solver-iface.html>.

  Use `-pc_type lu` `-pc_factor_mat_solver_type mkl_pardiso` to use this direct solver

  Options Database Keys:
+ -mat_mkl_pardiso_65 - Suggested number of threads to use within MKL PARDISO
. -mat_mkl_pardiso_66 - Maximum number of factors with identical sparsity structure that must be kept in memory at the same time
. -mat_mkl_pardiso_67 - Indicates the actual matrix for the solution phase
. -mat_mkl_pardiso_68 - Message level information, use 1 to get detailed information on the solver options
. -mat_mkl_pardiso_69 - Defines the matrix type. IMPORTANT: When you set this flag, iparm parameters are going to be set to the default ones for the matrix type
. -mat_mkl_pardiso_1  - Use default values
. -mat_mkl_pardiso_2  - Fill-in reducing ordering for the input matrix
. -mat_mkl_pardiso_4  - Preconditioned CGS/CG
. -mat_mkl_pardiso_5  - User permutation
. -mat_mkl_pardiso_6  - Write solution on x
. -mat_mkl_pardiso_8  - Iterative refinement step
. -mat_mkl_pardiso_10 - Pivoting perturbation
. -mat_mkl_pardiso_11 - Scaling vectors
. -mat_mkl_pardiso_12 - Solve with transposed or conjugate transposed matrix A
. -mat_mkl_pardiso_13 - Improved accuracy using (non-) symmetric weighted matching
. -mat_mkl_pardiso_18 - Number of non-zero elements
. -mat_mkl_pardiso_19 - Report number of floating point operations
. -mat_mkl_pardiso_21 - Pivoting for symmetric indefinite matrices
. -mat_mkl_pardiso_24 - Parallel factorization control
. -mat_mkl_pardiso_25 - Parallel forward/backward solve control
. -mat_mkl_pardiso_27 - Matrix checker
. -mat_mkl_pardiso_31 - Partial solve and computing selected components of the solution vectors
. -mat_mkl_pardiso_34 - Optimal number of threads for conditional numerical reproducibility (CNR) mode
- -mat_mkl_pardiso_60 - Intel MKL PARDISO mode

  Level: beginner

  Notes:
  Use `-mat_mkl_pardiso_68 1` to display the number of threads the solver is using. MKL does not provide a way to directly access this
  information.

  For more information on the options, check the MKL PARDISO manual

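  Example Usage:
  For example, a linear solve can be dispatched to MKL PARDISO entirely from the options database (a sketch; `./app` stands for any PETSc program that solves a linear system with a `KSP`)
.vb
  ./app -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mkl_pardiso -mat_mkl_pardiso_68 1
.ve
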
.seealso: [](ch_matrices), `Mat`, `MATSEQAIJ`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatMkl_PardisoSetCntl()`, `MATSOLVERMKL_CPARDISO`
M*/
static PetscErrorCode MatFactorGetSolverType_mkl_pardiso(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMKL_PARDISO;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mkl_pardiso(Mat A, MatFactorType ftype, Mat *F)
{
  Mat              B;
  Mat_MKL_PARDISO *mat_mkl_pardiso;
  PetscBool        isSeqAIJ, isSeqBAIJ, isSeqSBAIJ;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isSeqAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQBAIJ, &isSeqBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("mkl_pardiso", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  PetscCall(PetscNew(&mat_mkl_pardiso));
  B->data = mat_mkl_pardiso;

  PetscCall(MatFactorMKL_PARDISOInitialize_Private(A, ftype, mat_mkl_pardiso));
  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMKL_PARDISO;
    B->factortype            = MAT_FACTOR_LU;
    mat_mkl_pardiso->needsym = PETSC_FALSE;
    if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
    else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
    else {
      PetscCheck(!isSeqSBAIJ, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for MKL PARDISO LU factor with SEQSBAIJ format! Use MAT_FACTOR_CHOLESKY instead");
      SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for MKL PARDISO LU with %s format", ((PetscObject)A)->type_name);
    }
#if defined(PETSC_USE_COMPLEX)
    mat_mkl_pardiso->mtype = 13;
#else
    mat_mkl_pardiso->mtype = 11;
#endif
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_AIJMKL_PARDISO;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
    else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
    else if (isSeqSBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqsbaij;
    else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for PARDISO CHOLESKY with %s format", ((PetscObject)A)->type_name);

    mat_mkl_pardiso->needsym = PETSC_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    if (A->spd == PETSC_BOOL3_TRUE) mat_mkl_pardiso->mtype = 2;
    else mat_mkl_pardiso->mtype = -2;
#else
    mat_mkl_pardiso->mtype = 6;
    PetscCheck(A->hermitian != PETSC_BOOL3_TRUE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for MKL PARDISO CHOLESKY with Hermitian matrices! Use MAT_FACTOR_LU instead");
#endif
  }
  B->ops->destroy = MatDestroy_MKL_PARDISO;
  B->ops->view    = MatView_MKL_PARDISO;
  B->ops->getinfo = MatGetInfo_MKL_PARDISO;
  B->factortype   = ftype;
  B->assembled    = PETSC_TRUE;

  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERMKL_PARDISO, &B->solvertype));

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_mkl_pardiso));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorSetSchurIS_C", MatFactorSetSchurIS_MKL_PARDISO));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMkl_PardisoSetCntl_C", MatMkl_PardisoSetCntl_MKL_PARDISO));

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQBAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQSBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_mkl_pardiso));
  PetscFunctionReturn(PETSC_SUCCESS);
}