#include <petscsys.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_HAVE_MKL_INTEL_ILP64)
  #define MKL_ILP64
#endif
#include <mkl.h>
#include <mkl_cluster_sparse_solver.h>

/*
 *  Possible mkl_cpardiso phases that control the execution of the solver.
 *  For more information check the mkl_cpardiso manual.
 */
#define JOB_ANALYSIS                                                    11
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION                            12
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 13
#define JOB_NUMERICAL_FACTORIZATION                                     22
#define JOB_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT          23
#define JOB_SOLVE_ITERATIVE_REFINEMENT                                  33
#define JOB_SOLVE_FORWARD_SUBSTITUTION                                  331
#define JOB_SOLVE_DIAGONAL_SUBSTITUTION                                 332
#define JOB_SOLVE_BACKWARD_SUBSTITUTION                                 333
#define JOB_RELEASE_OF_LU_MEMORY                                        0
#define JOB_RELEASE_OF_ALL_MEMORY                                       -1
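
/*
 * A note on the encoding (per the MKL PARDISO documentation): a two-digit phase value ij
 * runs every step from phase i through phase j (e.g. 13 = analysis, numerical
 * factorization, and solve), while 331/332/333 select the individual forward,
 * diagonal, and backward substitution steps of the solve.
 */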

#define IPARM_SIZE 64
#define INT_TYPE   MKL_INT

static const char *Err_MSG_CPardiso(int errNo)
{
  switch (errNo) {
  case -1:
    return "input inconsistent";
  case -2:
    return "not enough memory";
  case -3:
    return "reordering problem";
  case -4:
    return "zero pivot, numerical factorization or iterative refinement problem";
  case -5:
    return "unclassified (internal) error";
  case -6:
    return "preordering failed (matrix types 11, 13 only)";
  case -7:
    return "diagonal matrix problem";
  case -8:
    return "32-bit integer overflow problem";
  case -9:
    return "not enough memory for OOC";
  case -10:
    return "problems with opening OOC temporary files";
  case -11:
    return "read/write problems with the OOC data file";
  default:
    return "unknown error";
  }
}

#define PetscCallCluster(f) PetscStackCallExternalVoid("cluster_sparse_solver", f);

/*
 *  Internal data structure.
 *  For more information check the mkl_cpardiso manual.
 */

typedef struct {
  /* Configuration vector */
  INT_TYPE iparm[IPARM_SIZE];

  /*
   * Internal mkl_cpardiso memory location.
   * After the first call to mkl_cpardiso do not modify pt, as that could cause a serious memory leak.
   */
  void *pt[IPARM_SIZE];

  MPI_Fint comm_mkl_cpardiso;

  /* Basic mkl_cpardiso info */
  INT_TYPE phase, maxfct, mnum, mtype, n, nrhs, msglvl, err;

  /* Matrix values and matrix nonzero structure */
  PetscScalar *a;

  INT_TYPE *ia, *ja;

  /* Number of nonzero elements */
  INT_TYPE nz;

  /* Row permutation vector */
  INT_TYPE *perm;

  /* Indicates whether the matrix preserves its sparse structure. */
  MatStructure matstruc;

  PetscErrorCode (*ConvertToTriples)(Mat, MatReuse, PetscInt *, PetscInt **, PetscInt **, PetscScalar **);

  /* True if the mkl_cpardiso functions have been used. */
  PetscBool CleanUp;
} Mat_MKL_CPARDISO;

/*
 * Copy the elements of matrix A.
 * Input:
 *   - Mat A: MATSEQAIJ matrix
 *   - MatReuse reuse:
 *     - MAT_INITIAL_MATRIX: create a new aij representation
 *     - MAT_REUSE_MATRIX: reuse the aij representation and just change values
 * Output:
 *   - PetscInt *nnz: number of nonzero elements
 *   - PetscInt **r: pointer to the i index
 *   - PetscInt **c: pointer to the j elements
 *   - PetscScalar **v: nonzero elements
 */
static PetscErrorCode MatCopy_seqaij_seqaij_MKL_CPARDISO(Mat A, MatReuse reuse, PetscInt *nnz, PetscInt **r, PetscInt **c, PetscScalar **v)
{
  Mat_SeqAIJ *aa = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  *v = aa->a;
  if (reuse == MAT_INITIAL_MATRIX) {
    *r   = (INT_TYPE *)aa->i;
    *c   = (INT_TYPE *)aa->j;
    *nnz = aa->nz;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
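
/*
 * Example (a sketch): for the 2x2 MATSEQAIJ matrix [1 2; 0 3], the routine above
 * returns the zero-based CSR arrays r = {0, 2, 3}, c = {0, 1, 1}, v = {1, 2, 3},
 * aliased directly to the matrix's internal storage (no copy is made).
 */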

static PetscErrorCode MatConvertToTriples_mpiaij_mpiaij_MKL_CPARDISO(Mat A, MatReuse reuse, PetscInt *nnz, PetscInt **r, PetscInt **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj, *garray, m = A->rmap->n, *ajj, *bjj;
  PetscInt           rstart, nz, i, j, countA, countB;
  PetscInt          *row, *col;
  const PetscScalar *av, *bv;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ        *aa  = (Mat_SeqAIJ *)mat->A->data;
  Mat_SeqAIJ        *bb  = (Mat_SeqAIJ *)mat->B->data;
  PetscInt           colA_start, jB, jcol;

  PetscFunctionBegin;
  ai     = aa->i;
  aj     = aa->j;
  bi     = bb->i;
  bj     = bb->j;
  rstart = A->rmap->rstart;
  av     = aa->a;
  bv     = bb->a;

  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    PetscCall(PetscMalloc3(m + 1, &row, nz, &col, nz, &val));
    *r = row;
    *c = col;
    *v = val;
  } else {
    row = *r;
    col = *c;
    val = *v;
  }

  nz = 0;
  for (i = 0; i < m; i++) {
    row[i] = nz;
    countA = ai[i + 1] - ai[i];
    countB = bi[i + 1] - bi[i];
    ajj    = aj + ai[i]; /* ptr to the beginning of this row */
    bjj    = bj + bi[i];

    /* B part, smaller col index */
    colA_start = rstart + ajj[0]; /* the smallest global col index of A */
    jB         = 0;
    for (j = 0; j < countB; j++) {
      jcol = garray[bjj[j]];
      if (jcol > colA_start) break;
      col[nz]   = jcol;
      val[nz++] = *bv++;
    }
    jB = j;

    /* A part */
    for (j = 0; j < countA; j++) {
      col[nz]   = rstart + ajj[j];
      val[nz++] = *av++;
    }

    /* B part, larger col index */
    for (j = jB; j < countB; j++) {
      col[nz]   = garray[bjj[j]];
      val[nz++] = *bv++;
    }
  }
  row[m] = nz;
  PetscFunctionReturn(PETSC_SUCCESS);
}
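
/*
 * Example (a sketch): on a rank with rstart = 2 and garray = {0, 5}, a row whose
 * diagonal-block (A) columns are {0, 1} and off-diagonal-block (B) columns are {0, 1}
 * is emitted with global columns {0, 2, 3, 5}: B entries whose global index precedes
 * the first A column come first, then the A part shifted by rstart, then the rest of B.
 */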

static PetscErrorCode MatConvertToTriples_mpibaij_mpibaij_MKL_CPARDISO(Mat A, MatReuse reuse, PetscInt *nnz, PetscInt **r, PetscInt **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj, *garray, bs = A->rmap->bs, bs2 = bs * bs, m = A->rmap->n / bs, *ajj, *bjj;
  PetscInt           rstart, nz, i, j, countA, countB;
  PetscInt          *row, *col;
  const PetscScalar *av, *bv;
  PetscScalar       *val;
  Mat_MPIBAIJ       *mat = (Mat_MPIBAIJ *)A->data;
  Mat_SeqBAIJ       *aa  = (Mat_SeqBAIJ *)mat->A->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ *)mat->B->data;
  PetscInt           colA_start, jB, jcol;

  PetscFunctionBegin;
  ai     = aa->i;
  aj     = aa->j;
  bi     = bb->i;
  bj     = bb->j;
  rstart = A->rmap->rstart / bs;
  av     = aa->a;
  bv     = bb->a;

  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    PetscCall(PetscMalloc3(m + 1, &row, nz, &col, nz * bs2, &val));
    *r = row;
    *c = col;
    *v = val;
  } else {
    row = *r;
    col = *c;
    val = *v;
  }

  nz = 0;
  for (i = 0; i < m; i++) {
    row[i] = nz + 1;
    countA = ai[i + 1] - ai[i];
    countB = bi[i + 1] - bi[i];
    ajj    = aj + ai[i]; /* ptr to the beginning of this row */
    bjj    = bj + bi[i];

    /* B part, smaller col index */
    colA_start = rstart + (countA > 0 ? ajj[0] : 0); /* the smallest global col index of A */
    jB         = 0;
    for (j = 0; j < countB; j++) {
      jcol = garray[bjj[j]];
      if (jcol > colA_start) break;
      col[nz++] = jcol + 1;
    }
    jB = j;
    PetscCall(PetscArraycpy(val, bv, jB * bs2));
    val += jB * bs2;
    bv += jB * bs2;

    /* A part */
    for (j = 0; j < countA; j++) col[nz++] = rstart + ajj[j] + 1;
    PetscCall(PetscArraycpy(val, av, countA * bs2));
    val += countA * bs2;
    av += countA * bs2;

    /* B part, larger col index */
    for (j = jB; j < countB; j++) col[nz++] = garray[bjj[j]] + 1;
    PetscCall(PetscArraycpy(val, bv, (countB - jB) * bs2));
    val += (countB - jB) * bs2;
    bv += (countB - jB) * bs2;
  }
  row[m] = nz + 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij_MKL_CPARDISO(Mat A, MatReuse reuse, PetscInt *nnz, PetscInt **r, PetscInt **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj, *garray, bs = A->rmap->bs, bs2 = bs * bs, m = A->rmap->n / bs, *ajj, *bjj;
  PetscInt           rstart, nz, i, j, countA, countB;
  PetscInt          *row, *col;
  const PetscScalar *av, *bv;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ *)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ *)mat->A->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ *)mat->B->data;

  PetscFunctionBegin;
  ai     = aa->i;
  aj     = aa->j;
  bi     = bb->i;
  bj     = bb->j;
  rstart = A->rmap->rstart / bs;
  av     = aa->a;
  bv     = bb->a;

  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    PetscCall(PetscMalloc3(m + 1, &row, nz, &col, nz * bs2, &val));
    *r = row;
    *c = col;
    *v = val;
  } else {
    row = *r;
    col = *c;
    val = *v;
  }

  nz = 0;
  for (i = 0; i < m; i++) {
    row[i] = nz + 1;
    countA = ai[i + 1] - ai[i];
    countB = bi[i + 1] - bi[i];
    ajj    = aj + ai[i]; /* ptr to the beginning of this row */
    bjj    = bj + bi[i];

    /* A part */
    for (j = 0; j < countA; j++) col[nz++] = rstart + ajj[j] + 1;
    PetscCall(PetscArraycpy(val, av, countA * bs2));
    val += countA * bs2;
    av += countA * bs2;

    /* B part, larger col index */
    for (j = 0; j < countB; j++) col[nz++] = garray[bjj[j]] + 1;
    PetscCall(PetscArraycpy(val, bv, countB * bs2));
    val += countB * bs2;
    bv += countB * bs2;
  }
  row[m] = nz + 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}
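
/*
 * Note: the two blocked converters above emit 1-based (Fortran-style) row and column
 * indices, matching the iparm[34] = 0 setting chosen in PetscInitialize_MKL_CPARDISO()
 * when a block size is in use; the AIJ paths keep 0-based (C-style) indexing, iparm[34] = 1.
 */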

/*
 * Free the memory for the Mat_MKL_CPARDISO structure and the objects it points to.
 */
static PetscErrorCode MatDestroy_MKL_CPARDISO(Mat A)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;
  MPI_Comm          comm;

  PetscFunctionBegin;
  /* Terminate instance, deallocate memories */
  if (mat_mkl_cpardiso->CleanUp) {
    mat_mkl_cpardiso->phase = JOB_RELEASE_OF_ALL_MEMORY;

    PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, NULL, NULL, NULL, mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs,
                                           mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err));
  }
  if (mat_mkl_cpardiso->ConvertToTriples != MatCopy_seqaij_seqaij_MKL_CPARDISO) PetscCall(PetscFree3(mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, mat_mkl_cpardiso->a));
  comm = MPI_Comm_f2c(mat_mkl_cpardiso->comm_mkl_cpardiso);
  PetscCallMPI(MPI_Comm_free(&comm));
  PetscCall(PetscFree(A->data));

  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatMkl_CPardisoSetCntl_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
 * Solves Ax = b for x.
 */
static PetscErrorCode MatSolve_MKL_CPARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_CPARDISO  *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  mat_mkl_cpardiso->nrhs = 1;
  PetscCall(VecGetArray(x, &xarray));
  PetscCall(VecGetArrayRead(b, &barray));

  /* solve phase */
  mat_mkl_cpardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
  PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja,
                                         mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err));
  PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err));

  PetscCall(VecRestoreArray(x, &xarray));
  PetscCall(VecRestoreArrayRead(b, &barray));
  mat_mkl_cpardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatForwardSolve_MKL_CPARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_CPARDISO  *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  mat_mkl_cpardiso->nrhs = 1;
  PetscCall(VecGetArray(x, &xarray));
  PetscCall(VecGetArrayRead(b, &barray));

  /* forward substitution phase */
  mat_mkl_cpardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;
  PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja,
                                         mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err));
  PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err));

  PetscCall(VecRestoreArray(x, &xarray));
  PetscCall(VecRestoreArrayRead(b, &barray));
  mat_mkl_cpardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatBackwardSolve_MKL_CPARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_CPARDISO  *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  mat_mkl_cpardiso->nrhs = 1;
  PetscCall(VecGetArray(x, &xarray));
  PetscCall(VecGetArrayRead(b, &barray));

  /* backward substitution phase */
  mat_mkl_cpardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
  PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja,
                                         mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err));
  PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err));

  PetscCall(VecRestoreArray(x, &xarray));
  PetscCall(VecRestoreArrayRead(b, &barray));
  mat_mkl_cpardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSolveTranspose_MKL_CPARDISO(Mat A, Vec b, Vec x)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;

  PetscFunctionBegin;
  /* iparm(12) requests a (conjugate-)transposed solve per the MKL convention; the value differs between the complex and real builds so that the transposed system is solved, then it is reset to 0 (normal solve) */
  mat_mkl_cpardiso->iparm[12 - 1] = PetscDefined(USE_COMPLEX) ? 1 : 2;
  PetscCall(MatSolve_MKL_CPARDISO(A, b, x));
  mat_mkl_cpardiso->iparm[12 - 1] = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatSolve_MKL_CPARDISO(Mat A, Mat B, Mat X)
{
  Mat_MKL_CPARDISO  *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  PetscCall(MatGetSize(B, NULL, (PetscInt *)&mat_mkl_cpardiso->nrhs));

  if (mat_mkl_cpardiso->nrhs > 0) {
    PetscCall(MatDenseGetArrayRead(B, &barray));
    PetscCall(MatDenseGetArray(X, &xarray));

    PetscCheck(barray != xarray, PETSC_COMM_SELF, PETSC_ERR_SUP, "B and X cannot share the same memory location");

    /* solve phase */
    mat_mkl_cpardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
    PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja,
                                           mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err));
    PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err));
    PetscCall(MatDenseRestoreArrayRead(B, &barray));
    PetscCall(MatDenseRestoreArray(X, &xarray));
  }
  mat_mkl_cpardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
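
/*
 * Note: for multiple right-hand sides the dense B and X arrays are handed to
 * cluster_sparse_solver() as contiguous column-major storage of size n * nrhs,
 * which is what MatDenseGetArray() provides (assuming the leading dimension equals n).
 */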

/*
 * Numerical factorization phase. Shared by the LU and Cholesky numeric paths.
 */
static PetscErrorCode MatFactorNumeric_MKL_CPARDISO(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)F->data;

  PetscFunctionBegin;
  mat_mkl_cpardiso->matstruc = SAME_NONZERO_PATTERN;
  PetscCall((*mat_mkl_cpardiso->ConvertToTriples)(A, MAT_REUSE_MATRIX, &mat_mkl_cpardiso->nz, &mat_mkl_cpardiso->ia, &mat_mkl_cpardiso->ja, &mat_mkl_cpardiso->a));

  mat_mkl_cpardiso->phase = JOB_NUMERICAL_FACTORIZATION;
  PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja,
                                         mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, &mat_mkl_cpardiso->err));
  PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err));

  mat_mkl_cpardiso->matstruc = SAME_NONZERO_PATTERN;
  mat_mkl_cpardiso->CleanUp  = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Sets mkl_cpardiso options from the options database; -mat_mkl_cpardiso_<n> with n <= 64 maps to the 1-based iparm(n), stored in iparm[n - 1] */
static PetscErrorCode MatSetFromOptions_MKL_CPARDISO(Mat F, Mat A)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)F->data;
  PetscInt          icntl, threads = 0;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "MKL Cluster PARDISO Options", "Mat");
  PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_65", "Suggested number of threads to use within MKL Cluster PARDISO", "None", threads, &threads, &flg));
  if (flg) mkl_set_num_threads((int)threads);

  PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_66", "Maximum number of factors with identical sparsity structure that must be kept in memory at the same time", "None", mat_mkl_cpardiso->maxfct, &icntl, &flg));
  if (flg) mat_mkl_cpardiso->maxfct = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_67", "Indicates the actual matrix for the solution phase", "None", mat_mkl_cpardiso->mnum, &icntl, &flg));
  if (flg) mat_mkl_cpardiso->mnum = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_68", "Message level information", "None", mat_mkl_cpardiso->msglvl, &icntl, &flg));
  if (flg) mat_mkl_cpardiso->msglvl = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_69", "Defines the matrix type", "None", mat_mkl_cpardiso->mtype, &icntl, &flg));
  if (flg) mat_mkl_cpardiso->mtype = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_1", "Use default values", "None", mat_mkl_cpardiso->iparm[0], &icntl, &flg));
  if (flg && icntl != 0) {
    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_2", "Fill-in reducing ordering for the input matrix", "None", mat_mkl_cpardiso->iparm[1], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[1] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_4", "Preconditioned CGS/CG", "None", mat_mkl_cpardiso->iparm[3], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[3] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_5", "User permutation", "None", mat_mkl_cpardiso->iparm[4], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[4] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_6", "Write solution on x", "None", mat_mkl_cpardiso->iparm[5], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[5] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_8", "Iterative refinement step", "None", mat_mkl_cpardiso->iparm[7], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[7] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_10", "Pivoting perturbation", "None", mat_mkl_cpardiso->iparm[9], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[9] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_11", "Scaling vectors", "None", mat_mkl_cpardiso->iparm[10], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[10] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_12", "Solve with transposed or conjugate transposed matrix A", "None", mat_mkl_cpardiso->iparm[11], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[11] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_13", "Improved accuracy using (non-) symmetric weighted matching", "None", mat_mkl_cpardiso->iparm[12], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[12] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_18", "Numbers of non-zero elements", "None", mat_mkl_cpardiso->iparm[17], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[17] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_19", "Report number of floating point operations", "None", mat_mkl_cpardiso->iparm[18], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[18] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_21", "Pivoting for symmetric indefinite matrices", "None", mat_mkl_cpardiso->iparm[20], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[20] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_24", "Parallel factorization control", "None", mat_mkl_cpardiso->iparm[23], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[23] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_25", "Parallel forward/backward solve control", "None", mat_mkl_cpardiso->iparm[24], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[24] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_27", "Matrix checker", "None", mat_mkl_cpardiso->iparm[26], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[26] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_31", "Partial solve and computing selected components of the solution vectors", "None", mat_mkl_cpardiso->iparm[30], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[30] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_34", "Optimal number of threads for conditional numerical reproducibility (CNR) mode", "None", mat_mkl_cpardiso->iparm[33], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[33] = icntl;

    PetscCall(PetscOptionsInt("-mat_mkl_cpardiso_60", "Intel MKL Cluster PARDISO mode", "None", mat_mkl_cpardiso->iparm[59], &icntl, &flg));
    if (flg) mat_mkl_cpardiso->iparm[59] = icntl;
  }

  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscInitialize_MKL_CPARDISO(Mat A, Mat_MKL_CPARDISO *mat_mkl_cpardiso)
{
  PetscInt    bs;
  PetscBool   match;
  PetscMPIInt size;
  MPI_Comm    comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)A), &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  mat_mkl_cpardiso->comm_mkl_cpardiso = MPI_Comm_c2f(comm);

  mat_mkl_cpardiso->CleanUp = PETSC_FALSE;
  mat_mkl_cpardiso->maxfct  = 1;
  mat_mkl_cpardiso->mnum    = 1;
  mat_mkl_cpardiso->n       = A->rmap->N;
  if (mat_mkl_cpardiso->iparm[36]) mat_mkl_cpardiso->n /= mat_mkl_cpardiso->iparm[36]; /* iparm(37) is the BSR block size, so n counts block rows */
  mat_mkl_cpardiso->msglvl = 0;
  mat_mkl_cpardiso->nrhs   = 1;
  mat_mkl_cpardiso->err    = 0;
  mat_mkl_cpardiso->phase  = -1;
  mat_mkl_cpardiso->mtype  = PetscDefined(USE_COMPLEX) ? 13 : 11; /* complex or real nonsymmetric */

  mat_mkl_cpardiso->iparm[27] = PetscDefined(USE_REAL_SINGLE) ? 1 : 0; /* single- or double-precision arithmetic */

  mat_mkl_cpardiso->iparm[0]  = 1;  /* Solver default parameters overridden with provided by iparm */
  mat_mkl_cpardiso->iparm[1]  = 2;  /* Use METIS for fill-in reordering */
  mat_mkl_cpardiso->iparm[5]  = 0;  /* Write solution into x */
  mat_mkl_cpardiso->iparm[7]  = 2;  /* Max number of iterative refinement steps */
  mat_mkl_cpardiso->iparm[9]  = 13; /* Perturb the pivot elements with 1E-13 */
  mat_mkl_cpardiso->iparm[10] = 1;  /* Use nonsymmetric permutation and scaling MPS */
  mat_mkl_cpardiso->iparm[12] = 1;  /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
  mat_mkl_cpardiso->iparm[17] = -1; /* Output: Number of nonzeros in the factor LU */
  mat_mkl_cpardiso->iparm[18] = -1; /* Output: Mflops for LU factorization */
  mat_mkl_cpardiso->iparm[26] = 1;  /* Check input data for correctness */

  mat_mkl_cpardiso->iparm[39] = 0; /* centralized matrix input */
  if (size > 1) {
    mat_mkl_cpardiso->iparm[39] = 2;                 /* distributed CSR input: matrix, rhs, and solution are distributed */
    mat_mkl_cpardiso->iparm[40] = A->rmap->rstart;   /* first row of this rank's domain */
    mat_mkl_cpardiso->iparm[41] = A->rmap->rend - 1; /* last row of this rank's domain */
  }
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &match, MATMPIBAIJ, MATMPISBAIJ, ""));
  if (match) {
    PetscCall(MatGetBlockSize(A, &bs));
    mat_mkl_cpardiso->iparm[36] = bs; /* blocked (BSR) input with block size bs */
    mat_mkl_cpardiso->iparm[40] /= bs;
    mat_mkl_cpardiso->iparm[41] /= bs;
    mat_mkl_cpardiso->iparm[40]++;
    mat_mkl_cpardiso->iparm[41]++;
    mat_mkl_cpardiso->iparm[34] = 0; /* Fortran style */
  } else {
    mat_mkl_cpardiso->iparm[34] = 1; /* C style */
  }

  mat_mkl_cpardiso->perm = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
 * Symbolic decomposition. MKL Cluster PARDISO analysis phase.
 */
static PetscErrorCode MatLUFactorSymbolic_AIJMKL_CPARDISO(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)F->data;

  PetscFunctionBegin;
  mat_mkl_cpardiso->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MKL_CPARDISO options from the options database */
  PetscCall(MatSetFromOptions_MKL_CPARDISO(F, A));
  PetscCall((*mat_mkl_cpardiso->ConvertToTriples)(A, MAT_INITIAL_MATRIX, &mat_mkl_cpardiso->nz, &mat_mkl_cpardiso->ia, &mat_mkl_cpardiso->ja, &mat_mkl_cpardiso->a));

  mat_mkl_cpardiso->n = A->rmap->N;
  if (mat_mkl_cpardiso->iparm[36]) mat_mkl_cpardiso->n /= mat_mkl_cpardiso->iparm[36];

  /* analysis phase */
  mat_mkl_cpardiso->phase = JOB_ANALYSIS;

  PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja,
                                         mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err));
  PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err));

  mat_mkl_cpardiso->CleanUp = PETSC_TRUE;
  F->ops->lufactornumeric   = MatFactorNumeric_MKL_CPARDISO;
  F->ops->solve             = MatSolve_MKL_CPARDISO;
  F->ops->forwardsolve      = MatForwardSolve_MKL_CPARDISO;
  F->ops->backwardsolve     = MatBackwardSolve_MKL_CPARDISO;
  F->ops->solvetranspose    = MatSolveTranspose_MKL_CPARDISO;
  F->ops->matsolve          = MatMatSolve_MKL_CPARDISO;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_CPARDISO(Mat F, Mat A, IS perm, const MatFactorInfo *info)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)F->data;

  PetscFunctionBegin;
  mat_mkl_cpardiso->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MKL_CPARDISO options from the options database */
  PetscCall(MatSetFromOptions_MKL_CPARDISO(F, A));
  PetscCall((*mat_mkl_cpardiso->ConvertToTriples)(A, MAT_INITIAL_MATRIX, &mat_mkl_cpardiso->nz, &mat_mkl_cpardiso->ia, &mat_mkl_cpardiso->ja, &mat_mkl_cpardiso->a));

  mat_mkl_cpardiso->n = A->rmap->N;
  if (mat_mkl_cpardiso->iparm[36]) mat_mkl_cpardiso->n /= mat_mkl_cpardiso->iparm[36];
  PetscCheck(!PetscDefined(USE_COMPLEX), PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for PARDISO CHOLESKY with complex scalars! Use MAT_FACTOR_LU instead");
  if (A->spd == PETSC_BOOL3_TRUE) mat_mkl_cpardiso->mtype = 2; /* real symmetric positive definite */
  else mat_mkl_cpardiso->mtype = -2;                           /* real symmetric indefinite */

  /* analysis phase */
  mat_mkl_cpardiso->phase = JOB_ANALYSIS;

  PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja,
                                         mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err));
  PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err));

  mat_mkl_cpardiso->CleanUp     = PETSC_TRUE;
  F->ops->choleskyfactornumeric = MatFactorNumeric_MKL_CPARDISO;
  F->ops->solve                 = MatSolve_MKL_CPARDISO;
  F->ops->solvetranspose        = MatSolveTranspose_MKL_CPARDISO;
  F->ops->matsolve              = MatMatSolve_MKL_CPARDISO;
  if (A->spd == PETSC_BOOL3_TRUE) {
    /* plain forward/backward substitution is only exposed for the SPD factorization; the indefinite case involves pivoting and a separate diagonal solve step */
    F->ops->forwardsolve  = MatForwardSolve_MKL_CPARDISO;
    F->ops->backwardsolve = MatBackwardSolve_MKL_CPARDISO;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_MKL_CPARDISO(Mat A, PetscViewer viewer)
{
  PetscBool         isascii;
  PetscViewerFormat format;
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;
  PetscInt          i;

  PetscFunctionBegin;
  /* check if matrix is mkl_cpardiso type */
  if (A->ops->solve != MatSolve_MKL_CPARDISO) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO run parameters:\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO phase:             %d \n", mat_mkl_cpardiso->phase));
      for (i = 1; i <= 64; i++) PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO iparm[%d]:     %d \n", i, mat_mkl_cpardiso->iparm[i - 1]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO maxfct:     %d \n", mat_mkl_cpardiso->maxfct));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO mnum:     %d \n", mat_mkl_cpardiso->mnum));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO mtype:     %d \n", mat_mkl_cpardiso->mtype));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO n:     %d \n", mat_mkl_cpardiso->n));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO nrhs:     %d \n", mat_mkl_cpardiso->nrhs));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL Cluster PARDISO msglvl:     %d \n", mat_mkl_cpardiso->msglvl));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetInfo_MKL_CPARDISO(Mat A, MatInfoType flag, MatInfo *info)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_allocated      = mat_mkl_cpardiso->nz + 0.0;
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMkl_CPardisoSetCntl_MKL_CPARDISO(Mat F, PetscInt icntl, PetscInt ival)
{
  Mat_MKL_CPARDISO *mat_mkl_cpardiso = (Mat_MKL_CPARDISO *)F->data;

  PetscFunctionBegin;
  if (icntl >= 1 && icntl <= 64) { /* guard the lower bound so an out-of-range icntl cannot write before the iparm array */
    mat_mkl_cpardiso->iparm[icntl - 1] = ival;
  } else {
    if (icntl == 65) mkl_set_num_threads((int)ival);
    else if (icntl == 66) mat_mkl_cpardiso->maxfct = ival;
    else if (icntl == 67) mat_mkl_cpardiso->mnum = ival;
    else if (icntl == 68) mat_mkl_cpardiso->msglvl = ival;
    else if (icntl == 69) mat_mkl_cpardiso->mtype = ival;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMkl_CPardisoSetCntl - Set MKL Cluster PARDISO parameters
  <https://www.intel.com/content/www/us/en/docs/onemkl/developer-reference-c/2023-2/onemkl-pardiso-parallel-direct-sparse-solver-iface.html>

  Logically Collective

  Input Parameters:
+ F     - the factored matrix obtained by calling `MatGetFactor()`
. icntl - index of MKL Cluster PARDISO parameter
- ival  - value of MKL Cluster PARDISO parameter

  Options Database Key:
. -mat_mkl_cpardiso_<icntl> <ival> - set the option numbered icntl to ival

  Level: intermediate

  Note:
  This routine cannot be used if you are solving the linear system with `TS`, `SNES`, or `KSP`; it applies only if you directly call `MatGetFactor()`. Use the options
  database approach when working with `TS`, `SNES`, or `KSP`. See `MATSOLVERMKL_CPARDISO` for the options.

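  Example Usage:
.vb
  // a minimal sketch: F is assumed to come from MatGetFactor(A, MATSOLVERMKL_CPARDISO, MAT_FACTOR_LU, &F);
  // icntl 8 is the iterative refinement step count (iparm(8)) listed under MATSOLVERMKL_CPARDISO
  PetscCall(MatMkl_CPardisoSetCntl(F, 8, 4));
.ve
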
.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MATMPIAIJ`, `MATSOLVERMKL_CPARDISO`
@*/
PetscErrorCode MatMkl_CPardisoSetCntl(Mat F, PetscInt icntl, PetscInt ival)
{
  PetscFunctionBegin;
  PetscTryMethod(F, "MatMkl_CPardisoSetCntl_C", (Mat, PetscInt, PetscInt), (F, icntl, ival));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATSOLVERMKL_CPARDISO - A matrix type providing direct solvers (LU and Cholesky) for parallel matrices via the external package MKL Cluster PARDISO
  <https://www.intel.com/content/www/us/en/docs/onemkl/developer-reference-c/2023-2/onemkl-pardiso-parallel-direct-sparse-solver-iface.html>

  Works with `MATMPIAIJ`, `MATSEQAIJ`, `MATMPIBAIJ`, and `MATMPISBAIJ` matrices

  Use `-pc_type lu` `-pc_factor_mat_solver_type mkl_cpardiso` to use this direct solver

  Options Database Keys:
+ -mat_mkl_cpardiso_65 - Suggested number of threads to use within MKL Cluster PARDISO
. -mat_mkl_cpardiso_66 - Maximum number of factors with identical sparsity structure that must be kept in memory at the same time
. -mat_mkl_cpardiso_67 - Indicates the actual matrix for the solution phase
. -mat_mkl_cpardiso_68 - Message level information, use 1 to get detailed information on the solver options
. -mat_mkl_cpardiso_69 - Defines the matrix type. IMPORTANT: When you set this flag, iparm parameters are going to be set to the default ones for the matrix type
. -mat_mkl_cpardiso_1  - Use default values
. -mat_mkl_cpardiso_2  - Fill-in reducing ordering for the input matrix
. -mat_mkl_cpardiso_4  - Preconditioned CGS/CG
. -mat_mkl_cpardiso_5  - User permutation
. -mat_mkl_cpardiso_6  - Write solution on x
. -mat_mkl_cpardiso_8  - Iterative refinement step
. -mat_mkl_cpardiso_10 - Pivoting perturbation
. -mat_mkl_cpardiso_11 - Scaling vectors
. -mat_mkl_cpardiso_12 - Solve with transposed or conjugate transposed matrix A
. -mat_mkl_cpardiso_13 - Improved accuracy using (non-) symmetric weighted matching
. -mat_mkl_cpardiso_18 - Numbers of non-zero elements
. -mat_mkl_cpardiso_19 - Report number of floating point operations
. -mat_mkl_cpardiso_21 - Pivoting for symmetric indefinite matrices
. -mat_mkl_cpardiso_24 - Parallel factorization control
. -mat_mkl_cpardiso_25 - Parallel forward/backward solve control
. -mat_mkl_cpardiso_27 - Matrix checker
. -mat_mkl_cpardiso_31 - Partial solve and computing selected components of the solution vectors
. -mat_mkl_cpardiso_34 - Optimal number of threads for conditional numerical reproducibility (CNR) mode
- -mat_mkl_cpardiso_60 - Intel MKL Cluster PARDISO mode

  Level: beginner

  Notes:
  Use `-mat_mkl_cpardiso_68 1` to display the number of threads the solver is using. MKL does not provide a way to directly access this
  information.

  For more information on the options check
  <https://www.intel.com/content/www/us/en/docs/onemkl/developer-reference-c/2023-2/onemkl-pardiso-parallel-direct-sparse-solver-iface.html>

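  Example Usage:
.vb
  // a minimal sketch of selecting this solver programmatically (ksp is an assumed, already-created KSP)
  PC pc;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCLU));
  PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERMKL_CPARDISO));
.ve
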
.seealso: [](ch_matrices), `Mat`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatMkl_CPardisoSetCntl()`, `MatGetFactor()`, `MATSOLVERMKL_PARDISO`
M*/

static PetscErrorCode MatFactorGetSolverType_mkl_cpardiso(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMKL_CPARDISO;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MatGetFactor for AIJ, BAIJ, and SBAIJ matrices */
static PetscErrorCode MatGetFactor_mpiaij_mkl_cpardiso(Mat A, MatFactorType ftype, Mat *F)
{
  Mat               B;
  Mat_MKL_CPARDISO *mat_mkl_cpardiso;
  PetscBool         isSeqAIJ, isMPIBAIJ, isMPISBAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */

  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &isSeqAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIBAIJ, &isMPIBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPISBAIJ, &isMPISBAIJ));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("mkl_cpardiso", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  PetscCall(PetscNew(&mat_mkl_cpardiso));

  if (isSeqAIJ) mat_mkl_cpardiso->ConvertToTriples = MatCopy_seqaij_seqaij_MKL_CPARDISO;
  else if (isMPIBAIJ) mat_mkl_cpardiso->ConvertToTriples = MatConvertToTriples_mpibaij_mpibaij_MKL_CPARDISO;
  else if (isMPISBAIJ) mat_mkl_cpardiso->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij_MKL_CPARDISO;
  else mat_mkl_cpardiso->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij_MKL_CPARDISO;

  if (ftype == MAT_FACTOR_LU) B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMKL_CPARDISO;
  else B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_AIJMKL_CPARDISO;
  B->ops->destroy = MatDestroy_MKL_CPARDISO;

  B->ops->view    = MatView_MKL_CPARDISO;
  B->ops->getinfo = MatGetInfo_MKL_CPARDISO;

  B->factortype = ftype;
  B->assembled  = PETSC_TRUE; /* required by -ksp_view */

  B->data = mat_mkl_cpardiso;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERMKL_CPARDISO, &B->solvertype));

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_mkl_cpardiso));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMkl_CPardisoSetCntl_C", MatMkl_CPardisoSetCntl_MKL_CPARDISO));
  PetscCall(PetscInitialize_MKL_CPARDISO(A, mat_mkl_cpardiso));

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatSolverTypeRegister_MKL_CPardiso(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_CPARDISO, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_mkl_cpardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_CPARDISO, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_mkl_cpardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_CPARDISO, MATMPIBAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_mkl_cpardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_CPARDISO, MATMPISBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_mpiaij_mkl_cpardiso));
  PetscFunctionReturn(PETSC_SUCCESS);
}