xref: /petsc/src/mat/impls/htool/htool.cxx (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
1 #include <../src/mat/impls/htool/htool.hpp> /*I "petscmat.h" I*/
2 #include <petscblaslapack.h>
3 #include <set>
4 
/* option names accepted by -mat_htool_compressor, indexed by MatHtoolCompressorType */
const char *const MatHtoolCompressorTypes[] = {"sympartialACA", "fullACA", "SVD"};
/* option names accepted by -mat_htool_clustering, indexed by MatHtoolClusteringType */
const char *const MatHtoolClusteringTypes[] = {"PCARegular", "PCAGeometric", "BoundingBox1Regular", "BoundingBox1Geometric"};
/* BibTeX entry registered with PetscCitationsRegister() in MatAssemblyEnd_Htool() */
const char        HtoolCitation[]           = "@article{marchand2020two,\n"
                                              "  Author = {Marchand, Pierre and Claeys, Xavier and Jolivet, Pierre and Nataf, Fr\\'ed\\'eric and Tournier, Pierre-Henri},\n"
                                              "  Title = {Two-level preconditioning for $h$-version boundary element approximation of hypersingular operator with {GenEO}},\n"
                                              "  Year = {2020},\n"
                                              "  Publisher = {Elsevier},\n"
                                              "  Journal = {Numerische Mathematik},\n"
                                              "  Volume = {146},\n"
                                              "  Pages = {597--628},\n"
                                              "  Url = {https://github.com/htool-ddm/htool}\n"
                                              "}\n";
/* flipped to PETSC_TRUE once the citation above has been registered */
static PetscBool  HtoolCite                 = PETSC_FALSE;
18 
19 static PetscErrorCode MatGetDiagonal_Htool(Mat A, Vec v) {
20   Mat_Htool   *a = (Mat_Htool *)A->data;
21   PetscScalar *x;
22   PetscBool    flg;
23 
24   PetscFunctionBegin;
25   PetscCall(MatHasCongruentLayouts(A, &flg));
26   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
27   PetscCall(VecGetArrayWrite(v, &x));
28   a->hmatrix->copy_local_diagonal(x);
29   PetscCall(VecRestoreArrayWrite(v, &x));
30   PetscCall(VecScale(v, a->s));
31   PetscFunctionReturn(0);
32 }
33 
/* Returns the local diagonal block of A as a sequential dense matrix, cached on A for later calls. */
static PetscErrorCode MatGetDiagonalBlock_Htool(Mat A, Mat *b) {
  Mat_Htool   *a = (Mat_Htool *)A->data;
  Mat          B;
  PetscScalar *ptr;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscCall(MatHasCongruentLayouts(A, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
  PetscCall(PetscObjectQuery((PetscObject)A, "DiagonalBlock", (PetscObject *)&B)); /* same logic as in MatGetDiagonalBlock_MPIDense() */
  if (!B) {
    /* first call: assemble the n x n local diagonal block into a new SEQDENSE matrix */
    PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, A->rmap->n, A->rmap->n, A->rmap->n, NULL, &B));
    PetscCall(MatDenseGetArrayWrite(B, &ptr));
    a->hmatrix->copy_local_diagonal_block(ptr);
    PetscCall(MatDenseRestoreArrayWrite(B, &ptr));
    PetscCall(MatPropagateSymmetryOptions(A, B));
    /* the current scaling factor is baked into the cached block;
       NOTE(review): the cache is not invalidated if A is rescaled afterwards — confirm this is acceptable */
    PetscCall(MatScale(B, a->s));
    PetscCall(PetscObjectCompose((PetscObject)A, "DiagonalBlock", (PetscObject)B)); /* composing takes a reference, so A keeps B alive */
    *b = B;
    PetscCall(MatDestroy(&B)); /* drop the local reference; the composed reference remains */
  } else *b = B;
  PetscFunctionReturn(0);
}
57 
58 static PetscErrorCode MatMult_Htool(Mat A, Vec x, Vec y) {
59   Mat_Htool         *a = (Mat_Htool *)A->data;
60   const PetscScalar *in;
61   PetscScalar       *out;
62 
63   PetscFunctionBegin;
64   PetscCall(VecGetArrayRead(x, &in));
65   PetscCall(VecGetArrayWrite(y, &out));
66   a->hmatrix->mvprod_local_to_local(in, out);
67   PetscCall(VecRestoreArrayRead(x, &in));
68   PetscCall(VecRestoreArrayWrite(y, &out));
69   PetscCall(VecScale(y, a->s));
70   PetscFunctionReturn(0);
71 }
72 
/* naive implementation of MatMultAdd() needed for FEM-BEM coupling via MATNEST: v3 = v2 + s * A * v1 */
static PetscErrorCode MatMultAdd_Htool(Mat A, Vec v1, Vec v2, Vec v3) {
  Mat_Htool        *a = (Mat_Htool *)A->data;
  Vec               tmp;
  const PetscScalar scale = a->s; /* remember the scaling factor, temporarily reset below */

  PetscFunctionBegin;
  PetscCall(VecDuplicate(v2, &tmp));
  PetscCall(VecCopy(v2, v3)); /* no-op in MatMultAdd(bA->m[i][j],bx[j],by[i],by[i]) since VecCopy() checks for x == y */
  a->s = 1.0;                 /* set s to 1.0 since VecAXPY() may be used to scale the MatMult() output Vec */
  PetscCall(MatMult_Htool(A, v1, tmp)); /* tmp = A * v1, unscaled because a->s == 1.0 */
  PetscCall(VecAXPY(v3, scale, tmp));   /* v3 += scale * tmp reapplies the original factor exactly once */
  PetscCall(VecDestroy(&tmp));
  a->s = scale; /* set s back to its original value */
  /* NOTE(review): mutating a->s makes this non-reentrant — confirm MatMultAdd() is never invoked concurrently on the same Mat */
  PetscFunctionReturn(0);
}
89 
90 static PetscErrorCode MatMultTranspose_Htool(Mat A, Vec x, Vec y) {
91   Mat_Htool         *a = (Mat_Htool *)A->data;
92   const PetscScalar *in;
93   PetscScalar       *out;
94 
95   PetscFunctionBegin;
96   PetscCall(VecGetArrayRead(x, &in));
97   PetscCall(VecGetArrayWrite(y, &out));
98   a->hmatrix->mvprod_transp_local_to_local(in, out);
99   PetscCall(VecRestoreArrayRead(x, &in));
100   PetscCall(VecRestoreArrayWrite(y, &out));
101   PetscCall(VecScale(y, a->s));
102   PetscFunctionReturn(0);
103 }
104 
105 static PetscErrorCode MatIncreaseOverlap_Htool(Mat A, PetscInt is_max, IS is[], PetscInt ov) {
106   std::set<PetscInt> set;
107   const PetscInt    *idx;
108   PetscInt          *oidx, size, bs[2];
109   PetscMPIInt        csize;
110 
111   PetscFunctionBegin;
112   PetscCall(MatGetBlockSizes(A, bs, bs + 1));
113   if (bs[0] != bs[1]) bs[0] = 1;
114   for (PetscInt i = 0; i < is_max; ++i) {
115     /* basic implementation that adds indices by shifting an IS by -ov, -ov+1..., -1, 1..., ov-1, ov */
116     /* needed to avoid subdomain matrices to replicate A since it is dense                           */
117     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)is[i]), &csize));
118     PetscCheck(csize == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported parallel IS");
119     PetscCall(ISGetSize(is[i], &size));
120     PetscCall(ISGetIndices(is[i], &idx));
121     for (PetscInt j = 0; j < size; ++j) {
122       set.insert(idx[j]);
123       for (PetscInt k = 1; k <= ov; ++k) {                                              /* for each layer of overlap      */
124         if (idx[j] - k >= 0) set.insert(idx[j] - k);                                    /* do not insert negative indices */
125         if (idx[j] + k < A->rmap->N && idx[j] + k < A->cmap->N) set.insert(idx[j] + k); /* do not insert indices greater than the dimension of A */
126       }
127     }
128     PetscCall(ISRestoreIndices(is[i], &idx));
129     PetscCall(ISDestroy(is + i));
130     if (bs[0] > 1) {
131       for (std::set<PetscInt>::iterator it = set.cbegin(); it != set.cend(); it++) {
132         std::vector<PetscInt> block(bs[0]);
133         std::iota(block.begin(), block.end(), (*it / bs[0]) * bs[0]);
134         set.insert(block.cbegin(), block.cend());
135       }
136     }
137     size = set.size(); /* size with overlap */
138     PetscCall(PetscMalloc1(size, &oidx));
139     for (const PetscInt j : set) *oidx++ = j;
140     oidx -= size;
141     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, size, oidx, PETSC_OWN_POINTER, is + i));
142   }
143   PetscFunctionReturn(0);
144 }
145 
/* Extracts dense sequential submatrices A(irow[i], icol[i]). When the row and column index sets coincide,
   are sorted, and fully contain the local diagonal block, entries already available from
   MatGetDiagonalBlock_Htool() are reused — and, for symmetric/Hermitian matrices, already-assembled
   off-diagonal strips are transposed — instead of reassembling everything entry-wise through the kernel. */
static PetscErrorCode MatCreateSubMatrices_Htool(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[]) {
  Mat_Htool         *a = (Mat_Htool *)A->data;
  Mat                D, B, BT;
  const PetscScalar *copy;
  PetscScalar       *ptr;
  const PetscInt    *idxr, *idxc, *it;
  PetscInt           nrow, m, i;
  PetscBool          flg;

  PetscFunctionBegin;
  if (scall != MAT_REUSE_MATRIX) PetscCall(PetscCalloc1(n, submat));
  for (i = 0; i < n; ++i) {
    PetscCall(ISGetLocalSize(irow[i], &nrow));
    PetscCall(ISGetLocalSize(icol[i], &m));
    PetscCall(ISGetIndices(irow[i], &idxr));
    PetscCall(ISGetIndices(icol[i], &idxc));
    if (scall != MAT_REUSE_MATRIX) PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow, m, nrow, m, NULL, (*submat) + i));
    PetscCall(MatDenseGetArrayWrite((*submat)[i], &ptr));
    if (irow[i] == icol[i]) { /* same row and column IS? */
      PetscCall(MatHasCongruentLayouts(A, &flg));
      if (flg) {
        PetscCall(ISSorted(irow[i], &flg));
        if (flg) { /* sorted IS? */
          it = std::lower_bound(idxr, idxr + nrow, A->rmap->rstart); /* first index >= rstart */
          if (it != idxr + nrow && *it == A->rmap->rstart) {    /* rmap->rstart in IS? */
            if (std::distance(idxr, it) + A->rmap->n <= nrow) { /* long enough IS to store the local diagonal block? */
              /* verify the IS contains the contiguous run rstart..rstart+n-1 */
              for (PetscInt j = 0; j < A->rmap->n && flg; ++j)
                if (PetscUnlikely(it[j] != A->rmap->rstart + j)) flg = PETSC_FALSE;
              if (flg) { /* complete local diagonal block in IS? */
                /* fast extraction when the local diagonal block is part of the submatrix, e.g., for PCASM or PCHPDDM
                 *      [   B   C   E   ]
                 *  A = [   B   D   E   ]
                 *      [   B   F   E   ]
                 */
                m = std::distance(idxr, it); /* shift of the coefficient (0,0) of block D from above */
                PetscCall(MatGetDiagonalBlock_Htool(A, &D));
                PetscCall(MatDenseGetArrayRead(D, &copy));
                for (PetscInt k = 0; k < A->rmap->n; ++k) { PetscCall(PetscArraycpy(ptr + (m + k) * nrow + m, copy + k * A->rmap->n, A->rmap->n)); /* block D from above */ }
                PetscCall(MatDenseRestoreArrayRead(D, &copy));
                if (m) {
                  a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* vertical block B from above */
                  /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
                  if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
                    /* B and BT are views into ptr (no copy) with the submatrix leading dimension */
                    PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, m, A->rmap->n, m, ptr + m, &B));
                    PetscCall(MatDenseSetLDA(B, nrow));
                    PetscCall(MatCreateDense(PETSC_COMM_SELF, m, A->rmap->n, m, A->rmap->n, ptr + m * nrow, &BT));
                    PetscCall(MatDenseSetLDA(BT, nrow));
                    if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
                      PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
                    } else {
                      PetscCall(MatTransposeSetPrecursor(B, BT));
                      PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
                    }
                    PetscCall(MatDestroy(&B));
                    PetscCall(MatDestroy(&BT));
                  } else {
                    for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block C from above */
                      a->wrapper->copy_submatrix(m, 1, idxr, idxc + m + k, ptr + (m + k) * nrow);
                    }
                  }
                }
                if (m + A->rmap->n != nrow) { /* indices remain after the local diagonal block */
                  a->wrapper->copy_submatrix(nrow, std::distance(it + A->rmap->n, idxr + nrow), idxr, idxc + m + A->rmap->n, ptr + (m + A->rmap->n) * nrow); /* vertical block E from above */
                  /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
                  if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
                    PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), ptr + (m + A->rmap->n) * nrow + m, &B));
                    PetscCall(MatDenseSetLDA(B, nrow));
                    PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, ptr + m * nrow + m + A->rmap->n, &BT));
                    PetscCall(MatDenseSetLDA(BT, nrow));
                    if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
                      PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
                    } else {
                      PetscCall(MatTransposeSetPrecursor(B, BT));
                      PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
                    }
                    PetscCall(MatDestroy(&B));
                    PetscCall(MatDestroy(&BT));
                  } else {
                    for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block F from above */
                      a->wrapper->copy_submatrix(std::distance(it + A->rmap->n, idxr + nrow), 1, it + A->rmap->n, idxc + m + k, ptr + (m + k) * nrow + m + A->rmap->n);
                    }
                  }
                }
              }                       /* complete local diagonal block not in IS */
            } else flg = PETSC_FALSE; /* IS not long enough to store the local diagonal block */
          } else flg = PETSC_FALSE;   /* rmap->rstart not in IS */
        }                             /* unsorted IS */
      }
    } else flg = PETSC_FALSE;                                       /* different row and column IS */
    if (!flg) a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* reassemble everything */
    PetscCall(ISRestoreIndices(irow[i], &idxr));
    PetscCall(ISRestoreIndices(icol[i], &idxc));
    PetscCall(MatDenseRestoreArrayWrite((*submat)[i], &ptr));
    PetscCall(MatScale((*submat)[i], a->s)); /* apply the deferred scaling factor */
  }
  PetscFunctionReturn(0);
}
243 
/* Releases everything owned by the MATHTOOL implementation: composed methods, the cached
   transposed-kernel container, coordinate arrays, work buffers, and the Htool objects. */
static PetscErrorCode MatDestroy_Htool(Mat A) {
  Mat_Htool               *a = (Mat_Htool *)A->data;
  PetscContainer           container;
  MatHtoolKernelTranspose *kernelt;

  PetscFunctionBegin;
  PetscCall(PetscObjectChangeTypeName((PetscObject)A, NULL));
  /* remove every composed method so the dispatch table does not outlive the implementation */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", NULL));
  PetscCall(PetscObjectQuery((PetscObject)A, "KernelTranspose", (PetscObject *)&container));
  if (container) { /* created in MatTranspose_Htool() */
    PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
    PetscCall(MatDestroy(&kernelt->A));
    PetscCall(PetscFree(kernelt));
    PetscCall(PetscContainerDestroy(&container));
    PetscCall(PetscObjectCompose((PetscObject)A, "KernelTranspose", NULL));
  }
  /* source coordinates are only owned separately when they differ from the target ones */
  if (a->gcoords_source != a->gcoords_target) PetscCall(PetscFree(a->gcoords_source));
  PetscCall(PetscFree(a->gcoords_target));
  PetscCall(PetscFree2(a->work_source, a->work_target));
  delete a->wrapper;
  delete a->hmatrix;
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(0);
}
276 
/* Prints MATHTOOL-specific information (compression settings and statistics); only ASCII viewers are handled. */
static PetscErrorCode MatView_Htool(Mat A, PetscViewer pv) {
  Mat_Htool *a = (Mat_Htool *)A->data;
  PetscBool  flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pv, PETSCVIEWERASCII, &flg));
  if (flg) {
    PetscCall(PetscViewerASCIIPrintf(pv, "symmetry: %c\n", a->hmatrix->get_symmetry_type()));
    if (PetscAbsScalar(a->s - 1.0) > PETSC_MACHINE_EPSILON) { /* only report the scaling factor when it is not (numerically) one */
#if defined(PETSC_USE_COMPLEX)
      PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g+%gi\n", (double)PetscRealPart(a->s), (double)PetscImaginaryPart(a->s)));
#else
      PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g\n", (double)a->s));
#endif
    }
    PetscCall(PetscViewerASCIIPrintf(pv, "minimum cluster size: %" PetscInt_FMT "\n", a->bs[0]));
    PetscCall(PetscViewerASCIIPrintf(pv, "maximum block size: %" PetscInt_FMT "\n", a->bs[1]));
    PetscCall(PetscViewerASCIIPrintf(pv, "epsilon: %g\n", (double)a->epsilon));
    PetscCall(PetscViewerASCIIPrintf(pv, "eta: %g\n", (double)a->eta));
    PetscCall(PetscViewerASCIIPrintf(pv, "minimum target depth: %" PetscInt_FMT "\n", a->depth[0]));
    PetscCall(PetscViewerASCIIPrintf(pv, "minimum source depth: %" PetscInt_FMT "\n", a->depth[1]));
    PetscCall(PetscViewerASCIIPrintf(pv, "compressor: %s\n", MatHtoolCompressorTypes[a->compressor]));
    PetscCall(PetscViewerASCIIPrintf(pv, "clustering: %s\n", MatHtoolClusteringTypes[a->clustering]));
    /* statistics gathered by Htool during MatAssemblyEnd_Htool() */
    PetscCall(PetscViewerASCIIPrintf(pv, "compression ratio: %s\n", a->hmatrix->get_infos("Compression_ratio").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv, "space saving: %s\n", a->hmatrix->get_infos("Space_saving").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv, "number of dense (resp. low rank) matrices: %s (resp. %s)\n", a->hmatrix->get_infos("Number_of_dmat").c_str(), a->hmatrix->get_infos("Number_of_lrmat").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) dense block sizes: (%s, %s, %s)\n", a->hmatrix->get_infos("Dense_block_size_min").c_str(), a->hmatrix->get_infos("Dense_block_size_mean").c_str(),
                                     a->hmatrix->get_infos("Dense_block_size_max").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) low rank block sizes: (%s, %s, %s)\n", a->hmatrix->get_infos("Low_rank_block_size_min").c_str(), a->hmatrix->get_infos("Low_rank_block_size_mean").c_str(),
                                     a->hmatrix->get_infos("Low_rank_block_size_max").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) ranks: (%s, %s, %s)\n", a->hmatrix->get_infos("Rank_min").c_str(), a->hmatrix->get_infos("Rank_mean").c_str(), a->hmatrix->get_infos("Rank_max").c_str()));
  }
  PetscFunctionReturn(0);
}
311 
312 static PetscErrorCode MatScale_Htool(Mat A, PetscScalar s) {
313   Mat_Htool *a = (Mat_Htool *)A->data;
314 
315   PetscFunctionBegin;
316   a->s *= s;
317   PetscFunctionReturn(0);
318 }
319 
320 /* naive implementation of MatGetRow() needed for MatConvert_Nest_AIJ() */
321 static PetscErrorCode MatGetRow_Htool(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) {
322   Mat_Htool   *a = (Mat_Htool *)A->data;
323   PetscInt    *idxc;
324   PetscBLASInt one = 1, bn;
325 
326   PetscFunctionBegin;
327   if (nz) *nz = A->cmap->N;
328   if (idx || v) { /* even if !idx, need to set idxc for htool::copy_submatrix() */
329     PetscCall(PetscMalloc1(A->cmap->N, &idxc));
330     for (PetscInt i = 0; i < A->cmap->N; ++i) idxc[i] = i;
331   }
332   if (idx) *idx = idxc;
333   if (v) {
334     PetscCall(PetscMalloc1(A->cmap->N, v));
335     if (a->wrapper) a->wrapper->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
336     else reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx)->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
337     PetscCall(PetscBLASIntCast(A->cmap->N, &bn));
338     PetscCallBLAS("BLASscal", BLASscal_(&bn, &a->s, *v, &one));
339   }
340   if (!idx) PetscCall(PetscFree(idxc));
341   PetscFunctionReturn(0);
342 }
343 
344 static PetscErrorCode MatRestoreRow_Htool(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) {
345   PetscFunctionBegin;
346   if (nz) *nz = 0;
347   if (idx) PetscCall(PetscFree(*idx));
348   if (v) PetscCall(PetscFree(*v));
349   PetscFunctionReturn(0);
350 }
351 
/* Processes the -mat_htool_* command-line options; the values take effect at the next MatAssemblyEnd(). */
static PetscErrorCode MatSetFromOptions_Htool(Mat A, PetscOptionItems *PetscOptionsObject) {
  Mat_Htool *a = (Mat_Htool *)A->data;
  PetscInt   n;
  PetscBool  flg;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Htool options");
  PetscCall(PetscOptionsInt("-mat_htool_min_cluster_size", "Minimal leaf size in cluster tree", NULL, a->bs[0], a->bs, NULL));
  PetscCall(PetscOptionsInt("-mat_htool_max_block_size", "Maximal number of coefficients in a dense block", NULL, a->bs[1], a->bs + 1, NULL));
  PetscCall(PetscOptionsReal("-mat_htool_epsilon", "Relative error in Frobenius norm when approximating a block", NULL, a->epsilon, &a->epsilon, NULL));
  PetscCall(PetscOptionsReal("-mat_htool_eta", "Admissibility condition tolerance", NULL, a->eta, &a->eta, NULL));
  PetscCall(PetscOptionsInt("-mat_htool_min_target_depth", "Minimal cluster tree depth associated with the rows", NULL, a->depth[0], a->depth, NULL));
  PetscCall(PetscOptionsInt("-mat_htool_min_source_depth", "Minimal cluster tree depth associated with the columns", NULL, a->depth[1], a->depth + 1, NULL));
  n = 0; /* index into MatHtoolCompressorTypes, only stored when the option was actually set */
  PetscCall(PetscOptionsEList("-mat_htool_compressor", "Type of compression", "MatHtoolCompressorType", MatHtoolCompressorTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolCompressorTypes), MatHtoolCompressorTypes[MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA], &n, &flg));
  if (flg) a->compressor = MatHtoolCompressorType(n);
  n = 0; /* index into MatHtoolClusteringTypes, only stored when the option was actually set */
  PetscCall(PetscOptionsEList("-mat_htool_clustering", "Type of clustering", "MatHtoolClusteringType", MatHtoolClusteringTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolClusteringTypes), MatHtoolClusteringTypes[MAT_HTOOL_CLUSTERING_PCA_REGULAR], &n, &flg));
  if (flg) a->clustering = MatHtoolClusteringType(n);
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
374 
/* Builds (or rebuilds) the Htool hierarchical matrix: target/source cluster trees are constructed from the
   global coordinates and ownership ranges, a compressor is selected, and the H-matrix is assembled from
   either the kernel wrapper or a user-provided htool::VirtualGenerator. */
static PetscErrorCode MatAssemblyEnd_Htool(Mat A, MatAssemblyType type) {
  Mat_Htool                                                   *a = (Mat_Htool *)A->data;
  const PetscInt                                              *ranges;
  PetscInt                                                    *offset;
  PetscMPIInt                                                  size;
  /* 'H' (Hermitian) / 'S' (symmetric) / 'N' (none); uplo restricts assembly to the upper triangle when symmetry can be exploited */
  char                                                         S = PetscDefined(USE_COMPLEX) && A->hermitian == PETSC_BOOL3_TRUE ? 'H' : (A->symmetric == PETSC_BOOL3_TRUE ? 'S' : 'N'), uplo = S == 'N' ? 'N' : 'U';
  htool::VirtualGenerator<PetscScalar>                        *generator = nullptr;
  std::shared_ptr<htool::VirtualCluster>                       t, s = nullptr;
  std::shared_ptr<htool::VirtualLowRankGenerator<PetscScalar>> compressor = nullptr;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(HtoolCitation, &HtoolCite));
  /* rebuild from scratch: discard any previously assembled objects */
  delete a->wrapper;
  delete a->hmatrix;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(PetscMalloc1(2 * size, &offset));
  PetscCall(MatGetOwnershipRanges(A, &ranges));
  /* interleaved (first row, number of local rows) pairs per rank, the layout Htool expects */
  for (PetscInt i = 0; i < size; ++i) {
    offset[2 * i]     = ranges[i];
    offset[2 * i + 1] = ranges[i + 1] - ranges[i];
  }
  switch (a->clustering) {
  case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC: t = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::GeometricSplitting>>>(a->dim); break;
  case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC: t = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::GeometricSplitting>>>(a->dim); break;
  case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR: t = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::RegularSplitting>>>(a->dim); break;
  default: t = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::RegularSplitting>>>(a->dim);
  }
  t->set_minclustersize(a->bs[0]);
  t->build(A->rmap->N, a->gcoords_target, offset);
  /* a user kernel goes through the wrapper; otherwise kernelctx is assumed to be a htool::VirtualGenerator */
  if (a->kernel) a->wrapper = new WrapperHtool(A->rmap->N, A->cmap->N, a->dim, a->kernel, a->kernelctx);
  else {
    a->wrapper = NULL;
    generator  = reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx);
  }
  if (a->gcoords_target != a->gcoords_source) { /* distinct source coordinates: build a second cluster tree for the columns */
    PetscCall(MatGetOwnershipRangesColumn(A, &ranges));
    for (PetscInt i = 0; i < size; ++i) {
      offset[2 * i]     = ranges[i];
      offset[2 * i + 1] = ranges[i + 1] - ranges[i];
    }
    switch (a->clustering) {
    case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC: s = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::GeometricSplitting>>>(a->dim); break;
    case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC: s = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::GeometricSplitting>>>(a->dim); break;
    case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR: s = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::RegularSplitting>>>(a->dim); break;
    default: s = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::RegularSplitting>>>(a->dim);
    }
    s->set_minclustersize(a->bs[0]);
    s->build(A->cmap->N, a->gcoords_source, offset);
    S = uplo = 'N'; /* symmetry cannot be exploited when target and source clusters differ */
  }
  PetscCall(PetscFree(offset));
  switch (a->compressor) {
  case MAT_HTOOL_COMPRESSOR_FULL_ACA: compressor = std::make_shared<htool::fullACA<PetscScalar>>(); break;
  case MAT_HTOOL_COMPRESSOR_SVD: compressor = std::make_shared<htool::SVD<PetscScalar>>(); break;
  default: compressor = std::make_shared<htool::sympartialACA<PetscScalar>>();
  }
  a->hmatrix = dynamic_cast<htool::VirtualHMatrix<PetscScalar> *>(new htool::HMatrix<PetscScalar>(t, s ? s : t, a->epsilon, a->eta, S, uplo));
  a->hmatrix->set_compression(compressor);
  a->hmatrix->set_maxblocksize(a->bs[1]);
  a->hmatrix->set_mintargetdepth(a->depth[0]);
  a->hmatrix->set_minsourcedepth(a->depth[1]);
  if (s) a->hmatrix->build(a->wrapper ? *a->wrapper : *generator, a->gcoords_target, a->gcoords_source);
  else a->hmatrix->build(a->wrapper ? *a->wrapper : *generator, a->gcoords_target);
  PetscFunctionReturn(0);
}
440 
/* Numeric phase of C = A*B or C = A^T*B with A a MATHTOOL and B/C dense, using Htool's multi-vector products. */
static PetscErrorCode MatProductNumeric_Htool(Mat C) {
  Mat_Product       *product = C->product;
  Mat_Htool         *a       = (Mat_Htool *)product->A->data;
  const PetscScalar *in;
  PetscScalar       *out;
  PetscInt           N, lda;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  PetscCall(MatGetSize(C, NULL, &N)); /* N = number of right-hand sides (columns of B and C) */
  PetscCall(MatDenseGetLDA(C, &lda));
  /* the Htool products write columns contiguously, so C must not be padded */
  PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
  PetscCall(MatDenseGetArrayRead(product->B, &in));
  PetscCall(MatDenseGetArrayWrite(C, &out));
  switch (product->type) {
  case MATPRODUCT_AB: a->hmatrix->mvprod_local_to_local(in, out, N); break;
  case MATPRODUCT_AtB: a->hmatrix->mvprod_transp_local_to_local(in, out, N); break;
  default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatProductType %s is not supported", MatProductTypes[product->type]);
  }
  PetscCall(MatDenseRestoreArrayWrite(C, &out));
  PetscCall(MatDenseRestoreArrayRead(product->B, &in));
  PetscCall(MatScale(C, a->s)); /* apply the deferred scaling factor of A */
  PetscFunctionReturn(0);
}
465 
/* Symbolic phase: validates that B is dense, sizes and sets up the dense result C,
   and installs MatProductNumeric_Htool() for the numeric phase. */
static PetscErrorCode MatProductSymbolic_Htool(Mat C) {
  Mat_Product *product = C->product;
  Mat          A, B;
  PetscBool    flg;

  PetscFunctionBegin;
  MatCheckProduct(C, 1);
  A = product->A;
  B = product->B;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(flg, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "MatProduct_AB not supported for %s", ((PetscObject)product->B)->type_name);
  switch (product->type) {
  case MATPRODUCT_AB:
    /* C inherits A's row layout and B's column layout (only when not already sized by the caller) */
    if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) PetscCall(MatSetSizes(C, A->rmap->n, B->cmap->n, A->rmap->N, B->cmap->N));
    break;
  case MATPRODUCT_AtB:
    /* C inherits A's column layout (rows of A^T) and B's column layout */
    if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) PetscCall(MatSetSizes(C, A->cmap->n, B->cmap->n, A->cmap->N, B->cmap->N));
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "ProductType %s is not supported", MatProductTypes[product->type]);
  }
  PetscCall(MatSetType(C, MATDENSE));
  PetscCall(MatSetUp(C));
  PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  C->ops->productsymbolic = NULL; /* the symbolic phase only needs to run once */
  C->ops->productnumeric  = MatProductNumeric_Htool;
  PetscFunctionReturn(0);
}
495 
496 static PetscErrorCode MatProductSetFromOptions_Htool(Mat C) {
497   PetscFunctionBegin;
498   MatCheckProduct(C, 1);
499   if (C->product->type == MATPRODUCT_AB || C->product->type == MATPRODUCT_AtB) C->ops->productsymbolic = MatProductSymbolic_Htool;
500   PetscFunctionReturn(0);
501 }
502 
503 static PetscErrorCode MatHtoolGetHierarchicalMat_Htool(Mat A, const htool::VirtualHMatrix<PetscScalar> **hmatrix) {
504   Mat_Htool *a = (Mat_Htool *)A->data;
505 
506   PetscFunctionBegin;
507   *hmatrix = a->hmatrix;
508   PetscFunctionReturn(0);
509 }
510 
/*@C
     MatHtoolGetHierarchicalMat - Retrieves the opaque pointer to a Htool virtual matrix stored in a MATHTOOL.

   Input Parameter:
.     A - hierarchical matrix

   Output Parameter:
.     hmatrix - opaque pointer to a Htool virtual matrix

   Note:
     The returned pointer is owned by A and must not be freed by the caller.

   Level: advanced

.seealso: `MATHTOOL`
@*/
PETSC_EXTERN PetscErrorCode MatHtoolGetHierarchicalMat(Mat A, const htool::VirtualHMatrix<PetscScalar> **hmatrix) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidPointer(hmatrix, 2);
  PetscTryMethod(A, "MatHtoolGetHierarchicalMat_C", (Mat, const htool::VirtualHMatrix<PetscScalar> **), (A, hmatrix));
  PetscFunctionReturn(0);
}
531 
532 static PetscErrorCode MatHtoolSetKernel_Htool(Mat A, MatHtoolKernel kernel, void *kernelctx) {
533   Mat_Htool *a = (Mat_Htool *)A->data;
534 
535   PetscFunctionBegin;
536   a->kernel    = kernel;
537   a->kernelctx = kernelctx;
538   delete a->wrapper;
539   if (a->kernel) a->wrapper = new WrapperHtool(A->rmap->N, A->cmap->N, a->dim, a->kernel, a->kernelctx);
540   PetscFunctionReturn(0);
541 }
542 
/*@C
     MatHtoolSetKernel - Sets the kernel and context used for the assembly of a MATHTOOL.

   Input Parameters:
+     A - hierarchical matrix
.     kernel - computational kernel (or NULL)
-     kernelctx - kernel context (if kernel is NULL, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)

   Note:
     At least one of kernel and kernelctx must be provided.

   Level: advanced

.seealso: `MATHTOOL`, `MatCreateHtoolFromKernel()`
@*/
PETSC_EXTERN PetscErrorCode MatHtoolSetKernel(Mat A, MatHtoolKernel kernel, void *kernelctx) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  if (!kernelctx) PetscValidFunction(kernel, 2); /* without a context, a kernel is mandatory */
  if (!kernel) PetscValidPointer(kernelctx, 3);  /* without a kernel, a generator context is mandatory */
  PetscTryMethod(A, "MatHtoolSetKernel_C", (Mat, MatHtoolKernel, void *), (A, kernel, kernelctx));
  PetscFunctionReturn(0);
}
563 
564 static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A, IS *is) {
565   Mat_Htool            *a = (Mat_Htool *)A->data;
566   std::vector<PetscInt> source;
567 
568   PetscFunctionBegin;
569   source = a->hmatrix->get_source_cluster()->get_local_perm();
570   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), source.size(), source.data(), PETSC_COPY_VALUES, is));
571   PetscCall(ISSetPermutation(*is));
572   PetscFunctionReturn(0);
573 }
574 
575 /*@C
576      MatHtoolGetPermutationSource - Gets the permutation associated to the source cluster.
577 
578    Input Parameter:
579 .     A - hierarchical matrix
580 
581    Output Parameter:
582 .     is - permutation
583 
584    Level: advanced
585 
586 .seealso: `MATHTOOL`, `MatHtoolGetPermutationTarget()`, `MatHtoolUsePermutation()`
587 @*/
588 PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationSource(Mat A, IS *is) {
589   PetscFunctionBegin;
590   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
591   if (!is) PetscValidPointer(is, 2);
592   PetscTryMethod(A, "MatHtoolGetPermutationSource_C", (Mat, IS *), (A, is));
593   PetscFunctionReturn(0);
594 }
595 
596 static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A, IS *is) {
597   Mat_Htool            *a = (Mat_Htool *)A->data;
598   std::vector<PetscInt> target;
599 
600   PetscFunctionBegin;
601   target = a->hmatrix->get_target_cluster()->get_local_perm();
602   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), target.size(), target.data(), PETSC_COPY_VALUES, is));
603   PetscCall(ISSetPermutation(*is));
604   PetscFunctionReturn(0);
605 }
606 
607 /*@C
608      MatHtoolGetPermutationTarget - Gets the permutation associated to the target cluster.
609 
610    Input Parameter:
611 .     A - hierarchical matrix
612 
613    Output Parameter:
614 .     is - permutation
615 
616    Level: advanced
617 
618 .seealso: `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolUsePermutation()`
619 @*/
620 PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationTarget(Mat A, IS *is) {
621   PetscFunctionBegin;
622   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
623   if (!is) PetscValidPointer(is, 2);
624   PetscTryMethod(A, "MatHtoolGetPermutationTarget_C", (Mat, IS *), (A, is));
625   PetscFunctionReturn(0);
626 }
627 
/* Forwards the permutation toggle to the underlying Htool hierarchical matrix */
static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A, PetscBool use) {
  Mat_Htool *a = (Mat_Htool *)A->data;

  PetscFunctionBegin;
  a->hmatrix->set_use_permutation(use);
  PetscFunctionReturn(0);
}
635 
636 /*@C
637      MatHtoolUsePermutation - Sets whether MATHTOOL should permute input (resp. output) vectors following its internal source (resp. target) permutation.
638 
639    Input Parameters:
640 +     A - hierarchical matrix
641 -     use - Boolean value
642 
643    Level: advanced
644 
645 .seealso: `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolGetPermutationTarget()`
646 @*/
PETSC_EXTERN PetscErrorCode MatHtoolUsePermutation(Mat A, PetscBool use) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(A, use, 2); /* logically collective: all ranks must pass the same flag */
  PetscTryMethod(A, "MatHtoolUsePermutation_C", (Mat, PetscBool), (A, use));
  PetscFunctionReturn(0);
}
654 
/* Converts a MATHTOOL into a dense Mat by copying the permuted local dense block out of Htool */
static PetscErrorCode MatConvert_Htool_Dense(Mat A, MatType newtype, MatReuse reuse, Mat *B) {
  Mat          C;
  Mat_Htool   *a = (Mat_Htool *)A->data;
  PetscInt     lda;
  PetscScalar *array;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    C = *B;
    PetscCheck(C->rmap->n == A->rmap->n && C->cmap->N == A->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible dimensions");
    /* the copy below fills the array contiguously, so a padded leading dimension is rejected */
    PetscCall(MatDenseGetLDA(C, &lda));
    PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
  } else {
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
    PetscCall(MatSetSizes(C, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetType(C, MATDENSE));
    PetscCall(MatSetUp(C));
    /* values are produced locally, no off-process stashing needed */
    PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatDenseGetArrayWrite(C, &array));
  a->hmatrix->copy_local_dense_perm(array);
  PetscCall(MatDenseRestoreArrayWrite(C, &array));
  PetscCall(MatScale(C, a->s)); /* apply the scaling factor accumulated by MatScale_Htool */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &C));
  } else *B = C;
  PetscFunctionReturn(0);
}
685 
686 static PetscErrorCode GenEntriesTranspose(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx) {
687   MatHtoolKernelTranspose *generator = (MatHtoolKernelTranspose *)ctx;
688   PetscScalar             *tmp;
689 
690   PetscFunctionBegin;
691   generator->kernel(sdim, N, M, cols, rows, ptr, generator->kernelctx);
692   PetscCall(PetscMalloc1(M * N, &tmp));
693   PetscCall(PetscArraycpy(tmp, ptr, M * N));
694   for (PetscInt i = 0; i < M; ++i) {
695     for (PetscInt j = 0; j < N; ++j) ptr[i + j * M] = tmp[j + i * N];
696   }
697   PetscCall(PetscFree(tmp));
698   PetscFunctionReturn(0);
699 }
700 
/* naive implementation which keeps a reference to the original Mat: the transpose is assembled
   lazily through the GenEntriesTranspose adapter rather than by transposing the H-matrix itself */
static PetscErrorCode MatTranspose_Htool(Mat A, MatReuse reuse, Mat *B) {
  Mat                      C;
  Mat_Htool               *a = (Mat_Htool *)A->data, *c;
  PetscInt                 M = A->rmap->N, N = A->cmap->N, m = A->rmap->n, n = A->cmap->n;
  PetscContainer           container;
  MatHtoolKernelTranspose *kernelt;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
  PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MatTranspose() with MAT_INPLACE_MATRIX not supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    /* create C with swapped local and global dimensions */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
    PetscCall(MatSetSizes(C, n, m, N, M));
    PetscCall(MatSetType(C, ((PetscObject)A)->type_name));
    PetscCall(MatSetUp(C));
    /* stash the transpose context on C so that it is found again on MAT_REUSE_MATRIX */
    PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)C), &container));
    PetscCall(PetscNew(&kernelt));
    PetscCall(PetscContainerSetPointer(container, kernelt));
    PetscCall(PetscObjectCompose((PetscObject)C, "KernelTranspose", (PetscObject)container));
  } else {
    C = *B;
    PetscCall(PetscObjectQuery((PetscObject)C, "KernelTranspose", (PetscObject *)&container));
    PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call MatTranspose() with MAT_INITIAL_MATRIX first");
    PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
  }
  c         = (Mat_Htool *)C->data;
  c->dim    = a->dim;
  c->s      = a->s;
  c->kernel = GenEntriesTranspose; /* adapter that swaps rows/columns before calling the original kernel */
  if (kernelt->A != A) {
    /* keep a reference to the original Mat alive for the adapter context */
    PetscCall(MatDestroy(&kernelt->A));
    kernelt->A = A;
    PetscCall(PetscObjectReference((PetscObject)A));
  }
  kernelt->kernel    = a->kernel;
  kernelt->kernelctx = a->kernelctx;
  c->kernelctx       = kernelt;
  if (reuse == MAT_INITIAL_MATRIX) {
    /* the transpose swaps target and source coordinates */
    PetscCall(PetscMalloc1(N * c->dim, &c->gcoords_target));
    PetscCall(PetscArraycpy(c->gcoords_target, a->gcoords_source, N * c->dim));
    if (a->gcoords_target != a->gcoords_source) {
      PetscCall(PetscMalloc1(M * c->dim, &c->gcoords_source));
      PetscCall(PetscArraycpy(c->gcoords_source, a->gcoords_target, M * c->dim));
    } else c->gcoords_source = c->gcoords_target; /* shared coordinates: keep sharing in C as well */
    PetscCall(PetscCalloc2(M, &c->work_source, N, &c->work_target));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INITIAL_MATRIX) *B = C;
  PetscFunctionReturn(0);
}
753 
754 /*@C
755      MatCreateHtoolFromKernel - Creates a MATHTOOL from a user-supplied kernel.
756 
757    Input Parameters:
758 +     comm - MPI communicator
759 .     m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
760 .     n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
761 .     M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
762 .     N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
763 .     spacedim - dimension of the space coordinates
764 .     coords_target - coordinates of the target
765 .     coords_source - coordinates of the source
766 .     kernel - computational kernel (or NULL)
767 -     kernelctx - kernel context (if kernel is NULL, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)
768 
769    Output Parameter:
770 .     B - matrix
771 
772    Options Database Keys:
773 +     -mat_htool_min_cluster_size <PetscInt> - minimal leaf size in cluster tree
774 .     -mat_htool_max_block_size <PetscInt> - maximal number of coefficients in a dense block
775 .     -mat_htool_epsilon <PetscReal> - relative error in Frobenius norm when approximating a block
776 .     -mat_htool_eta <PetscReal> - admissibility condition tolerance
777 .     -mat_htool_min_target_depth <PetscInt> - minimal cluster tree depth associated with the rows
778 .     -mat_htool_min_source_depth <PetscInt> - minimal cluster tree depth associated with the columns
779 .     -mat_htool_compressor <sympartialACA, fullACA, SVD> - type of compression
-     -mat_htool_clustering <PCARegular, PCAGeometric, BoundingBox1Regular, BoundingBox1Geometric> - type of clustering
781 
782    Level: intermediate
783 
784 .seealso: `MatCreate()`, `MATHTOOL`, `PCSetCoordinates()`, `MatHtoolSetKernel()`, `MatHtoolCompressorType`, `MATH2OPUS`, `MatCreateH2OpusFromKernel()`
785 @*/
PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt spacedim, const PetscReal coords_target[], const PetscReal coords_source[], MatHtoolKernel kernel, void *kernelctx, Mat *B) {
  Mat        A;
  Mat_Htool *a;

  PetscFunctionBegin;
  PetscCall(MatCreate(comm, &A));
  PetscValidLogicalCollectiveInt(A, spacedim, 6);
  PetscValidRealPointer(coords_target, 7);
  PetscValidRealPointer(coords_source, 8);
  /* at least one of kernel/kernelctx must be set; a NULL kernel requires kernelctx to be a
     htool::VirtualGenerator<PetscScalar>* */
  if (!kernelctx) PetscValidFunction(kernel, 9);
  if (!kernel) PetscValidPointer(kernelctx, 10);
  PetscCall(MatSetSizes(A, m, n, M, N));
  PetscCall(MatSetType(A, MATHTOOL));
  PetscCall(MatSetUp(A));
  a            = (Mat_Htool *)A->data;
  a->dim       = spacedim;
  a->s         = 1.0; /* scaling factor, updated by MatScale_Htool */
  a->kernel    = kernel;
  a->kernelctx = kernelctx;
  /* gather all target coordinates on every rank: each rank writes its local slice into a
     zero-initialized global array, then an in-place MPI_SUM fills in the remote entries */
  PetscCall(PetscCalloc1(A->rmap->N * spacedim, &a->gcoords_target));
  PetscCall(PetscArraycpy(a->gcoords_target + A->rmap->rstart * spacedim, coords_target, A->rmap->n * spacedim));
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_target, A->rmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global target coordinates */
  if (coords_target != coords_source) {
    PetscCall(PetscCalloc1(A->cmap->N * spacedim, &a->gcoords_source));
    PetscCall(PetscArraycpy(a->gcoords_source + A->cmap->rstart * spacedim, coords_source, A->cmap->n * spacedim));
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_source, A->cmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global source coordinates */
  } else a->gcoords_source = a->gcoords_target; /* identical pointers: share one coordinate array */
  PetscCall(PetscCalloc2(A->cmap->N, &a->work_source, A->rmap->N, &a->work_target));
  *B = A;
  PetscFunctionReturn(0);
}
817 
818 /*MC
819      MATHTOOL = "htool" - A matrix type for hierarchical matrices using the Htool package.
820 
821   Use ./configure --download-htool to install PETSc to use Htool.
822 
823    Options Database Keys:
824 .     -mat_type htool - matrix type to "htool" during a call to MatSetFromOptions()
825 
826    Level: beginner
827 
828 .seealso: `MATH2OPUS`, `MATDENSE`, `MatCreateHtoolFromKernel()`, `MatHtoolSetKernel()`
829 M*/
PETSC_EXTERN PetscErrorCode MatCreate_Htool(Mat A) {
  Mat_Htool *a;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(A, &a)); /* zero-initialized, so fields not set below default to 0/NULL */
  A->data = (void *)a;
  PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATHTOOL));
  PetscCall(PetscMemzero(A->ops, sizeof(struct _MatOps)));
  A->ops->getdiagonal      = MatGetDiagonal_Htool;
  A->ops->getdiagonalblock = MatGetDiagonalBlock_Htool;
  A->ops->mult             = MatMult_Htool;
  A->ops->multadd          = MatMultAdd_Htool;
  A->ops->multtranspose    = MatMultTranspose_Htool;
  /* in real arithmetic the Hermitian transpose coincides with the plain transpose */
  if (!PetscDefined(USE_COMPLEX)) A->ops->multhermitiantranspose = MatMultTranspose_Htool;
  A->ops->increaseoverlap   = MatIncreaseOverlap_Htool;
  A->ops->createsubmatrices = MatCreateSubMatrices_Htool;
  A->ops->transpose         = MatTranspose_Htool;
  A->ops->destroy           = MatDestroy_Htool;
  A->ops->view              = MatView_Htool;
  A->ops->setfromoptions    = MatSetFromOptions_Htool;
  A->ops->scale             = MatScale_Htool;
  A->ops->getrow            = MatGetRow_Htool;
  A->ops->restorerow        = MatRestoreRow_Htool;
  A->ops->assemblyend       = MatAssemblyEnd_Htool;
  /* defaults, overridable through -mat_htool_* options (see MatCreateHtoolFromKernel()) */
  a->dim                    = 0;
  a->gcoords_target         = NULL;
  a->gcoords_source         = NULL;
  a->s                      = 1.0;
  a->bs[0]                  = 10;      /* minimal leaf size in the cluster tree */
  a->bs[1]                  = 1000000; /* maximal number of coefficients in a dense block */
  a->epsilon                = PetscSqrtReal(PETSC_SMALL);
  a->eta                    = 10.0;    /* admissibility condition tolerance */
  a->depth[0]               = 0;
  a->depth[1]               = 0;
  a->compressor             = MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA;
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", MatProductSetFromOptions_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", MatProductSetFromOptions_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", MatConvert_Htool_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", MatConvert_Htool_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", MatHtoolGetHierarchicalMat_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", MatHtoolSetKernel_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", MatHtoolGetPermutationSource_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", MatHtoolGetPermutationTarget_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", MatHtoolUsePermutation_Htool));
  PetscFunctionReturn(0);
}
876