xref: /petsc/src/mat/impls/htool/htool.cxx (revision 08401ef684002a709c6d3db98a0c9f54a8bcf1ec)
1 #include <../src/mat/impls/htool/htool.hpp> /*I "petscmat.h" I*/
2 #include <petscblaslapack.h>
3 #include <set>
4 
/* user-visible names for the compressors; indexed by MatHtoolCompressorType (see MatSetFromOptions_Htool()) */
const char *const MatHtoolCompressorTypes[] = { "sympartialACA", "fullACA", "SVD" };
/* user-visible names for the clustering strategies; indexed by MatHtoolClusteringType */
const char *const MatHtoolClusteringTypes[] = { "PCARegular", "PCAGeometric", "BoundingBox1Regular", "BoundingBox1Geometric" };
/* BibTeX entry registered via PetscCitationsRegister() on first assembly of a MATHTOOL */
const char HtoolCitation[] = "@article{marchand2020two,\n"
"  Author = {Marchand, Pierre and Claeys, Xavier and Jolivet, Pierre and Nataf, Fr\\'ed\\'eric and Tournier, Pierre-Henri},\n"
"  Title = {Two-level preconditioning for $h$-version boundary element approximation of hypersingular operator with {GenEO}},\n"
"  Year = {2020},\n"
"  Publisher = {Elsevier},\n"
"  Journal = {Numerische Mathematik},\n"
"  Volume = {146},\n"
"  Pages = {597--628},\n"
"  Url = {https://github.com/htool-ddm/htool}\n"
"}\n";
static PetscBool HtoolCite = PETSC_FALSE; /* flipped to PETSC_TRUE once the citation has been registered */
18 
19 static PetscErrorCode MatGetDiagonal_Htool(Mat A,Vec v)
20 {
21   Mat_Htool      *a = (Mat_Htool*)A->data;
22   PetscScalar    *x;
23   PetscBool      flg;
24 
25   PetscFunctionBegin;
26   PetscCall(MatHasCongruentLayouts(A,&flg));
27   PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only congruent layouts supported");
28   PetscCall(VecGetArrayWrite(v,&x));
29   a->hmatrix->copy_local_diagonal(x);
30   PetscCall(VecRestoreArrayWrite(v,&x));
31   PetscCall(VecScale(v,a->s));
32   PetscFunctionReturn(0);
33 }
34 
/* Returns the local diagonal block of A as a sequential dense matrix; the block is
   built once, cached on A via PetscObjectCompose("DiagonalBlock"), and reused afterwards. */
static PetscErrorCode MatGetDiagonalBlock_Htool(Mat A,Mat *b)
{
  Mat_Htool      *a = (Mat_Htool*)A->data;
  Mat            B;
  PetscScalar    *ptr;
  PetscBool      flg;

  PetscFunctionBegin;
  PetscCall(MatHasCongruentLayouts(A,&flg));
  PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only congruent layouts supported");
  PetscCall(PetscObjectQuery((PetscObject)A,"DiagonalBlock",(PetscObject*)&B)); /* same logic as in MatGetDiagonalBlock_MPIDense() */
  if (!B) {
    PetscCall(MatCreateDense(PETSC_COMM_SELF,A->rmap->n,A->rmap->n,A->rmap->n,A->rmap->n,NULL,&B));
    PetscCall(MatDenseGetArrayWrite(B,&ptr));
    a->hmatrix->copy_local_diagonal_block(ptr); /* fill the buffer directly from the hierarchical matrix */
    PetscCall(MatDenseRestoreArrayWrite(B,&ptr));
    PetscCall(MatPropagateSymmetryOptions(A,B));
    PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatScale(B,a->s)); /* apply the deferred scaling factor */
    PetscCall(PetscObjectCompose((PetscObject)A,"DiagonalBlock",(PetscObject)B));
    *b   = B;
    PetscCall(MatDestroy(&B)); /* the composition above holds a reference, so B stays alive */
  } else *b = B;
  PetscFunctionReturn(0);
}
61 
62 static PetscErrorCode MatMult_Htool(Mat A,Vec x,Vec y)
63 {
64   Mat_Htool         *a = (Mat_Htool*)A->data;
65   const PetscScalar *in;
66   PetscScalar       *out;
67 
68   PetscFunctionBegin;
69   PetscCall(VecGetArrayRead(x,&in));
70   PetscCall(VecGetArrayWrite(y,&out));
71   a->hmatrix->mvprod_local_to_local(in,out);
72   PetscCall(VecRestoreArrayRead(x,&in));
73   PetscCall(VecRestoreArrayWrite(y,&out));
74   PetscCall(VecScale(y,a->s));
75   PetscFunctionReturn(0);
76 }
77 
/* naive implementation of MatMultAdd() needed for FEM-BEM coupling via MATNEST */
static PetscErrorCode MatMultAdd_Htool(Mat A,Vec v1,Vec v2,Vec v3)
{
  Mat_Htool         *a = (Mat_Htool*)A->data;
  Vec               tmp;
  const PetscScalar scale = a->s; /* save the deferred scaling factor */

  PetscFunctionBegin;
  PetscCall(VecDuplicate(v2,&tmp));
  PetscCall(VecCopy(v2,v3)); /* no-op in MatMultAdd(bA->m[i][j],bx[j],by[i],by[i]) since VecCopy() checks for x == y */
  a->s = 1.0; /* set s to 1.0 since VecAXPY() may be used to scale the MatMult() output Vec */
  PetscCall(MatMult_Htool(A,v1,tmp)); /* tmp = A * v1, unscaled because a->s == 1.0 */
  PetscCall(VecAXPY(v3,scale,tmp));   /* v3 = v2 + scale * A * v1 */
  PetscCall(VecDestroy(&tmp));
  a->s = scale; /* set s back to its original value */
  /* NOTE(review): an error return from any call above leaves a->s == 1.0; presumably
     acceptable because PETSc errors propagate fatally — confirm if errors are ever recovered from */
  PetscFunctionReturn(0);
}
95 
96 static PetscErrorCode MatMultTranspose_Htool(Mat A,Vec x,Vec y)
97 {
98   Mat_Htool         *a = (Mat_Htool*)A->data;
99   const PetscScalar *in;
100   PetscScalar       *out;
101 
102   PetscFunctionBegin;
103   PetscCall(VecGetArrayRead(x,&in));
104   PetscCall(VecGetArrayWrite(y,&out));
105   a->hmatrix->mvprod_transp_local_to_local(in,out);
106   PetscCall(VecRestoreArrayRead(x,&in));
107   PetscCall(VecRestoreArrayWrite(y,&out));
108   PetscCall(VecScale(y,a->s));
109   PetscFunctionReturn(0);
110 }
111 
112 static PetscErrorCode MatIncreaseOverlap_Htool(Mat A,PetscInt is_max,IS is[],PetscInt ov)
113 {
114   std::set<PetscInt> set;
115   const PetscInt     *idx;
116   PetscInt           *oidx,size,bs[2];
117   PetscMPIInt        csize;
118 
119   PetscFunctionBegin;
120   PetscCall(MatGetBlockSizes(A,bs,bs+1));
121   if (bs[0] != bs[1]) bs[0] = 1;
122   for (PetscInt i=0; i<is_max; ++i) {
123     /* basic implementation that adds indices by shifting an IS by -ov, -ov+1..., -1, 1..., ov-1, ov */
124     /* needed to avoid subdomain matrices to replicate A since it is dense                           */
125     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)is[i]),&csize));
126     PetscCheck(csize == 1,PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported parallel IS");
127     PetscCall(ISGetSize(is[i],&size));
128     PetscCall(ISGetIndices(is[i],&idx));
129     for (PetscInt j=0; j<size; ++j) {
130       set.insert(idx[j]);
131       for (PetscInt k=1; k<=ov; ++k) {               /* for each layer of overlap      */
132         if (idx[j] - k >= 0) set.insert(idx[j] - k); /* do not insert negative indices */
133         if (idx[j] + k < A->rmap->N && idx[j] + k < A->cmap->N) set.insert(idx[j] + k); /* do not insert indices greater than the dimension of A */
134       }
135     }
136     PetscCall(ISRestoreIndices(is[i],&idx));
137     PetscCall(ISDestroy(is+i));
138     if (bs[0] > 1) {
139       for (std::set<PetscInt>::iterator it=set.cbegin(); it!=set.cend(); it++) {
140         std::vector<PetscInt> block(bs[0]);
141         std::iota(block.begin(),block.end(),(*it/bs[0])*bs[0]);
142         set.insert(block.cbegin(),block.cend());
143       }
144     }
145     size = set.size(); /* size with overlap */
146     PetscCall(PetscMalloc1(size,&oidx));
147     for (const PetscInt j : set) *oidx++ = j;
148     oidx -= size;
149     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,size,oidx,PETSC_OWN_POINTER,is+i));
150   }
151   PetscFunctionReturn(0);
152 }
153 
/* Extracts n sequential dense submatrices A(irow[i], icol[i]). When irow[i] == icol[i],
   the IS is sorted, and it fully contains the local diagonal block, the cached block from
   MatGetDiagonalBlock_Htool() is copied in directly and, for symmetric/Hermitian A, the
   off-diagonal parts are obtained by (Hermitian-)transposing already computed entries;
   otherwise the whole submatrix is reassembled entry-wise through the kernel. */
static PetscErrorCode MatCreateSubMatrices_Htool(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  Mat_Htool         *a = (Mat_Htool*)A->data;
  Mat               D,B,BT;
  const PetscScalar *copy;
  PetscScalar       *ptr;
  const PetscInt    *idxr,*idxc,*it;
  PetscInt          nrow,m,i;
  PetscBool         flg;

  PetscFunctionBegin;
  if (scall != MAT_REUSE_MATRIX) {
    PetscCall(PetscCalloc1(n,submat));
  }
  for (i=0; i<n; ++i) {
    PetscCall(ISGetLocalSize(irow[i],&nrow));
    PetscCall(ISGetLocalSize(icol[i],&m));
    PetscCall(ISGetIndices(irow[i],&idxr));
    PetscCall(ISGetIndices(icol[i],&idxc));
    if (scall != MAT_REUSE_MATRIX) {
      PetscCall(MatCreateDense(PETSC_COMM_SELF,nrow,m,nrow,m,NULL,(*submat)+i));
    }
    PetscCall(MatDenseGetArrayWrite((*submat)[i],&ptr));
    if (irow[i] == icol[i]) { /* same row and column IS? */
      PetscCall(MatHasCongruentLayouts(A,&flg));
      if (flg) {
        PetscCall(ISSorted(irow[i],&flg));
        if (flg) { /* sorted IS? */
          it = std::lower_bound(idxr,idxr+nrow,A->rmap->rstart); /* first index >= the local row range start */
          if (it != idxr+nrow && *it == A->rmap->rstart) { /* rmap->rstart in IS? */
            if (std::distance(idxr,it) + A->rmap->n <= nrow) { /* long enough IS to store the local diagonal block? */
              for (PetscInt j=0; j<A->rmap->n && flg; ++j) if (PetscUnlikely(it[j] != A->rmap->rstart+j)) flg = PETSC_FALSE;
              if (flg) { /* complete local diagonal block in IS? */
                /* fast extraction when the local diagonal block is part of the submatrix, e.g., for PCASM or PCHPDDM
                 *      [   B   C   E   ]
                 *  A = [   B   D   E   ]
                 *      [   B   F   E   ]
                 */
                m = std::distance(idxr,it); /* shift of the coefficient (0,0) of block D from above */
                PetscCall(MatGetDiagonalBlock_Htool(A,&D));
                PetscCall(MatDenseGetArrayRead(D,&copy));
                for (PetscInt k=0; k<A->rmap->n; ++k) {
                  PetscCall(PetscArraycpy(ptr+(m+k)*nrow+m,copy+k*A->rmap->n,A->rmap->n)); /* block D from above */
                }
                PetscCall(MatDenseRestoreArrayRead(D,&copy));
                if (m) { /* there are rows/columns before the diagonal block */
                  a->wrapper->copy_submatrix(nrow,m,idxr,idxc,ptr); /* vertical block B from above */
                  /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
                  if (A->symmetric || A->hermitian) {
                    /* B and BT are zero-copy views into ptr (leading dimension nrow) */
                    PetscCall(MatCreateDense(PETSC_COMM_SELF,A->rmap->n,m,A->rmap->n,m,ptr+m,&B));
                    PetscCall(MatDenseSetLDA(B,nrow));
                    PetscCall(MatCreateDense(PETSC_COMM_SELF,m,A->rmap->n,m,A->rmap->n,ptr+m*nrow,&BT));
                    PetscCall(MatDenseSetLDA(BT,nrow));
                    if (A->hermitian && PetscDefined(USE_COMPLEX)) {
                      PetscCall(MatHermitianTranspose(B,MAT_REUSE_MATRIX,&BT));
                    } else {
                      PetscCall(MatTranspose(B,MAT_REUSE_MATRIX,&BT));
                    }
                    PetscCall(MatDestroy(&B));
                    PetscCall(MatDestroy(&BT));
                  } else {
                    for (PetscInt k=0; k<A->rmap->n; ++k) { /* block C from above */
                      a->wrapper->copy_submatrix(m,1,idxr,idxc+m+k,ptr+(m+k)*nrow);
                    }
                  }
                }
                if (m+A->rmap->n != nrow) { /* there are rows/columns after the diagonal block */
                  a->wrapper->copy_submatrix(nrow,std::distance(it+A->rmap->n,idxr+nrow),idxr,idxc+m+A->rmap->n,ptr+(m+A->rmap->n)*nrow); /* vertical block E from above */
                  /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
                  if (A->symmetric || A->hermitian) {
                    PetscCall(MatCreateDense(PETSC_COMM_SELF,A->rmap->n,nrow-(m+A->rmap->n),A->rmap->n,nrow-(m+A->rmap->n),ptr+(m+A->rmap->n)*nrow+m,&B));
                    PetscCall(MatDenseSetLDA(B,nrow));
                    PetscCall(MatCreateDense(PETSC_COMM_SELF,nrow-(m+A->rmap->n),A->rmap->n,nrow-(m+A->rmap->n),A->rmap->n,ptr+m*nrow+m+A->rmap->n,&BT));
                    PetscCall(MatDenseSetLDA(BT,nrow));
                    if (A->hermitian && PetscDefined(USE_COMPLEX)) {
                      PetscCall(MatHermitianTranspose(B,MAT_REUSE_MATRIX,&BT));
                    } else {
                      PetscCall(MatTranspose(B,MAT_REUSE_MATRIX,&BT));
                    }
                    PetscCall(MatDestroy(&B));
                    PetscCall(MatDestroy(&BT));
                  } else {
                    for (PetscInt k=0; k<A->rmap->n; ++k) { /* block F from above */
                      a->wrapper->copy_submatrix(std::distance(it+A->rmap->n,idxr+nrow),1,it+A->rmap->n,idxc+m+k,ptr+(m+k)*nrow+m+A->rmap->n);
                    }
                  }
                }
              } /* complete local diagonal block not in IS */
            } else flg = PETSC_FALSE; /* IS not long enough to store the local diagonal block */
          } else flg = PETSC_FALSE; /* rmap->rstart not in IS */
        } /* unsorted IS */
      }
    } else flg = PETSC_FALSE; /* different row and column IS */
    if (!flg) a->wrapper->copy_submatrix(nrow,m,idxr,idxc,ptr); /* reassemble everything */
    PetscCall(ISRestoreIndices(irow[i],&idxr));
    PetscCall(ISRestoreIndices(icol[i],&idxc));
    PetscCall(MatDenseRestoreArrayWrite((*submat)[i],&ptr));
    PetscCall(MatAssemblyBegin((*submat)[i],MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd((*submat)[i],MAT_FINAL_ASSEMBLY));
    PetscCall(MatScale((*submat)[i],a->s)); /* apply the deferred scaling factor */
  }
  PetscFunctionReturn(0);
}
257 
/* Releases everything owned by a MATHTOOL: composed methods, the optional cached
   transpose-kernel container, coordinate arrays, work buffers, and the Htool objects. */
static PetscErrorCode MatDestroy_Htool(Mat A)
{
  Mat_Htool               *a = (Mat_Htool*)A->data;
  PetscContainer          container;
  MatHtoolKernelTranspose *kernelt;

  PetscFunctionBegin;
  PetscCall(PetscObjectChangeTypeName((PetscObject)A,NULL));
  /* unregister every method composed at creation time */
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_htool_seqdense_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_htool_mpidense_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatConvert_htool_seqdense_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatConvert_htool_mpidense_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolGetHierarchicalMat_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolSetKernel_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolGetPermutationSource_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolGetPermutationTarget_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolUsePermutation_C",NULL));
  PetscCall(PetscObjectQuery((PetscObject)A,"KernelTranspose",(PetscObject*)&container));
  if (container) { /* created in MatTranspose_Htool() */
    PetscCall(PetscContainerGetPointer(container,(void**)&kernelt));
    PetscCall(MatDestroy(&kernelt->A));
    PetscCall(PetscFree(kernelt));
    PetscCall(PetscContainerDestroy(&container));
    PetscCall(PetscObjectCompose((PetscObject)A,"KernelTranspose",NULL));
  }
  if (a->gcoords_source != a->gcoords_target) { /* source coordinates are only a separate allocation for rectangular problems */
    PetscCall(PetscFree(a->gcoords_source));
  }
  PetscCall(PetscFree(a->gcoords_target));
  PetscCall(PetscFree2(a->work_source,a->work_target));
  delete a->wrapper;
  delete a->hmatrix;
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(0);
}
293 
/* Prints a human-readable summary of the hierarchical matrix (symmetry, scaling,
   clustering/compression parameters, and Htool compression statistics).
   Only ASCII viewers are handled; other viewer types are silently ignored. */
static PetscErrorCode MatView_Htool(Mat A,PetscViewer pv)
{
  Mat_Htool      *a = (Mat_Htool*)A->data;
  PetscBool      flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pv,PETSCVIEWERASCII,&flg));
  if (flg) {
    PetscCall(PetscViewerASCIIPrintf(pv,"symmetry: %c\n",a->hmatrix->get_symmetry_type()));
    if (PetscAbsScalar(a->s-1.0) > PETSC_MACHINE_EPSILON) { /* only report the scaling if it is not (numerically) 1 */
#if defined(PETSC_USE_COMPLEX)
      PetscCall(PetscViewerASCIIPrintf(pv,"scaling: %g+%gi\n",(double)PetscRealPart(a->s),(double)PetscImaginaryPart(a->s)));
#else
      PetscCall(PetscViewerASCIIPrintf(pv,"scaling: %g\n",(double)a->s));
#endif
    }
    PetscCall(PetscViewerASCIIPrintf(pv,"minimum cluster size: %" PetscInt_FMT "\n",a->bs[0]));
    PetscCall(PetscViewerASCIIPrintf(pv,"maximum block size: %" PetscInt_FMT "\n",a->bs[1]));
    PetscCall(PetscViewerASCIIPrintf(pv,"epsilon: %g\n",(double)a->epsilon));
    PetscCall(PetscViewerASCIIPrintf(pv,"eta: %g\n",(double)a->eta));
    PetscCall(PetscViewerASCIIPrintf(pv,"minimum target depth: %" PetscInt_FMT "\n",a->depth[0]));
    PetscCall(PetscViewerASCIIPrintf(pv,"minimum source depth: %" PetscInt_FMT "\n",a->depth[1]));
    PetscCall(PetscViewerASCIIPrintf(pv,"compressor: %s\n",MatHtoolCompressorTypes[a->compressor]));
    PetscCall(PetscViewerASCIIPrintf(pv,"clustering: %s\n",MatHtoolClusteringTypes[a->clustering]));
    /* statistics gathered by Htool during the build */
    PetscCall(PetscViewerASCIIPrintf(pv,"compression ratio: %s\n",a->hmatrix->get_infos("Compression_ratio").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv,"space saving: %s\n",a->hmatrix->get_infos("Space_saving").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv,"number of dense (resp. low rank) matrices: %s (resp. %s)\n",a->hmatrix->get_infos("Number_of_dmat").c_str(),a->hmatrix->get_infos("Number_of_lrmat").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv,"(minimum, mean, maximum) dense block sizes: (%s, %s, %s)\n",a->hmatrix->get_infos("Dense_block_size_min").c_str(),a->hmatrix->get_infos("Dense_block_size_mean").c_str(),a->hmatrix->get_infos("Dense_block_size_max").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv,"(minimum, mean, maximum) low rank block sizes: (%s, %s, %s)\n",a->hmatrix->get_infos("Low_rank_block_size_min").c_str(),a->hmatrix->get_infos("Low_rank_block_size_mean").c_str(),a->hmatrix->get_infos("Low_rank_block_size_max").c_str()));
    PetscCall(PetscViewerASCIIPrintf(pv,"(minimum, mean, maximum) ranks: (%s, %s, %s)\n",a->hmatrix->get_infos("Rank_min").c_str(),a->hmatrix->get_infos("Rank_mean").c_str(),a->hmatrix->get_infos("Rank_max").c_str()));
  }
  PetscFunctionReturn(0);
}
327 
328 static PetscErrorCode MatScale_Htool(Mat A,PetscScalar s)
329 {
330   Mat_Htool *a = (Mat_Htool*)A->data;
331 
332   PetscFunctionBegin;
333   a->s *= s;
334   PetscFunctionReturn(0);
335 }
336 
337 /* naive implementation of MatGetRow() needed for MatConvert_Nest_AIJ() */
338 static PetscErrorCode MatGetRow_Htool(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
339 {
340   Mat_Htool      *a = (Mat_Htool*)A->data;
341   PetscInt       *idxc;
342   PetscBLASInt   one = 1,bn;
343 
344   PetscFunctionBegin;
345   if (nz) *nz = A->cmap->N;
346   if (idx || v) { /* even if !idx, need to set idxc for htool::copy_submatrix() */
347     PetscCall(PetscMalloc1(A->cmap->N,&idxc));
348     for (PetscInt i=0; i<A->cmap->N; ++i) idxc[i] = i;
349   }
350   if (idx) *idx = idxc;
351   if (v) {
352     PetscCall(PetscMalloc1(A->cmap->N,v));
353     if (a->wrapper) a->wrapper->copy_submatrix(1,A->cmap->N,&row,idxc,*v);
354     else reinterpret_cast<htool::VirtualGenerator<PetscScalar>*>(a->kernelctx)->copy_submatrix(1,A->cmap->N,&row,idxc,*v);
355     PetscCall(PetscBLASIntCast(A->cmap->N,&bn));
356     PetscStackCallBLAS("BLASscal",BLASscal_(&bn,&a->s,*v,&one));
357   }
358   if (!idx) {
359     PetscCall(PetscFree(idxc));
360   }
361   PetscFunctionReturn(0);
362 }
363 
364 static PetscErrorCode MatRestoreRow_Htool(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
365 {
366   PetscFunctionBegin;
367   if (nz) *nz = 0;
368   if (idx) {
369     PetscCall(PetscFree(*idx));
370   }
371   if (v) {
372     PetscCall(PetscFree(*v));
373   }
374   PetscFunctionReturn(0);
375 }
376 
/* Processes the -mat_htool_* command-line options controlling clustering,
   compression, and admissibility; values take effect at the next MatAssemblyEnd(). */
static PetscErrorCode MatSetFromOptions_Htool(PetscOptionItems *PetscOptionsObject,Mat A)
{
  Mat_Htool      *a = (Mat_Htool*)A->data;
  PetscInt       n;
  PetscBool      flg;

  PetscFunctionBegin;
  PetscCall(PetscOptionsHead(PetscOptionsObject,"Htool options"));
  PetscCall(PetscOptionsInt("-mat_htool_min_cluster_size","Minimal leaf size in cluster tree",NULL,a->bs[0],a->bs,NULL));
  PetscCall(PetscOptionsInt("-mat_htool_max_block_size","Maximal number of coefficients in a dense block",NULL,a->bs[1],a->bs + 1,NULL));
  PetscCall(PetscOptionsReal("-mat_htool_epsilon","Relative error in Frobenius norm when approximating a block",NULL,a->epsilon,&a->epsilon,NULL));
  PetscCall(PetscOptionsReal("-mat_htool_eta","Admissibility condition tolerance",NULL,a->eta,&a->eta,NULL));
  PetscCall(PetscOptionsInt("-mat_htool_min_target_depth","Minimal cluster tree depth associated with the rows",NULL,a->depth[0],a->depth,NULL));
  PetscCall(PetscOptionsInt("-mat_htool_min_source_depth","Minimal cluster tree depth associated with the columns",NULL,a->depth[1],a->depth + 1,NULL));
  n = 0; /* default list index: sympartialACA */
  PetscCall(PetscOptionsEList("-mat_htool_compressor","Type of compression","MatHtoolCompressorType",MatHtoolCompressorTypes,PETSC_STATIC_ARRAY_LENGTH(MatHtoolCompressorTypes),MatHtoolCompressorTypes[MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA],&n,&flg));
  if (flg) a->compressor = MatHtoolCompressorType(n);
  n = 0; /* default list index: PCARegular */
  PetscCall(PetscOptionsEList("-mat_htool_clustering","Type of clustering","MatHtoolClusteringType",MatHtoolClusteringTypes,PETSC_STATIC_ARRAY_LENGTH(MatHtoolClusteringTypes),MatHtoolClusteringTypes[MAT_HTOOL_CLUSTERING_PCA_REGULAR],&n,&flg));
  if (flg) a->clustering = MatHtoolClusteringType(n);
  PetscCall(PetscOptionsTail());
  PetscFunctionReturn(0);
}
400 
/* (Re)builds the Htool hierarchical matrix: constructs the target (row) cluster tree,
   an optional separate source (column) cluster tree for rectangular problems, selects
   the compressor, and assembles the H-matrix through either the C kernel wrapper or a
   user-supplied htool::VirtualGenerator. Any previous wrapper/H-matrix is discarded. */
static PetscErrorCode MatAssemblyEnd_Htool(Mat A,MatAssemblyType type)
{
  Mat_Htool                                                    *a = (Mat_Htool*)A->data;
  const PetscInt                                               *ranges;
  PetscInt                                                     *offset;
  PetscMPIInt                                                  size;
  /* S: symmetry flag passed to Htool ('H' Hermitian, 'S' symmetric, 'N' none); uplo: stored triangle */
  char                                                         S = PetscDefined(USE_COMPLEX) && A->hermitian ? 'H' : (A->symmetric ? 'S' : 'N'),uplo = S == 'N' ? 'N' : 'U';
  htool::VirtualGenerator<PetscScalar>                         *generator = nullptr;
  std::shared_ptr<htool::VirtualCluster>                       t,s = nullptr;
  std::shared_ptr<htool::VirtualLowRankGenerator<PetscScalar>> compressor = nullptr;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(HtoolCitation,&HtoolCite));
  /* discard any previously built objects (reassembly) */
  delete a->wrapper;
  delete a->hmatrix;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  /* offset[2*i] = first row of rank i, offset[2*i+1] = its number of local rows (layout expected by Htool) */
  PetscCall(PetscMalloc1(2*size,&offset));
  PetscCall(MatGetOwnershipRanges(A,&ranges));
  for (PetscInt i=0; i<size; ++i) {
    offset[2*i] = ranges[i];
    offset[2*i+1] = ranges[i+1] - ranges[i];
  }
  switch (a->clustering) {
  case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
    t = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
    break;
  case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
    t = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
    break;
  case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
    t = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::RegularSplitting>>>(a->dim);
    break;
  default: /* MAT_HTOOL_CLUSTERING_PCA_REGULAR */
    t = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::RegularSplitting>>>(a->dim);
  }
  t->set_minclustersize(a->bs[0]);
  t->build(A->rmap->N,a->gcoords_target,offset);
  if (a->kernel) a->wrapper = new WrapperHtool(A->rmap->N,A->cmap->N,a->dim,a->kernel,a->kernelctx);
  else {
    a->wrapper = NULL; /* no C kernel: kernelctx is assumed to be a htool::VirtualGenerator (see MatHtoolSetKernel()) */
    generator = reinterpret_cast<htool::VirtualGenerator<PetscScalar>*>(a->kernelctx);
  }
  if (a->gcoords_target != a->gcoords_source) { /* rectangular problem: build a distinct source cluster tree */
    PetscCall(MatGetOwnershipRangesColumn(A,&ranges));
    for (PetscInt i=0; i<size; ++i) {
      offset[2*i] = ranges[i];
      offset[2*i+1] = ranges[i+1] - ranges[i];
    }
    switch (a->clustering) {
    case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
      s = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
      break;
    case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
      s = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
      break;
    case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
      s = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::RegularSplitting>>>(a->dim);
      break;
    default:
      s = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::RegularSplitting>>>(a->dim);
    }
    s->set_minclustersize(a->bs[0]);
    s->build(A->cmap->N,a->gcoords_source,offset);
    S = uplo = 'N'; /* a rectangular operator cannot be symmetric/Hermitian */
  }
  PetscCall(PetscFree(offset));
  switch (a->compressor) {
  case MAT_HTOOL_COMPRESSOR_FULL_ACA:
    compressor = std::make_shared<htool::fullACA<PetscScalar>>();
    break;
  case MAT_HTOOL_COMPRESSOR_SVD:
    compressor = std::make_shared<htool::SVD<PetscScalar>>();
    break;
  default: /* MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA */
    compressor = std::make_shared<htool::sympartialACA<PetscScalar>>();
  }
  a->hmatrix = dynamic_cast<htool::VirtualHMatrix<PetscScalar>*>(new htool::HMatrix<PetscScalar>(t,s ? s : t,a->epsilon,a->eta,S,uplo));
  a->hmatrix->set_compression(compressor);
  a->hmatrix->set_maxblocksize(a->bs[1]);
  a->hmatrix->set_mintargetdepth(a->depth[0]);
  a->hmatrix->set_minsourcedepth(a->depth[1]);
  if (s) a->hmatrix->build(a->wrapper ? *a->wrapper : *generator,a->gcoords_target,a->gcoords_source);
  else   a->hmatrix->build(a->wrapper ? *a->wrapper : *generator,a->gcoords_target);
  PetscFunctionReturn(0);
}
486 
/* Numeric phase of C = A*B or C = A^T*B with A a MATHTOOL and B (and C) dense:
   computes the product column-block-wise via Htool, then applies the deferred scaling. */
static PetscErrorCode MatProductNumeric_Htool(Mat C)
{
  Mat_Product       *product = C->product;
  Mat_Htool         *a = (Mat_Htool*)product->A->data;
  const PetscScalar *in;
  PetscScalar       *out;
  PetscInt          N,lda;

  PetscFunctionBegin;
  MatCheckProduct(C,1);
  PetscCall(MatGetSize(C,NULL,&N)); /* N = number of columns = number of right-hand sides */
  PetscCall(MatDenseGetLDA(C,&lda));
  /* Htool writes contiguous columns, so a padded leading dimension cannot be handled */
  PetscCheck(lda == C->rmap->n,PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")",lda,C->rmap->n);
  PetscCall(MatDenseGetArrayRead(product->B,&in));
  PetscCall(MatDenseGetArrayWrite(C,&out));
  switch (product->type) {
  case MATPRODUCT_AB:
    a->hmatrix->mvprod_local_to_local(in,out,N);
    break;
  case MATPRODUCT_AtB:
    a->hmatrix->mvprod_transp_local_to_local(in,out,N);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatProductType %s is not supported",MatProductTypes[product->type]);
  }
  PetscCall(MatDenseRestoreArrayWrite(C,&out));
  PetscCall(MatDenseRestoreArrayRead(product->B,&in));
  PetscCall(MatScale(C,a->s)); /* apply the deferred scaling factor of A */
  PetscFunctionReturn(0);
}
517 
518 static PetscErrorCode MatProductSymbolic_Htool(Mat C)
519 {
520   Mat_Product    *product = C->product;
521   Mat            A,B;
522   PetscBool      flg;
523 
524   PetscFunctionBegin;
525   MatCheckProduct(C,1);
526   A = product->A;
527   B = product->B;
528   PetscCall(PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,""));
529   PetscCheck(flg,PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"MatProduct_AB not supported for %s",((PetscObject)product->B)->type_name);
530   switch (product->type) {
531   case MATPRODUCT_AB:
532     if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) {
533       PetscCall(MatSetSizes(C,A->rmap->n,B->cmap->n,A->rmap->N,B->cmap->N));
534     }
535     break;
536   case MATPRODUCT_AtB:
537     if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) {
538       PetscCall(MatSetSizes(C,A->cmap->n,B->cmap->n,A->cmap->N,B->cmap->N));
539     }
540     break;
541   default:
542     SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"ProductType %s is not supported",MatProductTypes[product->type]);
543   }
544   PetscCall(MatSetType(C,MATDENSE));
545   PetscCall(MatSetUp(C));
546   PetscCall(MatSetOption(C,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE));
547   PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
548   PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));
549   C->ops->productsymbolic = NULL;
550   C->ops->productnumeric = MatProductNumeric_Htool;
551   PetscFunctionReturn(0);
552 }
553 
/* Installs the symbolic callback for the two product types MATHTOOL supports (AB and AtB);
   anything else is left without a symbolic routine so MatProduct errors out upstream. */
static PetscErrorCode MatProductSetFromOptions_Htool(Mat C)
{
  PetscFunctionBegin;
  MatCheckProduct(C,1);
  if (C->product->type == MATPRODUCT_AB || C->product->type == MATPRODUCT_AtB) C->ops->productsymbolic = MatProductSymbolic_Htool;
  PetscFunctionReturn(0);
}
561 
562 static PetscErrorCode MatHtoolGetHierarchicalMat_Htool(Mat A,const htool::VirtualHMatrix<PetscScalar> **hmatrix)
563 {
564   Mat_Htool *a = (Mat_Htool*)A->data;
565 
566   PetscFunctionBegin;
567   *hmatrix = a->hmatrix;
568   PetscFunctionReturn(0);
569 }
570 
/*@C
     MatHtoolGetHierarchicalMat - Retrieves the opaque pointer to a Htool virtual matrix stored in a MATHTOOL.

   Input Parameter:
.     A - hierarchical matrix

   Output Parameter:
.     hmatrix - opaque pointer to a Htool virtual matrix (owned by A, do not delete)

   Level: advanced

.seealso:  MATHTOOL
@*/
PETSC_EXTERN PetscErrorCode MatHtoolGetHierarchicalMat(Mat A,const htool::VirtualHMatrix<PetscScalar> **hmatrix)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(hmatrix,2);
  PetscTryMethod(A,"MatHtoolGetHierarchicalMat_C",(Mat,const htool::VirtualHMatrix<PetscScalar>**),(A,hmatrix)); /* no-op on non-MATHTOOL matrices */
  PetscFunctionReturn(0);
}
592 
593 static PetscErrorCode MatHtoolSetKernel_Htool(Mat A,MatHtoolKernel kernel,void *kernelctx)
594 {
595   Mat_Htool *a = (Mat_Htool*)A->data;
596 
597   PetscFunctionBegin;
598   a->kernel    = kernel;
599   a->kernelctx = kernelctx;
600   delete a->wrapper;
601   if (a->kernel) a->wrapper = new WrapperHtool(A->rmap->N,A->cmap->N,a->dim,a->kernel,a->kernelctx);
602   PetscFunctionReturn(0);
603 }
604 
/*@C
     MatHtoolSetKernel - Sets the kernel and context used for the assembly of a MATHTOOL.

   Input Parameters:
+     A - hierarchical matrix
.     kernel - computational kernel (or NULL)
-     kernelctx - kernel context (if kernel is NULL, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)

   Level: advanced

.seealso:  MATHTOOL, MatCreateHtoolFromKernel()
@*/
PETSC_EXTERN PetscErrorCode MatHtoolSetKernel(Mat A,MatHtoolKernel kernel,void *kernelctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* at least one of kernel/kernelctx must be usable: validate whichever compensates for the missing one */
  if (!kernelctx) PetscValidFunction(kernel,2);
  if (!kernel)    PetscValidPointer(kernelctx,3);
  PetscTryMethod(A,"MatHtoolSetKernel_C",(Mat,MatHtoolKernel,void*),(A,kernel,kernelctx)); /* no-op on non-MATHTOOL matrices */
  PetscFunctionReturn(0);
}
626 
627 static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A,IS* is)
628 {
629   Mat_Htool             *a = (Mat_Htool*)A->data;
630   std::vector<PetscInt> source;
631 
632   PetscFunctionBegin;
633   source = a->hmatrix->get_source_cluster()->get_local_perm();
634   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A),source.size(),source.data(),PETSC_COPY_VALUES,is));
635   PetscCall(ISSetPermutation(*is));
636   PetscFunctionReturn(0);
637 }
638 
639 /*@C
640      MatHtoolGetPermutationSource - Gets the permutation associated to the source cluster.
641 
642    Input Parameter:
643 .     A - hierarchical matrix
644 
645    Output Parameter:
646 .     is - permutation
647 
648    Level: advanced
649 
650 .seealso:  MATHTOOL, MatHtoolGetPermutationTarget(), MatHtoolUsePermutation()
651 @*/
652 PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationSource(Mat A,IS* is)
653 {
654   PetscFunctionBegin;
655   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
656   if (!is) PetscValidPointer(is,2);
657   PetscTryMethod(A,"MatHtoolGetPermutationSource_C",(Mat,IS*),(A,is));
658   PetscFunctionReturn(0);
659 }
660 
661 static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A,IS* is)
662 {
663   Mat_Htool             *a = (Mat_Htool*)A->data;
664   std::vector<PetscInt> target;
665 
666   PetscFunctionBegin;
667   target = a->hmatrix->get_target_cluster()->get_local_perm();
668   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A),target.size(),target.data(),PETSC_COPY_VALUES,is));
669   PetscCall(ISSetPermutation(*is));
670   PetscFunctionReturn(0);
671 }
672 
673 /*@C
674      MatHtoolGetPermutationTarget - Gets the permutation associated to the target cluster.
675 
676    Input Parameter:
677 .     A - hierarchical matrix
678 
679    Output Parameter:
680 .     is - permutation
681 
682    Level: advanced
683 
684 .seealso:  MATHTOOL, MatHtoolGetPermutationSource(), MatHtoolUsePermutation()
685 @*/
686 PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationTarget(Mat A,IS* is)
687 {
688   PetscFunctionBegin;
689   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
690   if (!is) PetscValidPointer(is,2);
691   PetscTryMethod(A,"MatHtoolGetPermutationTarget_C",(Mat,IS*),(A,is));
692   PetscFunctionReturn(0);
693 }
694 
695 static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A,PetscBool use)
696 {
697   Mat_Htool *a = (Mat_Htool*)A->data;
698 
699   PetscFunctionBegin;
700   a->hmatrix->set_use_permutation(use);
701   PetscFunctionReturn(0);
702 }
703 
/*@C
     MatHtoolUsePermutation - Sets whether MATHTOOL should permute input (resp. output) vectors following its internal source (resp. target) permutation.

   Input Parameters:
+     A - hierarchical matrix
-     use - Boolean value

   Level: advanced

.seealso:  MATHTOOL, MatHtoolGetPermutationSource(), MatHtoolGetPermutationTarget()
@*/
PETSC_EXTERN PetscErrorCode MatHtoolUsePermutation(Mat A,PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidLogicalCollectiveBool(A,use,2);
  /* dispatch to the implementation composed on A; silently does nothing if A is not MATHTOOL */
  PetscTryMethod(A,"MatHtoolUsePermutation_C",(Mat,PetscBool),(A,use));
  PetscFunctionReturn(0);
}
723 
724 static PetscErrorCode MatConvert_Htool_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
725 {
726   Mat            C;
727   Mat_Htool      *a = (Mat_Htool*)A->data;
728   PetscInt       lda;
729   PetscScalar    *array;
730 
731   PetscFunctionBegin;
732   if (reuse == MAT_REUSE_MATRIX) {
733     C = *B;
734     PetscCheck(C->rmap->n == A->rmap->n && C->cmap->N == A->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible dimensions");
735     PetscCall(MatDenseGetLDA(C,&lda));
736     PetscCheck(lda == C->rmap->n,PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")",lda,C->rmap->n);
737   } else {
738     PetscCall(MatCreate(PetscObjectComm((PetscObject)A),&C));
739     PetscCall(MatSetSizes(C,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N));
740     PetscCall(MatSetType(C,MATDENSE));
741     PetscCall(MatSetUp(C));
742     PetscCall(MatSetOption(C,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE));
743   }
744   PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
745   PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));
746   PetscCall(MatDenseGetArrayWrite(C,&array));
747   a->hmatrix->copy_local_dense_perm(array);
748   PetscCall(MatDenseRestoreArrayWrite(C,&array));
749   PetscCall(MatScale(C,a->s));
750   if (reuse == MAT_INPLACE_MATRIX) {
751     PetscCall(MatHeaderReplace(A,&C));
752   } else *B = C;
753   PetscFunctionReturn(0);
754 }
755 
756 static PetscErrorCode GenEntriesTranspose(PetscInt sdim,PetscInt M,PetscInt N,const PetscInt *rows,const PetscInt *cols,PetscScalar *ptr,void *ctx)
757 {
758   MatHtoolKernelTranspose *generator = (MatHtoolKernelTranspose*)ctx;
759   PetscScalar             *tmp;
760 
761   PetscFunctionBegin;
762   generator->kernel(sdim,N,M,cols,rows,ptr,generator->kernelctx);
763   PetscCall(PetscMalloc1(M*N,&tmp));
764   PetscCall(PetscArraycpy(tmp,ptr,M*N));
765   for (PetscInt i=0; i<M; ++i) {
766     for (PetscInt j=0; j<N; ++j) ptr[i+j*M] = tmp[j+i*N];
767   }
768   PetscCall(PetscFree(tmp));
769   PetscFunctionReturn(0);
770 }
771 
/* naive implementation which keeps a reference to the original Mat */
static PetscErrorCode MatTranspose_Htool(Mat A,MatReuse reuse,Mat *B)
{
  Mat                     C;
  Mat_Htool               *a = (Mat_Htool*)A->data,*c;
  PetscInt                M = A->rmap->N,N = A->cmap->N,m = A->rmap->n,n = A->cmap->n;
  PetscContainer          container;
  MatHtoolKernelTranspose *kernelt;

  PetscFunctionBegin;
  PetscCheck(reuse != MAT_INPLACE_MATRIX,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatTranspose() with MAT_INPLACE_MATRIX not supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    /* create C with row/column layouts swapped and attach a container that carries the
       transposed-kernel state across subsequent MAT_REUSE_MATRIX calls */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A),&C));
    PetscCall(MatSetSizes(C,n,m,N,M));
    PetscCall(MatSetType(C,((PetscObject)A)->type_name));
    PetscCall(MatSetUp(C));
    PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)C),&container));
    PetscCall(PetscNew(&kernelt));
    PetscCall(PetscContainerSetPointer(container,kernelt));
    PetscCall(PetscObjectCompose((PetscObject)C,"KernelTranspose",(PetscObject)container));
  } else {
    /* MAT_REUSE_MATRIX: recover the container composed during the initial call */
    C = *B;
    PetscCall(PetscObjectQuery((PetscObject)C,"KernelTranspose",(PetscObject*)&container));
    PetscCheck(container,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call MatTranspose() with MAT_INITIAL_MATRIX first");
    PetscCall(PetscContainerGetPointer(container,(void**)&kernelt));
  }
  /* C computes its entries via GenEntriesTranspose, which calls A's kernel with rows/columns swapped */
  c                  = (Mat_Htool*)C->data;
  c->dim             = a->dim;
  c->s               = a->s;
  c->kernel          = GenEntriesTranspose;
  if (kernelt->A != A) {
    /* keep a reference on A alive as long as C may call back into its kernel;
       NOTE(review): this reference is presumably released when C is destroyed — confirm in MatDestroy_Htool */
    PetscCall(MatDestroy(&kernelt->A));
    kernelt->A       = A;
    PetscCall(PetscObjectReference((PetscObject)A));
  }
  kernelt->kernel    = a->kernel;
  kernelt->kernelctx = a->kernelctx;
  c->kernelctx       = kernelt;
  if (reuse == MAT_INITIAL_MATRIX) {
    /* swap coordinate sets: C's targets are A's sources and vice versa */
    PetscCall(PetscMalloc1(N*c->dim,&c->gcoords_target));
    PetscCall(PetscArraycpy(c->gcoords_target,a->gcoords_source,N*c->dim));
    if (a->gcoords_target != a->gcoords_source) {
      PetscCall(PetscMalloc1(M*c->dim,&c->gcoords_source));
      PetscCall(PetscArraycpy(c->gcoords_source,a->gcoords_target,M*c->dim));
    } else c->gcoords_source = c->gcoords_target; /* shared array when A's target and source coincide */
    PetscCall(PetscCalloc2(M,&c->work_source,N,&c->work_target));
  }
  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INITIAL_MATRIX) *B = C;
  PetscFunctionReturn(0);
}
824 
/*@C
     MatCreateHtoolFromKernel - Creates a MATHTOOL from a user-supplied kernel.

   Input Parameters:
+     comm - MPI communicator
.     m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
.     n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
.     M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.     N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.     spacedim - dimension of the space coordinates
.     coords_target - coordinates of the target
.     coords_source - coordinates of the source
.     kernel - computational kernel (or NULL)
-     kernelctx - kernel context (if kernel is NULL, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)

   Output Parameter:
.     B - matrix

   Options Database Keys:
+     -mat_htool_min_cluster_size <PetscInt> - minimal leaf size in cluster tree
.     -mat_htool_max_block_size <PetscInt> - maximal number of coefficients in a dense block
.     -mat_htool_epsilon <PetscReal> - relative error in Frobenius norm when approximating a block
.     -mat_htool_eta <PetscReal> - admissibility condition tolerance
.     -mat_htool_min_target_depth <PetscInt> - minimal cluster tree depth associated with the rows
.     -mat_htool_min_source_depth <PetscInt> - minimal cluster tree depth associated with the columns
.     -mat_htool_compressor <sympartialACA, fullACA, SVD> - type of compression
-     -mat_htool_clustering <PCARegular, PCAGeometric, BoundingBox1Regular, BoundingBox1Geometric> - type of clustering

   Level: intermediate

.seealso:  MatCreate(), MATHTOOL, PCSetCoordinates(), MatHtoolSetKernel(), MatHtoolCompressorType, MATH2OPUS, MatCreateH2OpusFromKernel()
@*/
PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt spacedim,const PetscReal coords_target[],const PetscReal coords_source[],MatHtoolKernel kernel,void *kernelctx,Mat *B)
{
  Mat            A;
  Mat_Htool      *a;

  PetscFunctionBegin;
  PetscCall(MatCreate(comm,&A));
  PetscValidLogicalCollectiveInt(A,spacedim,6);
  PetscValidRealPointer(coords_target,7);
  PetscValidRealPointer(coords_source,8);
  /* at least one of kernel and kernelctx must be non-NULL (same contract as MatHtoolSetKernel()) */
  if (!kernelctx) PetscValidFunction(kernel,9);
  if (!kernel)    PetscValidPointer(kernelctx,10);
  PetscCall(MatSetSizes(A,m,n,M,N));
  PetscCall(MatSetType(A,MATHTOOL));
  PetscCall(MatSetUp(A));
  a            = (Mat_Htool*)A->data;
  a->dim       = spacedim;
  a->s         = 1.0;
  a->kernel    = kernel;
  a->kernelctx = kernelctx;
  /* copy the locally-owned target coordinates into their slot of a zeroed global array,
     then sum-reduce so every process ends up with the full set */
  PetscCall(PetscCalloc1(A->rmap->N*spacedim,&a->gcoords_target));
  PetscCall(PetscArraycpy(a->gcoords_target+A->rmap->rstart*spacedim,coords_target,A->rmap->n*spacedim));
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE,a->gcoords_target,A->rmap->N*spacedim,MPIU_REAL,MPI_SUM,PetscObjectComm((PetscObject)A))); /* global target coordinates */
  if (coords_target != coords_source) {
    PetscCall(PetscCalloc1(A->cmap->N*spacedim,&a->gcoords_source));
    PetscCall(PetscArraycpy(a->gcoords_source+A->cmap->rstart*spacedim,coords_source,A->cmap->n*spacedim));
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE,a->gcoords_source,A->cmap->N*spacedim,MPIU_REAL,MPI_SUM,PetscObjectComm((PetscObject)A))); /* global source coordinates */
  } else a->gcoords_source = a->gcoords_target; /* share the array when target and source coordinates coincide */
  PetscCall(PetscCalloc2(A->cmap->N,&a->work_source,A->rmap->N,&a->work_target));
  *B = A;
  PetscFunctionReturn(0);
}
889 
890 /*MC
891      MATHTOOL = "htool" - A matrix type for hierarchical matrices using the Htool package.
892 
893   Use ./configure --download-htool to install PETSc to use Htool.
894 
895    Options Database Keys:
.     -mat_type htool - sets the matrix type to "htool" during a call to MatSetFromOptions()
897 
898    Level: beginner
899 
900 .seealso: MATH2OPUS, MATDENSE, MatCreateHtoolFromKernel(), MatHtoolSetKernel()
901 M*/
PETSC_EXTERN PetscErrorCode MatCreate_Htool(Mat A)
{
  Mat_Htool *a;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(A,&a)); /* allocates and zero-initializes the implementation struct */
  A->data = (void*)a;
  PetscCall(PetscObjectChangeTypeName((PetscObject)A,MATHTOOL));
  PetscCall(PetscMemzero(A->ops,sizeof(struct _MatOps)));
  /* method table: operations implemented by the MATHTOOL type */
  A->ops->getdiagonal       = MatGetDiagonal_Htool;
  A->ops->getdiagonalblock  = MatGetDiagonalBlock_Htool;
  A->ops->mult              = MatMult_Htool;
  A->ops->multadd           = MatMultAdd_Htool;
  A->ops->multtranspose     = MatMultTranspose_Htool;
  /* for real scalars the Hermitian transpose coincides with the transpose */
  if (!PetscDefined(USE_COMPLEX)) A->ops->multhermitiantranspose = MatMultTranspose_Htool;
  A->ops->increaseoverlap   = MatIncreaseOverlap_Htool;
  A->ops->createsubmatrices = MatCreateSubMatrices_Htool;
  A->ops->transpose         = MatTranspose_Htool;
  A->ops->destroy           = MatDestroy_Htool;
  A->ops->view              = MatView_Htool;
  A->ops->setfromoptions    = MatSetFromOptions_Htool;
  A->ops->scale             = MatScale_Htool;
  A->ops->getrow            = MatGetRow_Htool;
  A->ops->restorerow        = MatRestoreRow_Htool;
  A->ops->assemblyend       = MatAssemblyEnd_Htool;
  /* default option values (overridable through -mat_htool_* options) */
  a->dim                    = 0;
  a->gcoords_target         = NULL;
  a->gcoords_source         = NULL;
  a->s                      = 1.0;
  a->bs[0]                  = 10;      /* minimal cluster size */
  a->bs[1]                  = 1000000; /* maximal dense block size */
  a->epsilon                = PetscSqrtReal(PETSC_SMALL);
  a->eta                    = 10.0;
  a->depth[0]               = 0;
  a->depth[1]               = 0;
  a->compressor             = MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA;
  /* NOTE(review): the clustering field is not set here and thus stays zero-initialized by
     PetscNewLog() — presumably the first MatHtoolClusteringType value; confirm against MatSetFromOptions_Htool */
  /* composed methods providing type-specific functionality for MATHTOOL */
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_htool_seqdense_C",MatProductSetFromOptions_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_htool_mpidense_C",MatProductSetFromOptions_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatConvert_htool_seqdense_C",MatConvert_Htool_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatConvert_htool_mpidense_C",MatConvert_Htool_Dense));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolGetHierarchicalMat_C",MatHtoolGetHierarchicalMat_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolSetKernel_C",MatHtoolSetKernel_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolGetPermutationSource_C",MatHtoolGetPermutationSource_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolGetPermutationTarget_C",MatHtoolGetPermutationTarget_Htool));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHtoolUsePermutation_C",MatHtoolUsePermutation_Htool));
  PetscFunctionReturn(0);
}
949