xref: /petsc/src/mat/graphops/partition/impls/chaco/chaco.c (revision 1b37a2a7cc4a4fb30c3e967db1c694c0a1013f51)
1 #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
2 
3 #if defined(PETSC_HAVE_UNISTD_H)
4   #include <unistd.h>
5 #endif
6 
7 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
8   #include <chaco.h>
9 #else
10 /* Older versions of Chaco do not have an include file */
11 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts, float *ewgts, float *x, float *y, float *z, char *outassignname, char *outfilename, short *assignment, int architecture, int ndims_tot, int mesh_dims[3], double *goal, int global_method, int local_method, int rqi_flag, int vmax, int ndims, double eigtol, long seed);
12 #endif
13 
14 extern int FREE_GRAPH;
15 
16 /*
17 int       nvtxs;                number of vertices in full graph
18 int      *start;                start of edge list for each vertex
19 int      *adjacency;            edge list data
20 int      *vwgts;                weights for all vertices
21 float    *ewgts;                weights for all edges
22 float    *x, *y, *z;            coordinates for inertial method
23 char     *outassignname;        name of assignment output file
24 char     *outfilename;          output file name
25 short    *assignment;           set number of each vtx (length n)
26 int       architecture;         0 => hypercube, d => d-dimensional mesh
27 int       ndims_tot;            total number of cube dimensions to divide
28 int       mesh_dims[3];         dimensions of mesh of processors
29 double   *goal;                 desired set sizes for each set
30 int       global_method;        global partitioning algorithm
31 int       local_method;         local partitioning algorithm
32 int       rqi_flag;             should I use RQI/Symmlq eigensolver?
33 int       vmax;                 how many vertices to coarsen down to?
34 int       ndims;                number of eigenvectors (2^d sets)
35 double    eigtol;               tolerance on eigenvectors
36 long      seed;                 for random graph mutations
37 */
38 
/* Context carrying the Chaco-specific parameters attached to a MatPartitioning */
typedef struct {
  PetscBool         verbose;       /* if true, print the stdout captured from the Chaco library call */
  PetscInt          eignum;        /* number of eigenvectors to compute (1, 2, or 3) */
  PetscReal         eigtol;        /* tolerance on eigenvectors */
  MPChacoGlobalType global_method; /* global method */
  MPChacoLocalType  local_method;  /* local method */
  MPChacoEigenType  eigen_method;  /* eigensolver */
  PetscInt          nbvtxcoarsed;  /* number of vertices for the coarse graph */
} MatPartitioning_Chaco;
48 
49 #define SIZE_LOG 10000 /* size of buffer for mesg_log */
50 
static PetscErrorCode MatPartitioningApply_Chaco(MatPartitioning part, IS *partitioning)
{
  /* Partition the adjacency graph part->adj with the sequential Chaco library:
     gather the graph on every process, call interface(), and return this
     process's rows of the resulting assignment as an index set. */
  int                    cerr;
  PetscInt              *parttab, *locals, i, nb_locals, M, N;
  PetscMPIInt            size, rank;
  Mat                    mat = part->adj, matAdj, matSeq, *A;
  Mat_MPIAdj            *adj;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
  PetscBool              flg;
  IS                     isrow, iscol;
  int                    nvtxs, *start, *adjacency, *vwgts, architecture, ndims_tot;
  int                    mesh_dims[3], global_method, local_method, rqi_flag, vmax, ndims;
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int *assignment;
#else
  short *assignment; /* older Chaco versions return one short per vertex */
#endif
  double eigtol;
  long   seed;
  char  *mesg_log;
#if defined(PETSC_HAVE_UNISTD_H)
  int fd_stdout, fd_pipe[2], count;
#endif

  PetscFunctionBegin;
  PetscCheck(!part->use_edge_weights, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Chaco does not support edge weights");
  FREE_GRAPH = 0; /* otherwise Chaco will attempt to free memory for adjacency graph */
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  /* Chaco is sequential: build a sequential copy of the (possibly distributed) graph */
  if (size > 1) {
    if (flg) {
      PetscCall(MatMPIAdjToSeq(mat, &matSeq));
    } else {
      PetscCall(PetscInfo(part, "Converting distributed matrix to sequential: this could be a performance loss\n"));
      PetscCall(MatGetSize(mat, &M, &N));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
      PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A));
      PetscCall(ISDestroy(&isrow));
      PetscCall(ISDestroy(&iscol));
      matSeq = *A;
      PetscCall(PetscFree(A)); /* frees only the array of Mat handles, not the matrix itself */
    }
  } else {
    PetscCall(PetscObjectReference((PetscObject)mat));
    matSeq = mat;
  }

  if (!flg) { /* convert regular matrix to MPIADJ */
    PetscCall(MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matAdj));
  } else {
    PetscCall(PetscObjectReference((PetscObject)matSeq));
    matAdj = matSeq;
  }

  adj = (Mat_MPIAdj *)matAdj->data; /* finally adj contains adjacency graph */

  /* arguments for Chaco library */
  nvtxs         = mat->rmap->N;         /* number of vertices in full graph */
  start         = adj->i;               /* start of edge list for each vertex */
  vwgts         = part->vertex_weights; /* weights for all vertices */
  architecture  = 1;                    /* 0 => hypercube, d => d-dimensional mesh */
  ndims_tot     = 0;                    /* total number of cube dimensions to divide */
  mesh_dims[0]  = part->n;              /* dimensions of mesh of processors */
  global_method = chaco->global_method; /* global partitioning algorithm */
  local_method  = chaco->local_method;  /* local partitioning algorithm */
  rqi_flag      = chaco->eigen_method;  /* should I use RQI/Symmlq eigensolver? */
  vmax          = chaco->nbvtxcoarsed;  /* how many vertices to coarsen down to? */
  ndims         = chaco->eignum;        /* number of eigenvectors (2^d sets) */
  eigtol        = chaco->eigtol;        /* tolerance on eigenvectors */
  seed          = 123636512;            /* for random graph mutations */

  PetscCall(PetscMalloc1(mat->rmap->N, &assignment));
  PetscCall(PetscMalloc1(start[nvtxs], &adjacency));
  for (i = 0; i < start[nvtxs]; i++) adjacency[i] = (adj->j)[i] + 1; /* 1-based indexing */

    /* redirect stdout to a pipe so Chaco's diagnostics can be captured */
#if defined(PETSC_HAVE_UNISTD_H)
  fd_stdout = dup(1);
  PetscCheck(!pipe(fd_pipe), PETSC_COMM_SELF, PETSC_ERR_SYS, "Could not open pipe");
  close(1);
  dup2(fd_pipe[1], 1);
  PetscCall(PetscMalloc1(SIZE_LOG, &mesg_log));
#endif

  /* library call */
  cerr = interface(nvtxs, start, adjacency, vwgts, NULL, NULL, NULL, NULL, NULL, NULL, assignment, architecture, ndims_tot, mesh_dims, NULL, global_method, local_method, rqi_flag, vmax, ndims, eigtol, seed);

#if defined(PETSC_HAVE_UNISTD_H)
  /* restore stdout and recover whatever Chaco printed */
  PetscCall(PetscFFlush(stdout));
  count = read(fd_pipe[0], mesg_log, (SIZE_LOG - 1) * sizeof(char));
  if (count < 0) count = 0;
  mesg_log[count] = 0;
  close(1);
  dup2(fd_stdout, 1);
  close(fd_stdout);
  close(fd_pipe[0]);
  close(fd_pipe[1]);
  if (chaco->verbose) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "%s", mesg_log));
  PetscCall(PetscFree(mesg_log));
#endif
  PetscCheck(!cerr, PETSC_COMM_SELF, PETSC_ERR_LIB, "Chaco failed");

  /* widen the (int or short) assignment into PetscInt storage */
  PetscCall(PetscMalloc1(mat->rmap->N, &parttab));
  for (i = 0; i < nvtxs; i++) parttab[i] = assignment[i];

  /* creation of the index set: each process keeps only its own row range */
  nb_locals = mat->rmap->n;
  locals    = parttab + mat->rmap->rstart;
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), nb_locals, locals, PETSC_COPY_VALUES, partitioning));

  /* clean up */
  PetscCall(PetscFree(parttab));
  PetscCall(PetscFree(adjacency));
  PetscCall(PetscFree(assignment));
  PetscCall(MatDestroy(&matSeq));
  PetscCall(MatDestroy(&matAdj));
  PetscFunctionReturn(PETSC_SUCCESS);
}
171 
172 static PetscErrorCode MatPartitioningView_Chaco(MatPartitioning part, PetscViewer viewer)
173 {
174   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
175   PetscBool              isascii;
176 
177   PetscFunctionBegin;
178   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
179   if (isascii) {
180     PetscCall(PetscViewerASCIIPrintf(viewer, "  Global method: %s\n", MPChacoGlobalTypes[chaco->global_method]));
181     PetscCall(PetscViewerASCIIPrintf(viewer, "  Local method: %s\n", MPChacoLocalTypes[chaco->local_method]));
182     PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of vertices for the coarse graph: %" PetscInt_FMT "\n", chaco->nbvtxcoarsed));
183     PetscCall(PetscViewerASCIIPrintf(viewer, "  Eigensolver: %s\n", MPChacoEigenTypes[chaco->eigen_method]));
184     PetscCall(PetscViewerASCIIPrintf(viewer, "  Tolerance for eigensolver: %g\n", chaco->eigtol));
185     PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of eigenvectors: %" PetscInt_FMT "\n", chaco->eignum));
186   }
187   PetscFunctionReturn(PETSC_SUCCESS);
188 }
189 
190 /*@
191   MatPartitioningChacoSetGlobal - Set the global method for Chaco partitioner.
192 
193   Collective
194 
195   Input Parameters:
196 + part   - the partitioning context
197 - method - one of `MP_CHACO_MULTILEVEL`, `MP_CHACO_SPECTRAL`, `MP_CHACO_LINEAR`,
198             `MP_CHACO_RANDOM` or `MP_CHACO_SCATTERED`
199 
200   Options Database Key:
201 . -mat_partitioning_chaco_global <method> - the global method
202 
203   Level: advanced
204 
205   Note:
206   The default is the multi-level method. See Chaco documentation for
207   additional details.
208 
.seealso: `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetLocal()`, `MatPartitioningChacoGetGlobal()`
210 @*/
PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning part, MPChacoGlobalType method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, method, 2);
  /* dispatch to the type-specific implementation; a no-op if this
     partitioner type did not compose "MatPartitioningChacoSetGlobal_C" */
  PetscTryMethod(part, "MatPartitioningChacoSetGlobal_C", (MatPartitioning, MPChacoGlobalType), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}
219 
220 static PetscErrorCode MatPartitioningChacoSetGlobal_Chaco(MatPartitioning part, MPChacoGlobalType method)
221 {
222   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
223 
224   PetscFunctionBegin;
225   switch (method) {
226   case MP_CHACO_MULTILEVEL:
227   case MP_CHACO_SPECTRAL:
228   case MP_CHACO_LINEAR:
229   case MP_CHACO_RANDOM:
230   case MP_CHACO_SCATTERED:
231     chaco->global_method = method;
232     break;
233   default:
234     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Chaco: Unknown or unsupported option");
235   }
236   PetscFunctionReturn(PETSC_SUCCESS);
237 }
238 
239 /*@
240   MatPartitioningChacoGetGlobal - Get the global method used by the Chaco partitioner.
241 
242   Not Collective
243 
244   Input Parameter:
245 . part - the partitioning context
246 
247   Output Parameter:
248 . method - the method
249 
250   Level: advanced
251 
252 .seealso: `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetGlobal()`
253 @*/
254 PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning part, MPChacoGlobalType *method)
255 {
256   PetscFunctionBegin;
257   PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
258   PetscAssertPointer(method, 2);
259   PetscTryMethod(part, "MatPartitioningChacoGetGlobal_C", (MatPartitioning, MPChacoGlobalType *), (part, method));
260   PetscFunctionReturn(PETSC_SUCCESS);
261 }
262 
263 static PetscErrorCode MatPartitioningChacoGetGlobal_Chaco(MatPartitioning part, MPChacoGlobalType *method)
264 {
265   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
266 
267   PetscFunctionBegin;
268   *method = chaco->global_method;
269   PetscFunctionReturn(PETSC_SUCCESS);
270 }
271 
272 /*@
273   MatPartitioningChacoSetLocal - Set the local method for the Chaco partitioner.
274 
275   Collective
276 
277   Input Parameters:
278 + part   - the partitioning context
279 - method - one of `MP_CHACO_KERNIGHAN` or `MP_CHACO_NONE`
280 
281   Options Database Key:
282 . -mat_partitioning_chaco_local <method> - the local method
283 
284   Level: advanced
285 
286   Note:
287   The default is to apply the Kernighan-Lin heuristic. See Chaco documentation
288   for additional details.
289 
290 .seealso: `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetGlobal()`, `MatPartitioningChacoGetLocal()`
291 @*/
PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning part, MPChacoLocalType method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, method, 2);
  /* dispatch to the type-specific implementation; a no-op if this
     partitioner type did not compose "MatPartitioningChacoSetLocal_C" */
  PetscTryMethod(part, "MatPartitioningChacoSetLocal_C", (MatPartitioning, MPChacoLocalType), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}
300 
301 static PetscErrorCode MatPartitioningChacoSetLocal_Chaco(MatPartitioning part, MPChacoLocalType method)
302 {
303   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
304 
305   PetscFunctionBegin;
306   switch (method) {
307   case MP_CHACO_KERNIGHAN:
308   case MP_CHACO_NONE:
309     chaco->local_method = method;
310     break;
311   default:
312     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Chaco: Unknown or unsupported option");
313   }
314   PetscFunctionReturn(PETSC_SUCCESS);
315 }
316 
317 /*@
318   MatPartitioningChacoGetLocal - Get local method used by the Chaco partitioner.
319 
320   Not Collective
321 
322   Input Parameter:
323 . part - the partitioning context
324 
325   Output Parameter:
326 . method - the method
327 
328   Level: advanced
329 
330 .seealso: `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetLocal()`
331 @*/
PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning part, MPChacoLocalType *method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(method, 2);
  /* PetscUseMethod errors if "MatPartitioningChacoGetLocal_C" is not
     composed, so *method is always written on success */
  PetscUseMethod(part, "MatPartitioningChacoGetLocal_C", (MatPartitioning, MPChacoLocalType *), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}
340 
341 static PetscErrorCode MatPartitioningChacoGetLocal_Chaco(MatPartitioning part, MPChacoLocalType *method)
342 {
343   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
344 
345   PetscFunctionBegin;
346   *method = chaco->local_method;
347   PetscFunctionReturn(PETSC_SUCCESS);
348 }
349 
350 /*@
351   MatPartitioningChacoSetCoarseLevel - Set the coarse level parameter for the
352   Chaco partitioner.
353 
354   Collective
355 
356   Input Parameters:
357 + part  - the partitioning context
358 - level - the coarse level in range [0.0,1.0]
359 
360   Options Database Key:
361 . -mat_partitioning_chaco_coarse <l> - Coarse level
362 
363   Level: advanced
364 
365 .seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`
366 @*/
PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning part, PetscReal level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, level, 2);
  /* dispatch to the type-specific implementation; a no-op if this
     partitioner type did not compose "MatPartitioningChacoSetCoarseLevel_C" */
  PetscTryMethod(part, "MatPartitioningChacoSetCoarseLevel_C", (MatPartitioning, PetscReal), (part, level));
  PetscFunctionReturn(PETSC_SUCCESS);
}
375 
/* Translate the coarsening fraction into an absolute coarse-graph vertex count */
static PetscErrorCode MatPartitioningChacoSetCoarseLevel_Chaco(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  /* NOTE(review): the check excludes level == 1.0 although the man page and the
     error message advertise the closed range [0.0,1.0] -- confirm which is intended */
  PetscCheck(level >= 0.0 && level < 1.0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Chaco: level of coarsening out of range [0.0-1.0]");
  chaco->nbvtxcoarsed = (PetscInt)(part->adj->cmap->N * level); /* fraction of the global column count */
  if (chaco->nbvtxcoarsed < 20) chaco->nbvtxcoarsed = 20;       /* enforce a floor of 20 coarse vertices */
  PetscFunctionReturn(PETSC_SUCCESS);
}
386 
387 /*@
388   MatPartitioningChacoSetEigenSolver - Set the eigensolver method for Chaco partitioner.
389 
390   Collective
391 
392   Input Parameters:
393 + part   - the partitioning context
394 - method - one of `MP_CHACO_LANCZOS` or `MP_CHACO_RQI`
395 
396   Options Database Key:
397 . -mat_partitioning_chaco_eigen_solver <method> - the eigensolver
398 
399   Level: advanced
400 
401   Note:
402   The default is to use a Lanczos method. See Chaco documentation for details.
403 
404 .seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenTol()`, `MatPartitioningChacoSetEigenNumber()`,
405           `MatPartitioningChacoGetEigenSolver()`
406 @*/
PetscErrorCode MatPartitioningChacoSetEigenSolver(MatPartitioning part, MPChacoEigenType method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, method, 2);
  /* dispatch to the type-specific implementation; a no-op if this
     partitioner type did not compose "MatPartitioningChacoSetEigenSolver_C" */
  PetscTryMethod(part, "MatPartitioningChacoSetEigenSolver_C", (MatPartitioning, MPChacoEigenType), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}
415 
416 static PetscErrorCode MatPartitioningChacoSetEigenSolver_Chaco(MatPartitioning part, MPChacoEigenType method)
417 {
418   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
419 
420   PetscFunctionBegin;
421   switch (method) {
422   case MP_CHACO_LANCZOS:
423   case MP_CHACO_RQI:
424     chaco->eigen_method = method;
425     break;
426   default:
427     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Chaco: Unknown or unsupported option");
428   }
429   PetscFunctionReturn(PETSC_SUCCESS);
430 }
431 
432 /*@
433   MatPartitioningChacoGetEigenSolver - Get the eigensolver used by the Chaco partitioner.
434 
435   Not Collective
436 
437   Input Parameter:
438 . part - the partitioning context
439 
440   Output Parameter:
441 . method - the method
442 
443   Level: advanced
444 
445 .seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenSolver()`
446 @*/
PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning part, MPChacoEigenType *method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(method, 2);
  /* PetscUseMethod errors if "MatPartitioningChacoGetEigenSolver_C" is not
     composed, so *method is always written on success */
  PetscUseMethod(part, "MatPartitioningChacoGetEigenSolver_C", (MatPartitioning, MPChacoEigenType *), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}
455 
456 static PetscErrorCode MatPartitioningChacoGetEigenSolver_Chaco(MatPartitioning part, MPChacoEigenType *method)
457 {
458   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
459 
460   PetscFunctionBegin;
461   *method = chaco->eigen_method;
462   PetscFunctionReturn(PETSC_SUCCESS);
463 }
464 
465 /*@
466   MatPartitioningChacoSetEigenTol - Sets the tolerance for the eigensolver used by Chaco
467 
468   Collective
469 
470   Input Parameters:
471 + part - the partitioning context
472 - tol  - the tolerance
473 
474   Options Database Key:
475 . -mat_partitioning_chaco_eigen_tol <tol> - Tolerance for eigensolver
476 
477   Note:
478   Must be positive. The default value is 0.001.
479 
480   Level: advanced
481 
482 .seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenSolver()`, `MatPartitioningChacoGetEigenTol()`
483 @*/
PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning part, PetscReal tol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, tol, 2);
  /* dispatch to the type-specific implementation; a no-op if this
     partitioner type did not compose "MatPartitioningChacoSetEigenTol_C" */
  PetscTryMethod(part, "MatPartitioningChacoSetEigenTol_C", (MatPartitioning, PetscReal), (part, tol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
492 
493 static PetscErrorCode MatPartitioningChacoSetEigenTol_Chaco(MatPartitioning part, PetscReal tol)
494 {
495   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
496 
497   PetscFunctionBegin;
498   if (tol == PETSC_DEFAULT) chaco->eigtol = 0.001;
499   else {
500     PetscCheck(tol > 0.0, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Tolerance must be positive");
501     chaco->eigtol = tol;
502   }
503   PetscFunctionReturn(PETSC_SUCCESS);
504 }
505 
506 /*@
507   MatPartitioningChacoGetEigenTol - Gets the eigensolver tolerance used by Chaco
508 
509   Not Collective
510 
511   Input Parameter:
512 . part - the partitioning context
513 
514   Output Parameter:
515 . tol - the tolerance
516 
517   Level: advanced
518 
519 .seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenTol()`
520 @*/
PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning part, PetscReal *tol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(tol, 2);
  /* PetscUseMethod errors if "MatPartitioningChacoGetEigenTol_C" is not
     composed, so *tol is always written on success */
  PetscUseMethod(part, "MatPartitioningChacoGetEigenTol_C", (MatPartitioning, PetscReal *), (part, tol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
529 
530 static PetscErrorCode MatPartitioningChacoGetEigenTol_Chaco(MatPartitioning part, PetscReal *tol)
531 {
532   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
533 
534   PetscFunctionBegin;
535   *tol = chaco->eigtol;
536   PetscFunctionReturn(PETSC_SUCCESS);
537 }
538 
539 /*@
  MatPartitioningChacoSetEigenNumber - Sets the number of eigenvectors to compute by Chaco during partitioning.
542 
543   Collective
544 
545   Input Parameters:
546 + part - the partitioning context
547 - num  - the number of eigenvectors
548 
549   Options Database Key:
550 . -mat_partitioning_chaco_eigen_number <n> - Number of eigenvectors
551 
552   Note:
553   Accepted values are 1, 2 or 3, indicating partitioning by bisection,
554   quadrisection, or octosection.
555 
556   Level: advanced
557 
558 .seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenSolver()`, `MatPartitioningChacoGetEigenTol()`
559 @*/
PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning part, PetscInt num)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveInt(part, num, 2);
  /* dispatch to the type-specific implementation; a no-op if this
     partitioner type did not compose "MatPartitioningChacoSetEigenNumber_C" */
  PetscTryMethod(part, "MatPartitioningChacoSetEigenNumber_C", (MatPartitioning, PetscInt), (part, num));
  PetscFunctionReturn(PETSC_SUCCESS);
}
568 
569 static PetscErrorCode MatPartitioningChacoSetEigenNumber_Chaco(MatPartitioning part, PetscInt num)
570 {
571   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
572 
573   PetscFunctionBegin;
574   if (num == PETSC_DEFAULT) chaco->eignum = 1;
575   else {
576     PetscCheck(num >= 1 && num <= 3, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Can only specify 1, 2 or 3 eigenvectors");
577     chaco->eignum = num;
578   }
579   PetscFunctionReturn(PETSC_SUCCESS);
580 }
581 
582 /*@
583   MatPartitioningChacoGetEigenNumber - Gets the number of eigenvectors used by Chaco.
584 
585   Not Collective
586 
587   Input Parameter:
588 . part - the partitioning context
589 
590   Output Parameter:
591 . num - number of eigenvectors
592 
593   Level: advanced
594 
595 .seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenNumber()`
596 @*/
PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning part, PetscInt *num)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(num, 2);
  /* PetscUseMethod errors if "MatPartitioningChacoGetEigenNumber_C" is not
     composed, so *num is always written on success */
  PetscUseMethod(part, "MatPartitioningChacoGetEigenNumber_C", (MatPartitioning, PetscInt *), (part, num));
  PetscFunctionReturn(PETSC_SUCCESS);
}
605 
606 static PetscErrorCode MatPartitioningChacoGetEigenNumber_Chaco(MatPartitioning part, PetscInt *num)
607 {
608   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
609 
610   PetscFunctionBegin;
611   *num = chaco->eignum;
612   PetscFunctionReturn(PETSC_SUCCESS);
613 }
614 
static PetscErrorCode MatPartitioningSetFromOptions_Chaco(MatPartitioning part, PetscOptionItems *PetscOptionsObject)
{
  /* Read the Chaco-specific options from the options database and, for each
     one that was explicitly given, apply it via the corresponding setter so
     that the same validation runs as for a programmatic call. */
  PetscInt               i;
  PetscReal              r;
  PetscBool              flag;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
  MPChacoGlobalType      global;
  MPChacoLocalType       local;
  MPChacoEigenType       eigen;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Chaco partitioning options");
  PetscCall(PetscOptionsEnum("-mat_partitioning_chaco_global", "Global method", "MatPartitioningChacoSetGlobal", MPChacoGlobalTypes, (PetscEnum)chaco->global_method, (PetscEnum *)&global, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetGlobal(part, global));
  PetscCall(PetscOptionsEnum("-mat_partitioning_chaco_local", "Local method", "MatPartitioningChacoSetLocal", MPChacoLocalTypes, (PetscEnum)chaco->local_method, (PetscEnum *)&local, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetLocal(part, local));
  PetscCall(PetscOptionsReal("-mat_partitioning_chaco_coarse", "Coarse level", "MatPartitioningChacoSetCoarseLevel", 0.0, &r, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetCoarseLevel(part, r));
  PetscCall(PetscOptionsEnum("-mat_partitioning_chaco_eigen_solver", "Eigensolver method", "MatPartitioningChacoSetEigenSolver", MPChacoEigenTypes, (PetscEnum)chaco->eigen_method, (PetscEnum *)&eigen, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetEigenSolver(part, eigen));
  PetscCall(PetscOptionsReal("-mat_partitioning_chaco_eigen_tol", "Eigensolver tolerance", "MatPartitioningChacoSetEigenTol", chaco->eigtol, &r, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetEigenTol(part, r));
  PetscCall(PetscOptionsInt("-mat_partitioning_chaco_eigen_number", "Number of eigenvectors: 1, 2, or 3 (bi-, quadri-, or octosection)", "MatPartitioningChacoSetEigenNumber", chaco->eignum, &i, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetEigenNumber(part, i));
  /* verbose has no setter; it is written directly into the context */
  PetscCall(PetscOptionsBool("-mat_partitioning_chaco_verbose", "Show library output", "", chaco->verbose, &chaco->verbose, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
643 
644 static PetscErrorCode MatPartitioningDestroy_Chaco(MatPartitioning part)
645 {
646   MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
647 
648   PetscFunctionBegin;
649   PetscCall(PetscFree(chaco));
650   /* clear composed functions */
651   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetGlobal_C", NULL));
652   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetGlobal_C", NULL));
653   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetLocal_C", NULL));
654   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetLocal_C", NULL));
655   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetCoarseLevel_C", NULL));
656   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenSolver_C", NULL));
657   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenSolver_C", NULL));
658   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenTol_C", NULL));
659   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenTol_C", NULL));
660   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenNumber_C", NULL));
661   PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenNumber_C", NULL));
662   PetscFunctionReturn(PETSC_SUCCESS);
663 }
664 
665 /*MC
666    MATPARTITIONINGCHACO - Creates a partitioning context that uses the external package Chaco {cite}`chaco95`
667 
668    Level: beginner
669 
670    Note:
671    Does not use the `MatPartitioningSetUseEdgeWeights()` option
672 
673 .seealso: `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningType`
674 M*/
675 
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Chaco(MatPartitioning part)
{
  /* Constructor for MATPARTITIONINGCHACO: allocate the type-specific
     context with its default settings, install the virtual table, and
     compose the Chaco-specific setter/getter functions. */
  MatPartitioning_Chaco *chaco;

  PetscFunctionBegin;
  PetscCall(PetscNew(&chaco));
  part->data = (void *)chaco;

  /* defaults (matching the man-page documentation of the setters) */
  chaco->global_method = MP_CHACO_MULTILEVEL;
  chaco->local_method  = MP_CHACO_KERNIGHAN;
  chaco->eigen_method  = MP_CHACO_LANCZOS;
  chaco->nbvtxcoarsed  = 200;
  chaco->eignum        = 1;
  chaco->eigtol        = 0.001;
  chaco->verbose       = PETSC_FALSE;

  part->ops->apply          = MatPartitioningApply_Chaco;
  part->ops->view           = MatPartitioningView_Chaco;
  part->ops->destroy        = MatPartitioningDestroy_Chaco;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Chaco;

  /* composed functions give the public MatPartitioningChacoXxx() wrappers
     something to dispatch to while this type is active */
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetGlobal_C", MatPartitioningChacoSetGlobal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetGlobal_C", MatPartitioningChacoGetGlobal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetLocal_C", MatPartitioningChacoSetLocal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetLocal_C", MatPartitioningChacoGetLocal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetCoarseLevel_C", MatPartitioningChacoSetCoarseLevel_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenSolver_C", MatPartitioningChacoSetEigenSolver_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenSolver_C", MatPartitioningChacoGetEigenSolver_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenTol_C", MatPartitioningChacoSetEigenTol_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenTol_C", MatPartitioningChacoGetEigenTol_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenNumber_C", MatPartitioningChacoSetEigenNumber_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenNumber_C", MatPartitioningChacoGetEigenNumber_Chaco));
  PetscFunctionReturn(PETSC_SUCCESS);
}
710