18be712e4SBarry Smith #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
28be712e4SBarry Smith
38be712e4SBarry Smith /*
48be712e4SBarry Smith Currently using ParMetis-4.0.2
58be712e4SBarry Smith */
68be712e4SBarry Smith
78be712e4SBarry Smith #include <parmetis.h>
88be712e4SBarry Smith
98be712e4SBarry Smith /*
108be712e4SBarry Smith The first 5 elements of this structure are the input control array to Metis
118be712e4SBarry Smith */
typedef struct {
  PetscInt  cuts;       /* number of cuts made (output) */
  PetscInt  foldfactor; /* folding factor for the coarse-grid partitioner */
  PetscInt  parallel;   /* use parallel partitioner for coarse problem: 2 = parallel (default), 1 = sequential */
  PetscInt  indexing;   /* 0 indicates C indexing, 1 Fortran */
  PetscInt  printout;   /* indicates if one wishes Metis to print info; raised while PetscLogPrintInfo is on */
  PetscBool repartition; /* if PETSC_TRUE use ParMETIS_V3_AdaptiveRepart instead of ParMETIS_V3_PartKway */
} MatPartitioning_Parmetis;
208be712e4SBarry Smith
/* Convert a (Par)METIS return code into a PETSc error; `func` names the offending routine for the message */
#define PetscCallPARMETIS(n, func) \
  do { \
    PetscCheck(n != METIS_ERROR_INPUT, PETSC_COMM_SELF, PETSC_ERR_LIB, "ParMETIS error due to wrong inputs and/or options for %s", func); \
    PetscCheck(n != METIS_ERROR_MEMORY, PETSC_COMM_SELF, PETSC_ERR_LIB, "ParMETIS error due to insufficient memory in %s", func); \
    PetscCheck(n != METIS_ERROR, PETSC_COMM_SELF, PETSC_ERR_LIB, "ParMETIS general error in %s", func); \
  } while (0)

/* Invoke an external ParMETIS routine with the PETSc profiling stack maintained and the status checked */
#define PetscCallParmetis_(name, func, args) \
  do { \
    PetscStackPushExternal(name); \
    int status = func args; \
    PetscStackPop; \
    PetscCallPARMETIS(status, name); \
  } while (0)

/* Convenience wrapper that stringizes the routine name for error reporting */
#define PetscCallParmetis(func, args) PetscCallParmetis_(PetscStringize(func), func, args)
378be712e4SBarry Smith
/*
  MatPartitioningApply_Parmetis_Private - Common driver behind the apply, applynd, and improve operations.

  Input Parameters:
+ part      - the partitioning context
. useND     - if PETSC_TRUE compute a nested-dissection ordering (ParMETIS_V32_NodeND) instead of a k-way partition
- isImprove - if PETSC_TRUE refine the partition already stored in *partitioning (ParMETIS_V3_RefineKway)

  Input/Output Parameter:
. partitioning - the resulting partition as an IS; when isImprove is PETSC_TRUE it must hold the partition
                 to be improved on entry (that IS is destroyed and replaced)
*/
static PetscErrorCode MatPartitioningApply_Parmetis_Private(MatPartitioning part, PetscBool useND, PetscBool isImprove, IS *partitioning)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
  PetscInt                 *locals = NULL;
  Mat                       mat    = part->adj, amat, pmat;
  PetscBool                 flg;
  PetscInt                  bs = 1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 4);
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  if (flg) {
    amat = mat;
    PetscCall(PetscObjectReference((PetscObject)amat));
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    PetscCall(MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &amat));
    if (amat->rmap->n > 0) bs = mat->rmap->n / amat->rmap->n;
  }
  /* pmat lives on the subcommunicator of ranks that own rows; it is NULL on the excluded ranks (see the if (pmat) below) */
  PetscCall(MatMPIAdjCreateNonemptySubcommMat(amat, &pmat));
  PetscCallMPI(MPI_Barrier(PetscObjectComm((PetscObject)part)));

  if (pmat) {
    MPI_Comm    pcomm, comm;
    Mat_MPIAdj *adj     = (Mat_MPIAdj *)pmat->data;
    PetscInt   *vtxdist = pmat->rmap->range; /* vertex-distribution array expected by ParMETIS */
    PetscInt   *xadj    = adj->i;            /* CSR row offsets of the adjacency graph */
    PetscInt   *adjncy  = adj->j;            /* CSR column indices */
    PetscInt   *NDorder = NULL;
    PetscInt    itmp = 0, wgtflag = 0, numflag = 0, ncon = part->ncon, nparts = part->n, options[24], i, j;
    real_t     *tpwgts, *ubvec, itr = (real_t)0.1;

    PetscCall(PetscObjectGetComm((PetscObject)pmat, &pcomm));
    if (PetscDefined(USE_DEBUG)) {
      /* check that matrix has no diagonal entries */
      PetscInt rstart;
      PetscCall(MatGetOwnershipRange(pmat, &rstart, NULL));
      for (i = 0; i < pmat->rmap->n; i++) {
        for (j = xadj[i]; j < xadj[i + 1]; j++) PetscCheck(adjncy[j] != i + rstart, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Row %" PetscInt_FMT " has diagonal entry; Parmetis forbids diagonal entry", i + rstart);
      }
    }

    PetscCall(PetscMalloc1(pmat->rmap->n, &locals));

    if (isImprove) {
      /* seed locals[] with the existing partition; stride by bs since the adjacency may be "reduced" (see above) */
      PetscInt        i;
      const PetscInt *part_indices;
      PetscValidHeaderSpecific(*partitioning, IS_CLASSID, 4);
      PetscCall(ISGetIndices(*partitioning, &part_indices));
      for (i = 0; i < pmat->rmap->n; i++) locals[i] = part_indices[i * bs];
      PetscCall(ISRestoreIndices(*partitioning, &part_indices));
      PetscCall(ISDestroy(partitioning));
    }

    /* wgtflag selects which weights ParMETIS uses: 1 = edge weights only, 2 = vertex weights only, 3 = both */
    if (adj->values && part->use_edge_weights && !part->vertex_weights) wgtflag = 1;
    if (part->vertex_weights && !adj->values) wgtflag = 2;
    if (part->vertex_weights && adj->values && part->use_edge_weights) wgtflag = 3;

    if (PetscLogPrintInfo) {
      /* raise ParMETIS verbosity while PETSc info logging is active; restored after the call */
      itmp             = pmetis->printout;
      pmetis->printout = 127;
    }
    /* tpwgts: per-constraint target weight fraction for each part; uniform unless the user supplied part weights */
    PetscCall(PetscMalloc1(ncon * nparts, &tpwgts));
    for (i = 0; i < ncon; i++) {
      for (j = 0; j < nparts; j++) {
        if (part->part_weights) {
          tpwgts[i * nparts + j] = (real_t)part->part_weights[i * nparts + j];
        } else {
          tpwgts[i * nparts + j] = (real_t)1. / nparts;
        }
      }
    }
    /* ubvec: per-constraint load-imbalance tolerance (ParMETIS convention) */
    PetscCall(PetscMalloc1(ncon, &ubvec));
    for (i = 0; i < ncon; i++) ubvec[i] = (real_t)1.05;
    /* This sets the defaults */
    options[0] = 1;
    for (i = 1; i < 24; i++) options[i] = -1;
    options[1] = 0; /* no verbosity */
    options[2] = 0;
    options[3] = PARMETIS_PSR_COUPLED; /* Seed */
    /* Duplicate the communicator to be sure that ParMETIS attribute caching does not interfere with PETSc. */
    PetscCallMPI(MPI_Comm_dup(pcomm, &comm));
    if (useND) {
      PetscInt   *sizes, *seps, log2size, subd, *level;
      PetscMPIInt size;
      idx_t       mtype = PARMETIS_MTYPE_GLOBAL, rtype = PARMETIS_SRTYPE_2PHASE, p_nseps = 1, s_nseps = 1;
      real_t      ubfrac = (real_t)1.05;

      PetscCallMPI(MPI_Comm_size(comm, &size));
      PetscCall(PetscMalloc1(pmat->rmap->n, &NDorder));
      PetscCall(PetscMalloc3(2 * size, &sizes, 4 * size, &seps, size, &level));
      PetscCallParmetis(ParMETIS_V32_NodeND, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)&numflag, &mtype, &rtype, &p_nseps, &s_nseps, &ubfrac, NULL /* seed */, NULL /* dbglvl */, (idx_t *)NDorder, (idx_t *)(sizes), &comm));
      log2size = PetscLog2Real(size);
      subd     = PetscPowInt(2, log2size);
      /* map the separator sizes reported by NodeND into separator index ranges and levels */
      PetscCall(MatPartitioningSizesToSep_Private(subd, sizes, seps, level));
      /* assign each local row to a subdomain (or separator level) from its position in the ND ordering */
      for (i = 0; i < pmat->rmap->n; i++) {
        PetscInt loc;

        PetscCall(PetscFindInt(NDorder[i], 2 * subd, seps, &loc));
        if (loc < 0) {
          loc = -(loc + 1);
          if (loc % 2) { /* part of subdomain */
            locals[i] = loc / 2;
          } else {
            PetscCall(PetscFindInt(NDorder[i], 2 * (subd - 1), seps + 2 * subd, &loc));
            loc       = loc < 0 ? -(loc + 1) / 2 : loc / 2;
            locals[i] = level[loc];
          }
        } else locals[i] = loc / 2;
      }
      PetscCall(PetscFree3(sizes, seps, level));
    } else {
      if (pmetis->repartition) {
        PetscCallParmetis(ParMETIS_V3_AdaptiveRepart, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)part->vertex_weights, (idx_t *)adj->values, (idx_t *)&wgtflag, (idx_t *)&numflag, (idx_t *)&ncon, (idx_t *)&nparts, tpwgts, ubvec, &itr, (idx_t *)options,
                                                      (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
      } else if (isImprove) {
        PetscCallParmetis(ParMETIS_V3_RefineKway, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)adj->values, (idx_t *)&wgtflag, (idx_t *)&numflag, (idx_t *)&ncon, (idx_t *)&nparts, tpwgts, ubvec, (idx_t *)options,
                                                   (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
      } else {
        PetscCallParmetis(ParMETIS_V3_PartKway, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)adj->values, (idx_t *)&wgtflag, (idx_t *)&numflag, (idx_t *)&ncon, (idx_t *)&nparts, tpwgts, ubvec, (idx_t *)options,
                                                 (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
      }
    }
    PetscCallMPI(MPI_Comm_free(&comm));

    PetscCall(PetscFree(tpwgts));
    PetscCall(PetscFree(ubvec));
    if (PetscLogPrintInfo) pmetis->printout = itmp; /* restore the saved verbosity */

    if (bs > 1) {
      /* stretch the partition back to the original (unreduced) row space: each reduced row stands for bs rows */
      PetscInt i, j, *newlocals;
      PetscCall(PetscMalloc1(bs * pmat->rmap->n, &newlocals));
      for (i = 0; i < pmat->rmap->n; i++) {
        for (j = 0; j < bs; j++) newlocals[bs * i + j] = locals[i];
      }
      PetscCall(PetscFree(locals));
      PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), bs * pmat->rmap->n, newlocals, PETSC_OWN_POINTER, partitioning));
    } else {
      PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), pmat->rmap->n, locals, PETSC_OWN_POINTER, partitioning));
    }
    if (useND) {
      /* attach the nested-dissection ordering to the partition so callers can retrieve it later */
      IS ndis;

      if (bs > 1) {
        PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)part), bs, pmat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
      } else {
        PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), pmat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
      }
      PetscCall(ISSetPermutation(ndis));
      PetscCall(PetscObjectCompose((PetscObject)*partitioning, "_petsc_matpartitioning_ndorder", (PetscObject)ndis));
      PetscCall(ISDestroy(&ndis));
    }
  } else {
    /* this rank was excluded from the nonempty subcommunicator: create empty output objects */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), 0, NULL, PETSC_COPY_VALUES, partitioning));
    if (useND) {
      IS ndis;

      if (bs > 1) {
        PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)part), bs, 0, NULL, PETSC_COPY_VALUES, &ndis));
      } else {
        PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), 0, NULL, PETSC_COPY_VALUES, &ndis));
      }
      PetscCall(ISSetPermutation(ndis));
      PetscCall(PetscObjectCompose((PetscObject)*partitioning, "_petsc_matpartitioning_ndorder", (PetscObject)ndis));
      PetscCall(ISDestroy(&ndis));
    }
  }
  PetscCall(MatDestroy(&pmat));
  PetscCall(MatDestroy(&amat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2118be712e4SBarry Smith
2128be712e4SBarry Smith /*
2138be712e4SBarry Smith Uses the ParMETIS parallel matrix partitioner to compute a nested dissection ordering of the matrix in parallel
2148be712e4SBarry Smith */
static PetscErrorCode MatPartitioningApplyND_Parmetis(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  /* useND = PETSC_TRUE, isImprove = PETSC_FALSE: nested-dissection ordering */
  PetscCall(MatPartitioningApply_Parmetis_Private(part, PETSC_TRUE, PETSC_FALSE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2218be712e4SBarry Smith
2228be712e4SBarry Smith /*
2238be712e4SBarry Smith Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
2248be712e4SBarry Smith */
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  /* useND = PETSC_FALSE, isImprove = PETSC_FALSE: plain k-way partitioning */
  PetscCall(MatPartitioningApply_Parmetis_Private(part, PETSC_FALSE, PETSC_FALSE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2318be712e4SBarry Smith
/*
   Uses ParMETIS to improve the quality of an existing partition
*/
static PetscErrorCode MatPartitioningImprove_Parmetis(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  /* useND = PETSC_FALSE, isImprove = PETSC_TRUE: refine the partition supplied in *partitioning */
  PetscCall(MatPartitioningApply_Parmetis_Private(part, PETSC_FALSE, PETSC_TRUE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2418be712e4SBarry Smith
MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)2428be712e4SBarry Smith static PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part, PetscViewer viewer)
2438be712e4SBarry Smith {
2448be712e4SBarry Smith MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
2458be712e4SBarry Smith PetscMPIInt rank;
246*9f196a02SMartin Diehl PetscBool isascii;
2478be712e4SBarry Smith
2488be712e4SBarry Smith PetscFunctionBegin;
2498be712e4SBarry Smith PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));
250*9f196a02SMartin Diehl PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
251*9f196a02SMartin Diehl if (isascii) {
2528be712e4SBarry Smith if (pmetis->parallel == 2) {
2538be712e4SBarry Smith PetscCall(PetscViewerASCIIPrintf(viewer, " Using parallel coarse grid partitioner\n"));
2548be712e4SBarry Smith } else {
2558be712e4SBarry Smith PetscCall(PetscViewerASCIIPrintf(viewer, " Using sequential coarse grid partitioner\n"));
2568be712e4SBarry Smith }
2578be712e4SBarry Smith PetscCall(PetscViewerASCIIPrintf(viewer, " Using %" PetscInt_FMT " fold factor\n", pmetis->foldfactor));
2588be712e4SBarry Smith PetscCall(PetscViewerASCIIPushSynchronized(viewer));
2598be712e4SBarry Smith PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d]Number of cuts found %" PetscInt_FMT "\n", rank, pmetis->cuts));
2608be712e4SBarry Smith PetscCall(PetscViewerFlush(viewer));
2618be712e4SBarry Smith PetscCall(PetscViewerASCIIPopSynchronized(viewer));
2628be712e4SBarry Smith }
2638be712e4SBarry Smith PetscFunctionReturn(PETSC_SUCCESS);
2648be712e4SBarry Smith }
2658be712e4SBarry Smith
2668be712e4SBarry Smith /*@
2678be712e4SBarry Smith MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
2688be712e4SBarry Smith do the partitioning of the coarse grid.
2698be712e4SBarry Smith
2708be712e4SBarry Smith Logically Collective
2718be712e4SBarry Smith
2728be712e4SBarry Smith Input Parameter:
2738be712e4SBarry Smith . part - the partitioning context
2748be712e4SBarry Smith
2758be712e4SBarry Smith Level: advanced
2768be712e4SBarry Smith
2778be712e4SBarry Smith .seealso: `MATPARTITIONINGPARMETIS`
2788be712e4SBarry Smith @*/
MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)2798be712e4SBarry Smith PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
2808be712e4SBarry Smith {
2818be712e4SBarry Smith MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
2828be712e4SBarry Smith
2838be712e4SBarry Smith PetscFunctionBegin;
2848be712e4SBarry Smith pmetis->parallel = 1;
2858be712e4SBarry Smith PetscFunctionReturn(PETSC_SUCCESS);
2868be712e4SBarry Smith }
2878be712e4SBarry Smith
2888be712e4SBarry Smith /*@
2898be712e4SBarry Smith MatPartitioningParmetisSetRepartition - Repartition
2908be712e4SBarry Smith current mesh to rebalance computation.
2918be712e4SBarry Smith
2928be712e4SBarry Smith Logically Collective
2938be712e4SBarry Smith
2948be712e4SBarry Smith Input Parameter:
2958be712e4SBarry Smith . part - the partitioning context
2968be712e4SBarry Smith
2978be712e4SBarry Smith Level: advanced
2988be712e4SBarry Smith
2998be712e4SBarry Smith .seealso: `MATPARTITIONINGPARMETIS`
3008be712e4SBarry Smith @*/
MatPartitioningParmetisSetRepartition(MatPartitioning part)3018be712e4SBarry Smith PetscErrorCode MatPartitioningParmetisSetRepartition(MatPartitioning part)
3028be712e4SBarry Smith {
3038be712e4SBarry Smith MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
3048be712e4SBarry Smith
3058be712e4SBarry Smith PetscFunctionBegin;
3068be712e4SBarry Smith pmetis->repartition = PETSC_TRUE;
3078be712e4SBarry Smith PetscFunctionReturn(PETSC_SUCCESS);
3088be712e4SBarry Smith }
3098be712e4SBarry Smith
3108be712e4SBarry Smith /*@
3118be712e4SBarry Smith MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.
3128be712e4SBarry Smith
3138be712e4SBarry Smith Input Parameter:
3148be712e4SBarry Smith . part - the partitioning context
3158be712e4SBarry Smith
3168be712e4SBarry Smith Output Parameter:
3178be712e4SBarry Smith . cut - the edge cut
3188be712e4SBarry Smith
3198be712e4SBarry Smith Level: advanced
3208be712e4SBarry Smith
3218be712e4SBarry Smith .seealso: `MATPARTITIONINGPARMETIS`
3228be712e4SBarry Smith @*/
MatPartitioningParmetisGetEdgeCut(MatPartitioning part,PetscInt * cut)3238be712e4SBarry Smith PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
3248be712e4SBarry Smith {
3258be712e4SBarry Smith MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
3268be712e4SBarry Smith
3278be712e4SBarry Smith PetscFunctionBegin;
3288be712e4SBarry Smith *cut = pmetis->cuts;
3298be712e4SBarry Smith PetscFunctionReturn(PETSC_SUCCESS);
3308be712e4SBarry Smith }
3318be712e4SBarry Smith
MatPartitioningSetFromOptions_Parmetis(MatPartitioning part,PetscOptionItems PetscOptionsObject)332ce78bad3SBarry Smith static PetscErrorCode MatPartitioningSetFromOptions_Parmetis(MatPartitioning part, PetscOptionItems PetscOptionsObject)
3338be712e4SBarry Smith {
3348be712e4SBarry Smith PetscBool flag = PETSC_FALSE;
3358be712e4SBarry Smith
3368be712e4SBarry Smith PetscFunctionBegin;
3378be712e4SBarry Smith PetscOptionsHeadBegin(PetscOptionsObject, "Set ParMeTiS partitioning options");
3388be712e4SBarry Smith PetscCall(PetscOptionsBool("-mat_partitioning_parmetis_coarse_sequential", "Use sequential coarse partitioner", "MatPartitioningParmetisSetCoarseSequential", flag, &flag, NULL));
3398be712e4SBarry Smith if (flag) PetscCall(MatPartitioningParmetisSetCoarseSequential(part));
3408be712e4SBarry Smith PetscCall(PetscOptionsBool("-mat_partitioning_parmetis_repartition", "", "MatPartitioningParmetisSetRepartition", flag, &flag, NULL));
3418be712e4SBarry Smith if (flag) PetscCall(MatPartitioningParmetisSetRepartition(part));
3428be712e4SBarry Smith PetscOptionsHeadEnd();
3438be712e4SBarry Smith PetscFunctionReturn(PETSC_SUCCESS);
3448be712e4SBarry Smith }
3458be712e4SBarry Smith
MatPartitioningDestroy_Parmetis(MatPartitioning part)3468be712e4SBarry Smith static PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
3478be712e4SBarry Smith {
3488be712e4SBarry Smith MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
3498be712e4SBarry Smith
3508be712e4SBarry Smith PetscFunctionBegin;
3518be712e4SBarry Smith PetscCall(PetscFree(pmetis));
3528be712e4SBarry Smith PetscFunctionReturn(PETSC_SUCCESS);
3538be712e4SBarry Smith }
3548be712e4SBarry Smith
3558be712e4SBarry Smith /*MC
3568be712e4SBarry Smith MATPARTITIONINGPARMETIS - Creates a partitioning context via the external package PARMETIS.
3578be712e4SBarry Smith
3588be712e4SBarry Smith Collective
3598be712e4SBarry Smith
3608be712e4SBarry Smith Input Parameter:
3618be712e4SBarry Smith . part - the partitioning context
3628be712e4SBarry Smith
3638be712e4SBarry Smith Options Database Key:
3648be712e4SBarry Smith . -mat_partitioning_parmetis_coarse_sequential - use sequential PARMETIS coarse partitioner
3658be712e4SBarry Smith
3668be712e4SBarry Smith Level: beginner
3678be712e4SBarry Smith
3688be712e4SBarry Smith Note:
3698be712e4SBarry Smith See https://www-users.cs.umn.edu/~karypis/metis/
3708be712e4SBarry Smith
3718be712e4SBarry Smith .seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MatPartitioningParmetisSetCoarseSequential()`, `MatPartitioningParmetisSetRepartition()`,
3728be712e4SBarry Smith `MatPartitioningParmetisGetEdgeCut()`
3738be712e4SBarry Smith M*/
3748be712e4SBarry Smith
MatPartitioningCreate_Parmetis(MatPartitioning part)3758be712e4SBarry Smith PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Parmetis(MatPartitioning part)
3768be712e4SBarry Smith {
3778be712e4SBarry Smith MatPartitioning_Parmetis *pmetis;
3788be712e4SBarry Smith
3798be712e4SBarry Smith PetscFunctionBegin;
3808be712e4SBarry Smith PetscCall(PetscNew(&pmetis));
3818be712e4SBarry Smith part->data = (void *)pmetis;
3828be712e4SBarry Smith
3838be712e4SBarry Smith pmetis->cuts = 0; /* output variable */
3848be712e4SBarry Smith pmetis->foldfactor = 150; /*folding factor */
3858be712e4SBarry Smith pmetis->parallel = 2; /* use parallel partitioner for coarse grid */
3868be712e4SBarry Smith pmetis->indexing = 0; /* index numbering starts from 0 */
3878be712e4SBarry Smith pmetis->printout = 0; /* print no output while running */
3888be712e4SBarry Smith pmetis->repartition = PETSC_FALSE;
3898be712e4SBarry Smith
3908be712e4SBarry Smith part->ops->apply = MatPartitioningApply_Parmetis;
3918be712e4SBarry Smith part->ops->applynd = MatPartitioningApplyND_Parmetis;
3928be712e4SBarry Smith part->ops->improve = MatPartitioningImprove_Parmetis;
3938be712e4SBarry Smith part->ops->view = MatPartitioningView_Parmetis;
3948be712e4SBarry Smith part->ops->destroy = MatPartitioningDestroy_Parmetis;
3958be712e4SBarry Smith part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
3968be712e4SBarry Smith PetscFunctionReturn(PETSC_SUCCESS);
3978be712e4SBarry Smith }
3988be712e4SBarry Smith
3998be712e4SBarry Smith /*@
4008be712e4SBarry Smith MatMeshToCellGraph - Convert a mesh to a cell graph.
4018be712e4SBarry Smith
4028be712e4SBarry Smith Collective
4038be712e4SBarry Smith
4048be712e4SBarry Smith Input Parameters:
4058be712e4SBarry Smith + mesh - the graph that represents the coupling of the vertices of the mesh
- ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
                 quadrilaterals, 3 for tetrahedra and 4 for hexahedra
4088be712e4SBarry Smith
4098be712e4SBarry Smith Output Parameter:
4108be712e4SBarry Smith . dual - the dual graph
4118be712e4SBarry Smith
4128be712e4SBarry Smith Level: advanced
4138be712e4SBarry Smith
4148be712e4SBarry Smith Notes:
4158be712e4SBarry Smith Uses the ParMETIS package to convert a `Mat` that represents coupling of vertices of a mesh
  to a `Mat` that represents the graph of the coupling between cells (the "dual" graph) and is
4178be712e4SBarry Smith suitable for partitioning with the `MatPartitioning` object. Use this to partition cells of a
4188be712e4SBarry Smith mesh.
4198be712e4SBarry Smith
4208be712e4SBarry Smith Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()
4218be712e4SBarry Smith
  Each row of the mesh object represents a single cell in the mesh. For triangles it has 3 entries, quadrilaterals 4 entries,
  tetrahedra 4 entries and hexahedra 8 entries. You can mix triangles and quadrilaterals in the same mesh, but cannot
  mix tetrahedra and hexahedra
4258be712e4SBarry Smith The columns of each row of the `Mat` mesh are the global vertex numbers of the vertices of that row's cell.
4268be712e4SBarry Smith The number of rows in mesh is number of cells, the number of columns is the number of vertices.
4278be712e4SBarry Smith
4288be712e4SBarry Smith .seealso: `MatCreateMPIAdj()`, `MatPartitioningCreate()`
4298be712e4SBarry Smith @*/
PetscErrorCode MatMeshToCellGraph(Mat mesh, PetscInt ncommonnodes, Mat *dual)
{
  PetscInt   *newxadj, *newadjncy;
  PetscInt    numflag = 0; /* C-style (0-based) numbering for ParMETIS */
  Mat_MPIAdj *adj = (Mat_MPIAdj *)mesh->data, *newadj;
  PetscBool   flg;
  MPI_Comm    comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)mesh, MATMPIADJ, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Must use MPIAdj matrix type");

  PetscCall(PetscObjectGetComm((PetscObject)mesh, &comm));
  /* ParMETIS allocates newxadj/newadjncy itself; ownership passes to the dual matrix below */
  PetscCallParmetis(ParMETIS_V3_Mesh2Dual, ((idx_t *)mesh->rmap->range, (idx_t *)adj->i, (idx_t *)adj->j, (idx_t *)&numflag, (idx_t *)&ncommonnodes, (idx_t **)&newxadj, (idx_t **)&newadjncy, &comm));
  PetscCall(MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh), mesh->rmap->n, mesh->rmap->N, newxadj, newadjncy, NULL, dual));
  newadj = (Mat_MPIAdj *)(*dual)->data;

  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  PetscFunctionReturn(PETSC_SUCCESS);
}
450