xref: /petsc/src/mat/graphops/partition/partition.c (revision 98d129c30f3ee9fdddc40fdbc5a989b7be64f888)
1 #include <petsc/private/matimpl.h> /*I "petscmat.h" I*/
2 
3 /* Logging support */
4 PetscClassId MAT_PARTITIONING_CLASSID;
5 
6 /*
7    Simplest partitioning, keeps the current partitioning.
8 */
9 static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
10 {
11   PetscInt    m;
12   PetscMPIInt rank, size;
13 
14   PetscFunctionBegin;
15   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
16   if (part->n != size) {
17     const char *prefix;
18     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
19     SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
20   }
21   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));
22 
23   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
24   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
25   PetscFunctionReturn(PETSC_SUCCESS);
26 }
27 
28 /*
29    partition an index to rebalance the computation
30 */
31 static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
32 {
33   PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;
34 
35   PetscFunctionBegin;
36   PetscCall(MatGetSize(part->adj, &M, NULL));
37   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
38   nparts = part->n;
39   PetscCall(PetscMalloc1(nparts, &parts));
40   d = M / nparts;
41   for (i = 0; i < nparts; i++) parts[i] = d;
42   r = M % nparts;
43   for (i = 0; i < r; i++) parts[i] += 1;
44   for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
45   PetscCall(PetscMalloc1(m, &indices));
46   PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
47   for (i = start; i < end; i++) {
48     PetscCall(PetscFindInt(i, nparts, parts, &loc));
49     if (loc < 0) loc = -(loc + 1);
50     else loc = loc + 1;
51     indices[i - start] = loc;
52   }
53   PetscCall(PetscFree(parts));
54   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
55   PetscFunctionReturn(PETSC_SUCCESS);
56 }
57 
58 static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
59 {
60   PetscInt    cell, n, N, p, rstart, rend, *color;
61   PetscMPIInt size;
62 
63   PetscFunctionBegin;
64   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
65   PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
66   p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
67   PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");
68 
69   PetscCall(MatGetSize(part->adj, &N, NULL));
70   n = (PetscInt)PetscSqrtReal((PetscReal)N);
71   PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
72   PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
73   PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
74   PetscCall(PetscMalloc1(rend - rstart, &color));
75   /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
76   for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
77   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
78   PetscFunctionReturn(PETSC_SUCCESS);
79 }
80 
81 PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
82 {
83   PetscFunctionBegin;
84   part->ops->apply   = MatPartitioningApply_Current;
85   part->ops->view    = NULL;
86   part->ops->destroy = NULL;
87   PetscFunctionReturn(PETSC_SUCCESS);
88 }
89 
90 PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
91 {
92   PetscFunctionBegin;
93   part->ops->apply   = MatPartitioningApply_Average;
94   part->ops->view    = NULL;
95   part->ops->destroy = NULL;
96   PetscFunctionReturn(PETSC_SUCCESS);
97 }
98 
99 PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
100 {
101   PetscFunctionBegin;
102   part->ops->apply   = MatPartitioningApply_Square;
103   part->ops->view    = NULL;
104   part->ops->destroy = NULL;
105   PetscFunctionReturn(PETSC_SUCCESS);
106 }
107 
108 /* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
109        seps[  0 :         2*p) : the start and end node of each subdomain
110        seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
111      levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
112    The arrays must be large enough
113 */
114 PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
115 {
116   PetscInt l2p, i, pTree, pStartTree;
117 
118   PetscFunctionBegin;
119   l2p = PetscLog2Real(p);
120   PetscCheck(!(l2p - (PetscInt)PetscLog2Real(p)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
121   if (!p) PetscFunctionReturn(PETSC_SUCCESS);
122   PetscCall(PetscArrayzero(seps, 2 * p - 2));
123   PetscCall(PetscArrayzero(level, p - 1));
124   seps[2 * p - 2] = sizes[2 * p - 2];
125   pTree           = p;
126   pStartTree      = 0;
127   while (pTree != 1) {
128     for (i = pStartTree; i < pStartTree + pTree; i++) {
129       seps[i] += sizes[i];
130       seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
131     }
132     pStartTree += pTree;
133     pTree = pTree / 2;
134   }
135   seps[2 * p - 2] -= sizes[2 * p - 2];
136 
137   pStartTree = 2 * p - 2;
138   pTree      = 1;
139   while (pStartTree > 0) {
140     for (i = pStartTree; i < pStartTree + pTree; i++) {
141       PetscInt k = 2 * i - (pStartTree + 2 * pTree);
142       PetscInt n = seps[k + 1];
143 
144       seps[k + 1]  = seps[i] - sizes[k + 1];
145       seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
146       level[i - p] = -pTree - i + pStartTree;
147     }
148     pTree *= 2;
149     pStartTree -= pTree;
150   }
151   /* I know there should be a formula */
152   PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
153   for (i = 2 * p - 2; i >= 0; i--) {
154     seps[2 * i]     = seps[i];
155     seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
156   }
157   PetscFunctionReturn(PETSC_SUCCESS);
158 }
159 
/* Registry of available partitioner implementations, populated via
   MatPartitioningRegister(); the flag records whether the built-in
   registrations have already been performed */
PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;
162 
/*@C
  MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

  Not Collective

  Input Parameters:
+ sname    - name of partitioning (for example `MATPARTITIONINGCURRENT`) or `MATPARTITIONINGPARMETIS`
- function - function pointer that creates the partitioning type

  Level: developer

  Example Usage:
.vb
   MatPartitioningRegister("my_part", MyPartCreate);
.ve

  Then, your partitioner can be chosen with the procedural interface via `MatPartitioningSetType(part, "my_part")` or at runtime via the option
  `-mat_partitioning_type my_part`

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionBegin;
  /* make sure the Mat package (class ids, events, registry) exists before adding */
  PetscCall(MatInitializePackage());
  PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}
191 
/*@C
  MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
  from the partitioning context.

  Not Collective

  Input Parameter:
. partitioning - the partitioning context

  Output Parameter:
. type - partitioner type

  Level: intermediate

  Note:
  The returned string points to internal storage owned by the object; do not free it.

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(PETSC_SUCCESS);
}
216 
217 /*@C
218   MatPartitioningSetNParts - Set how many partitions need to be created;
219   by default this is one per processor. Certain partitioning schemes may
220   in fact only support that option.
221 
222   Collective
223 
224   Input Parameters:
225 + part - the partitioning context
226 - n    - the number of partitions
227 
228   Level: intermediate
229 
230 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
231 @*/
232 PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
233 {
234   PetscFunctionBegin;
235   part->n = n;
236   PetscFunctionReturn(PETSC_SUCCESS);
237 }
238 
/*@
  MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node, a positive value indicates the processor
                   number the node has been assigned to. Negative x values indicate the separator level -(x+1).

  Level: intermediate

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.

.seealso: [](ch_matrices), `Mat`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  /* the adjacency matrix must be assembled and not factored before partitioning */
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* dispatch to the type-specific nested dissection implementation (errors if the type has none) */
  PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, applynd, partitioning);
  PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
275 
/*@
  MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node this tells the processor
                   number that that node is assigned to.

  Options Database Keys:
+ -mat_partitioning_type <type> - set the partitioning package or algorithm to use
- -mat_partitioning_view        - display information about the partitioning object

  Level: beginner

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
{
  PetscBool viewbalance, improve;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  /* the adjacency matrix must be assembled and not factored before partitioning */
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, apply, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

  /* optional post-processing steps driven by the options database */
  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
  improve = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
  PetscOptionsEnd();

  if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

  if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
328 
/*@
  MatPartitioningImprove - Improves the quality of a given partition.

  Collective

  Input Parameters:
+ matp         - the matrix partitioning object
- partitioning - the original partitioning. For each local node this tells the processor
                   number that that node is assigned to.

  Options Database Key:
. -mat_partitioning_improve - improve the quality of the given partition

  Level: beginner

  Note:
  If the selected partitioner type does not provide an improve method this is a no-op.

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  /* the adjacency matrix must be assembled and not factored */
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  /* "Try": silently does nothing if the type has no improve implementation */
  PetscTryTypeMethod(matp, improve, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
360 
/*@
  MatPartitioningViewImbalance - Display partitioning imbalance information.

  Collective

  Input Parameters:
+ matp         - the matrix partitioning object
- partitioning - the partitioning. For each local node this tells the processor
                   number that that node is assigned to.

  Options Database Key:
. -mat_partitioning_view_imbalance - view the balance information from the last partitioning

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
@*/
PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
  const PetscInt *indices;
  PetscViewer     viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscValidHeaderSpecific(partitioning, IS_CLASSID, 2);
  nparts = matp->n;
  PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
  PetscCall(ISGetLocalSize(partitioning, &nlocal));
  PetscCall(ISGetIndices(partitioning, &indices));
  /* accumulate local (weighted) subdomain sizes, then sum them across all ranks */
  for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  PetscCall(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
  PetscCall(ISRestoreIndices(partitioning, &indices));
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i = 0; i < nparts; i++) {
    minsub = PetscMin(minsub, subdomainsizes[i]);
    maxsub = PetscMax(maxsub, subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
  PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
  PetscCall(MatPartitioningView(matp, viewer));
  /* R = max/min ratio; a perfectly balanced partition has R = 1 */
  PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
407 
/*@
  MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
  partitioned.

  Collective

  Input Parameters:
+ part - the partitioning context
- adj  - the adjacency matrix, this can be any `MatType` but the natural representation is `MATMPIADJ`

  Level: beginner

  Note:
  The matrix is not reference-counted here; the caller must keep `adj` alive
  for as long as the partitioning context uses it.

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidHeaderSpecific(adj, MAT_CLASSID, 2);
  /* borrowed reference: no PetscObjectReference() is taken on adj */
  part->adj = adj;
  PetscFunctionReturn(PETSC_SUCCESS);
}
430 
/*@
  MatPartitioningDestroy - Destroys the partitioning context.

  Collective

  Input Parameter:
. part - the partitioning context

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*part, MAT_PARTITIONING_CLASSID, 1);
  /* only the last reference actually destroys the object */
  if (--((PetscObject)*part)->refct > 0) {
    *part = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* type-specific teardown first, then the arrays owned by the base object */
  PetscTryTypeMethod(*part, destroy);
  PetscCall(PetscFree((*part)->vertex_weights));
  PetscCall(PetscFree((*part)->part_weights));
  PetscCall(PetscHeaderDestroy(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}
459 
/*@C
  MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

  Logically Collective

  Input Parameters:
+ part    - the partitioning context
- weights - the weights, on each process this array must have the same size as the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()` or
            1 if that is not provided

  Level: beginner

  Notes:
  The array weights is freed by PETSc so the user should not free the array. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

  The weights may not be used by some partitioners

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  /* release any previously set weights; the object takes ownership of the new array */
  PetscCall(PetscFree(part->vertex_weights));
  part->vertex_weights = (PetscInt *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
488 
/*@C
  MatPartitioningSetPartitionWeights - Sets the weights for each partition.

  Logically Collective

  Input Parameters:
+ part    - the partitioning context
- weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

  Level: beginner

  Note:
  The array weights is freed by PETSc so the user should not free the array. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  /* release any previously set weights; the object takes ownership of the new array */
  PetscCall(PetscFree(part->part_weights));
  part->part_weights = (PetscReal *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
519 
/*@
  MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

  Logically Collective

  Input Parameters:
+ part             - the partitioning context
- use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights will be used,
                     that is, use_edge_weights is set to FALSE. If set use_edge_weights to TRUE, users need to make sure legal
                     edge weights are stored in an ADJ matrix.

  Options Database Key:
. -mat_partitioning_use_edge_weights - (true or false)

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
@*/
PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  part->use_edge_weights = use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
545 
/*@
  MatPartitioningGetUseEdgeWeights - Get a flag that indicates whether or not edge weights are used.

  Logically Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. use_edge_weights - the flag indicating whether or not edge weights are used.

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
          `MatPartitioningSetUseEdgeWeights`
@*/
PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(use_edge_weights, 2);
  *use_edge_weights = part->use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
570 
/*@
  MatPartitioningCreate - Creates a partitioning context.

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newp - location to put the context

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
          `MatPartitioningSetAdjacency()`
@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = NULL;

  PetscCall(MatInitializePackage());
  PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatGraphOperations", comm, MatPartitioningDestroy, MatPartitioningView));
  part->vertex_weights   = NULL;
  part->part_weights     = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  /* default: one partition per MPI process, one weight per vertex */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  part->n    = (PetscInt)size;
  part->ncon = 1;

  *newp = part;
  PetscFunctionReturn(PETSC_SUCCESS);
}
608 
/*@C
  MatPartitioningViewFromOptions - View a partitioning context from the options database

  Collective

  Input Parameters:
+ A    - the partitioning context
. obj  - Optional object that provides the prefix used in the options database check
- name - command line option

  Options Database Key:
. -mat_partitioning_view [viewertype]:... - the viewer and its options

  Level: intermediate

  Note:
.vb
    If no value is provided ascii:stdout is used
       ascii[:[filename][:[format][:append]]]    defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
                                                  for example ascii::ascii_info prints just the information about the object not all details
                                                  unless :append is given filename opens in write mode, overwriting what was already there
       binary[:[filename][:[format][:append]]]   defaults to the file binaryoutput
       draw[:drawtype[:filename]]                for example, draw:tikz, draw:tikz:figure.tex  or draw:x
       socket[:port]                             defaults to the standard output port
       saws[:communicatorname]                    publishes object to the Scientific Application Webserver (SAWs)
.ve

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_PARTITIONING_CLASSID, 1);
  /* thin wrapper over the generic PetscObject options-driven viewing */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
645 
/*@C
  MatPartitioningView - Prints the partitioning data structure.

  Collective

  Input Parameters:
+ part   - the partitioning context
- viewer - optional visualization context

  Level: intermediate

  Note:
  The available visualization contexts include
+     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
-     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
  output where only the first processor opens
  the file.  All other processors send their
  data to the first processor to print.

  The user can open alternative visualization contexts with
.     `PetscViewerASCIIOpen()` - output to a specified file

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  /* fall back to the communicator's stdout viewer when none is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(part, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
    if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n"));
  }
  /* indent any type-specific output below the generic header */
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscTryTypeMethod(part, view, viewer);
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
690 
/*@C
  MatPartitioningSetType - Sets the type of partitioner to use

  Collective

  Input Parameters:
+ part - the partitioning context.
- type - a known method

  Options Database Key:
. -mat_partitioning_type  <type> - (for instance, parmetis), use -help for a list of available methods or see  `MatPartitioningType`

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
{
  PetscBool match;
  PetscErrorCode (*r)(MatPartitioning);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(type, 2);

  /* no-op if the object already has the requested type */
  PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  /* tear down the previous implementation before installing the new one */
  PetscTryTypeMethod(part, destroy);
  part->ops->destroy = NULL;

  part->setupcalled = 0;
  part->data        = NULL;
  PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));

  /* look up the constructor registered under this type name */
  PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);

  PetscCall((*r)(part));

  PetscCall(PetscFree(((PetscObject)part)->type_name));
  PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
735 
/*@
  MatPartitioningSetFromOptions - Sets various partitioning options from the
  options database for the partitioning object

  Collective

  Input Parameter:
. part - the partitioning context.

  Options Database Keys:
+ -mat_partitioning_type  <type> - (for instance, parmetis), use -help for a list of available methods
- -mat_partitioning_nparts       - number of subgraphs

  Level: beginner

  Note:
  If the partitioner has not been set by the user it uses one of the installed partitioners such as ParMetis. If there are
  no installed partitioners it does no repartitioning.

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool   flag;
  char        type[256];
  const char *def;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)part);
  /* choose the default type: the first available external package, otherwise the no-op partitioner */
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
  if (flag) PetscCall(MatPartitioningSetType(part, type));

  PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));

  PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));

  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));

  /* let the installed implementation process its own options */
  PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
796 
/*@C
  MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

  Not Collective

  Input Parameters:
+ partitioning - the partitioning context
- ncon         - the number of weights

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
  partitioning->ncon = ncon;
  PetscFunctionReturn(PETSC_SUCCESS);
}
817