xref: /petsc/src/mat/graphops/partition/partition.c (revision f13dfd9ea68e0ddeee984e65c377a1819eab8a8a)
1 #include <petsc/private/matimpl.h> /*I "petscmat.h" I*/
2 
3 /* Logging support */
4 PetscClassId MAT_PARTITIONING_CLASSID;
5 
6 /*
7    Simplest partitioning, keeps the current partitioning.
8 */
9 static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
10 {
11   PetscInt    m;
12   PetscMPIInt rank, size;
13 
14   PetscFunctionBegin;
15   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
16   if (part->n != size) {
17     const char *prefix;
18     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
19     SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
20   }
21   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));
22 
23   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
24   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
25   PetscFunctionReturn(PETSC_SUCCESS);
26 }
27 
28 /*
29    partition an index to rebalance the computation
30 */
31 static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
32 {
33   PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;
34 
35   PetscFunctionBegin;
36   PetscCall(MatGetSize(part->adj, &M, NULL));
37   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
38   nparts = part->n;
39   PetscCall(PetscMalloc1(nparts, &parts));
40   d = M / nparts;
41   for (i = 0; i < nparts; i++) parts[i] = d;
42   r = M % nparts;
43   for (i = 0; i < r; i++) parts[i] += 1;
44   for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
45   PetscCall(PetscMalloc1(m, &indices));
46   PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
47   for (i = start; i < end; i++) {
48     PetscCall(PetscFindInt(i, nparts, parts, &loc));
49     if (loc < 0) loc = -(loc + 1);
50     else loc = loc + 1;
51     indices[i - start] = loc;
52   }
53   PetscCall(PetscFree(parts));
54   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
55   PetscFunctionReturn(PETSC_SUCCESS);
56 }
57 
58 static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
59 {
60   PetscInt    cell, n, N, p, rstart, rend, *color;
61   PetscMPIInt size;
62 
63   PetscFunctionBegin;
64   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
65   PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
66   p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
67   PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");
68 
69   PetscCall(MatGetSize(part->adj, &N, NULL));
70   n = (PetscInt)PetscSqrtReal((PetscReal)N);
71   PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
72   PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
73   PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
74   PetscCall(PetscMalloc1(rend - rstart, &color));
75   /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
76   for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
77   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
78   PetscFunctionReturn(PETSC_SUCCESS);
79 }
80 
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  /* Constructor for the "current" (no-op) partitioner: only apply() is needed;
     no per-type state is allocated, so view/destroy stay unset */
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
89 
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  PetscFunctionBegin;
  /* Constructor for the "average" partitioner (near-equal contiguous row split);
     no per-type state is allocated, so view/destroy stay unset */
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
98 
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  /* Constructor for the "square" partitioner (p x p tiling of an n x n grid);
     no per-type state is allocated, so view/destroy stay unset */
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
107 
108 /* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
109        seps[  0 :         2*p) : the start and end node of each subdomain
110        seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
111      levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
112    The arrays must be large enough
113 */
114 PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
115 {
116   PetscInt l2p, i, pTree, pStartTree;
117 
118   PetscFunctionBegin;
119   l2p = PetscLog2Real(p);
120   PetscCheck(!(l2p - (PetscInt)PetscLog2Real(p)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
121   if (!p) PetscFunctionReturn(PETSC_SUCCESS);
122   PetscCall(PetscArrayzero(seps, 2 * p - 2));
123   PetscCall(PetscArrayzero(level, p - 1));
124   seps[2 * p - 2] = sizes[2 * p - 2];
125   pTree           = p;
126   pStartTree      = 0;
127   while (pTree != 1) {
128     for (i = pStartTree; i < pStartTree + pTree; i++) {
129       seps[i] += sizes[i];
130       seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
131     }
132     pStartTree += pTree;
133     pTree = pTree / 2;
134   }
135   seps[2 * p - 2] -= sizes[2 * p - 2];
136 
137   pStartTree = 2 * p - 2;
138   pTree      = 1;
139   while (pStartTree > 0) {
140     for (i = pStartTree; i < pStartTree + pTree; i++) {
141       PetscInt k = 2 * i - (pStartTree + 2 * pTree);
142       PetscInt n = seps[k + 1];
143 
144       seps[k + 1]  = seps[i] - sizes[k + 1];
145       seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
146       level[i - p] = -pTree - i + pStartTree;
147     }
148     pTree *= 2;
149     pStartTree -= pTree;
150   }
151   /* I know there should be a formula */
152   PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
153   for (i = 2 * p - 2; i >= 0; i--) {
154     seps[2 * i]     = seps[i];
155     seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
156   }
157   PetscFunctionReturn(PETSC_SUCCESS);
158 }
159 
160 PetscFunctionList MatPartitioningList              = NULL;
161 PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;
162 
163 /*@C
164   MatPartitioningRegister - Adds a new sparse matrix partitioning to the  matrix package.
165 
166   Not Collective
167 
168   Input Parameters:
169 + sname    - name of partitioning (for example `MATPARTITIONINGCURRENT`) or `MATPARTITIONINGPARMETIS`
170 - function - function pointer that creates the partitioning type
171 
172   Level: developer
173 
174   Example Usage:
175 .vb
176    MatPartitioningRegister("my_part", MyPartCreate);
177 .ve
178 
179   Then, your partitioner can be chosen with the procedural interface via `MatPartitioningSetType(part, "my_part")` or at runtime via the option
180   `-mat_partitioning_type my_part`
181 
182 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
183 @*/
PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionBegin;
  /* ensure the Mat package (and hence the partitioning registry) is initialized */
  PetscCall(MatInitializePackage());
  /* add (or replace) the constructor under the given type name */
  PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}
191 
192 /*@C
193   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
194   from the partitioning context.
195 
196   Not Collective
197 
198   Input Parameter:
199 . partitioning - the partitioning context
200 
201   Output Parameter:
202 . type - partitioner type
203 
204   Level: intermediate
205 
206 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
207 @*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* returns a borrowed pointer to the object's type name (NULL if the type was
     never set); the caller must not free it */
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(PETSC_SUCCESS);
}
216 
217 /*@C
218   MatPartitioningSetNParts - Set how many partitions need to be created;
219   by default this is one per processor. Certain partitioning schemes may
220   in fact only support that option.
221 
222   Collective
223 
224   Input Parameters:
225 + part - the partitioning context
226 - n    - the number of partitions
227 
228   Level: intermediate
229 
230 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
231 @*/
232 PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
233 {
234   PetscFunctionBegin;
235   part->n = n;
236   PetscFunctionReturn(PETSC_SUCCESS);
237 }
238 
239 /*@
240   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.
241 
242   Collective
243 
244   Input Parameter:
245 . matp - the matrix partitioning object
246 
247   Output Parameter:
248 . partitioning - the partitioning. For each local node, a positive value indicates the processor
249                    number the node has been assigned to. Negative x values indicate the separator level -(x+1).
250 
251   Level: intermediate
252 
253   Note:
254   The user can define additional partitionings; see `MatPartitioningRegister()`.
255 
256 .seealso: [](ch_matrices), `Mat`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
257           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
258           `ISPartitioningCount()`
259 @*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  /* nested dissection requires a fully assembled, unfactored adjacency matrix */
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
  /* errors if the selected partitioner type does not implement applynd */
  PetscUseTypeMethod(matp, applynd, partitioning);
  PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

  /* honor -mat_partitioning_view for both the context and the resulting IS */
  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
275 
276 /*@
277   MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.
278 
279   Collective
280 
281   Input Parameter:
282 . matp - the matrix partitioning object
283 
284   Output Parameter:
285 . partitioning - the partitioning. For each local node this tells the processor
286                    number that that node is assigned to.
287 
288   Options Database Keys:
289 + -mat_partitioning_type <type> - set the partitioning package or algorithm to use
290 - -mat_partitioning_view        - display information about the partitioning object
291 
292   Level: beginner
293 
294   Note:
      The user can define additional partitionings; see `MatPartitioningRegister()`.
295 
296 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
297           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
298           `ISPartitioningCount()`
299 @*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
{
  PetscBool viewbalance, improve;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  /* partitioning requires a fully assembled, unfactored adjacency matrix */
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  /* errors if the selected partitioner type does not implement apply */
  PetscUseTypeMethod(matp, apply, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

  /* optional post-processing requested through the options database */
  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
  improve = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
  PetscOptionsEnd();

  /* improve first so the imbalance report reflects the improved partition */
  if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

  if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
328 
329 /*@
330   MatPartitioningImprove - Improves the quality of a given partition.
331 
332   Collective
333 
334   Input Parameters:
335 + matp         - the matrix partitioning object
336 - partitioning - the original partitioning. For each local node this tells the processor
337                    number that that node is assigned to.
338 
339   Options Database Key:
340 . -mat_partitioning_improve - improve the quality of the given partition
341 
342   Level: beginner
343 
344 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
345           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
346           `ISPartitioningCount()`
347 @*/
PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  /* PetscTryTypeMethod: silently a no-op when the type does not implement improve */
  PetscTryTypeMethod(matp, improve, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
360 
361 /*@
362   MatPartitioningViewImbalance - Display partitioning imbalance information.
363 
364   Collective
365 
366   Input Parameters:
367 + matp         - the matrix partitioning object
368 - partitioning - the partitioning. For each local node this tells the processor
369                    number that that node is assigned to.
370 
371   Options Database Key:
372 . -mat_partitioning_view_imbalance - view the imbalance information from the last partitioning
373 
374   Level: beginner
375 
376 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
377 @*/
PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
  const PetscInt *indices;
  PetscViewer     viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscValidHeaderSpecific(partitioning, IS_CLASSID, 2);
  nparts = matp->n;
  PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
  PetscCall(ISGetLocalSize(partitioning, &nlocal));
  PetscCall(ISGetIndices(partitioning, &indices));
  /* accumulate the (vertex-weighted, or unit if no weights) size of each subdomain
     from the local assignments, then sum contributions over all ranks */
  for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  PetscCall(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
  PetscCall(ISRestoreIndices(partitioning, &indices));
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i = 0; i < nparts; i++) {
    minsub = PetscMin(minsub, subdomainsizes[i]);
    maxsub = PetscMax(maxsub, subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
  PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
  PetscCall(MatPartitioningView(matp, viewer));
  /* NOTE(review): if any subdomain is empty, minsub is 0 and the ratio R divides by
     zero — confirm callers guarantee nonempty parts */
  PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
407 
408 /*@
409   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
410   partitioned.
411 
412   Collective
413 
414   Input Parameters:
415 + part - the partitioning context
416 - adj  - the adjacency matrix, this can be any `MatType` but the natural representation is `MATMPIADJ`
417 
418   Level: beginner
419 
420 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
421 @*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidHeaderSpecific(adj, MAT_CLASSID, 2);
  /* NOTE(review): no reference is taken on adj here, so the caller must keep the
     matrix alive for the lifetime of the partitioning context — confirm intended */
  part->adj = adj;
  PetscFunctionReturn(PETSC_SUCCESS);
}
430 
431 /*@
432   MatPartitioningDestroy - Destroys the partitioning context.
433 
434   Collective
435 
436   Input Parameter:
437 . part - the partitioning context
438 
439   Level: beginner
440 
441 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
442 @*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*part, MAT_PARTITIONING_CLASSID, 1);
  /* only tear down when the last reference is released */
  if (--((PetscObject)*part)->refct > 0) {
    *part = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* type-specific teardown first (no-op if the type provides none) */
  PetscTryTypeMethod(*part, destroy);
  /* the object owns the weight arrays handed over by the Set*Weights() routines */
  PetscCall(PetscFree((*part)->vertex_weights));
  PetscCall(PetscFree((*part)->part_weights));
  PetscCall(PetscHeaderDestroy(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}
459 
460 /*@C
461   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.
462 
463   Logically Collective
464 
465   Input Parameters:
466 + part    - the partitioning context
467 - weights - the weights, on each process this array must have the same size as the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()` or
468             1 if that is not provided
469 
470   Level: beginner
471 
472   Notes:
473   The array weights is freed by PETSc so the user should not free the array. In C/C++
474   the array must be obtained with a call to `PetscMalloc()`, not malloc().
475 
476   The weights may not be used by some partitioners
477 
478   Fortran Note:
479   The array `weights` is copied during this function call.
480 
481 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
482 @*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  /* free any previously supplied array; the object takes ownership of the new one
     (it is freed in MatPartitioningDestroy()), hence the cast away from const */
  PetscCall(PetscFree(part->vertex_weights));
  part->vertex_weights = (PetscInt *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
491 
492 /*@C
493   MatPartitioningSetPartitionWeights - Sets the weights for each partition.
494 
495   Logically Collective
496 
497   Input Parameters:
498 + part    - the partitioning context
499 - weights - An array of size nparts that is used to specify the fraction of
500             vertex weight that should be distributed to each sub-domain for
501             the balance constraint. If all of the sub-domains are to be of
502             the same size, then each of the nparts elements should be set
503             to a value of 1/nparts. Note that the sum of all of the weights
504             should be one.
505 
506   Level: beginner
507 
508   Note:
509   The array weights is freed by PETSc so the user should not free the array. In C/C++
510   the array must be obtained with a call to `PetscMalloc()`, not malloc().
511 
512   Fortran Note:
513   The array `weights` is copied during this function call.
514 
515 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
516 @*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  /* free any previously supplied array; the object takes ownership of the new one
     (it is freed in MatPartitioningDestroy()), hence the cast away from const */
  PetscCall(PetscFree(part->part_weights));
  part->part_weights = (PetscReal *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
525 
526 /*@
527   MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.
528 
529   Logically Collective
530 
531   Input Parameters:
532 + part             - the partitioning context
533 - use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights will be used,
534                      that is, use_edge_weights is set to FALSE. If use_edge_weights is set to TRUE, users need to make sure legal
535                      edge weights are stored in an ADJ matrix.
536 
537   Options Database Key:
538 . -mat_partitioning_use_edge_weights - (true or false)
539 
540   Level: beginner
541 
542 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
543 @*/
PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  /* simple flag setter; the flag is consulted by the type-specific apply routines */
  part->use_edge_weights = use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
551 
552 /*@
553   MatPartitioningGetUseEdgeWeights - Get a flag that indicates whether or not edge weights are used.
554 
555   Logically Collective
556 
557   Input Parameter:
558 . part - the partitioning context
559 
560   Output Parameter:
561 . use_edge_weights - the flag indicating whether or not edge weights are used.
562 
563   Level: beginner
564 
565 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
566           `MatPartitioningSetUseEdgeWeights`
567 @*/
PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(use_edge_weights, 2);
  /* simple flag getter mirroring MatPartitioningSetUseEdgeWeights() */
  *use_edge_weights = part->use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}
576 
577 /*@
578   MatPartitioningCreate - Creates a partitioning context.
579 
580   Collective
581 
582   Input Parameter:
583 . comm - MPI communicator
584 
585   Output Parameter:
586 . newp - location to put the context
587 
588   Level: beginner
589 
590 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
591           `MatPartitioningSetAdjacency()`
592 @*/
593 PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
594 {
595   MatPartitioning part;
596   PetscMPIInt     size;
597 
598   PetscFunctionBegin;
599   *newp = NULL;
600 
601   PetscCall(MatInitializePackage());
602   PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatGraphOperations", comm, MatPartitioningDestroy, MatPartitioningView));
603   part->vertex_weights   = NULL;
604   part->part_weights     = NULL;
605   part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */
606 
607   PetscCallMPI(MPI_Comm_size(comm, &size));
608   part->n    = (PetscInt)size;
609   part->ncon = 1;
610 
611   *newp = part;
612   PetscFunctionReturn(PETSC_SUCCESS);
613 }
614 
615 /*@C
616   MatPartitioningViewFromOptions - View a partitioning context from the options database
617 
618   Collective
619 
620   Input Parameters:
621 + A    - the partitioning context
622 . obj  - Optional object that provides the prefix used in the options database check
623 - name - command line option
624 
625   Options Database Key:
626 . -mat_partitioning_view [viewertype]:... - the viewer and its options
627 
628   Level: intermediate
629 
630   Note:
631 .vb
632     If no value is provided ascii:stdout is used
633        ascii[:[filename][:[format][:append]]]    defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
634                                                   for example ascii::ascii_info prints just the information about the object not all details
635                                                   unless :append is given filename opens in write mode, overwriting what was already there
636        binary[:[filename][:[format][:append]]]   defaults to the file binaryoutput
637        draw[:drawtype[:filename]]                for example, draw:tikz, draw:tikz:figure.tex  or draw:x
638        socket[:port]                             defaults to the standard output port
639        saws[:communicatorname]                    publishes object to the Scientific Application Webserver (SAWs)
640 .ve
641 
642 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
643 @*/
PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_PARTITIONING_CLASSID, 1);
  /* thin wrapper: delegate viewer selection and viewing to the generic object path */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
651 
652 /*@C
653   MatPartitioningView - Prints the partitioning data structure.
654 
655   Collective
656 
657   Input Parameters:
658 + part   - the partitioning context
659 - viewer - optional visualization context
660 
661   Level: intermediate
662 
663   Note:
664   The available visualization contexts include
665 +     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
666 -     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
667   output where only the first processor opens
668   the file.  All other processors send their
669   data to the first processor to print.
670 
671   The user can open alternative visualization contexts with
672 .     `PetscViewerASCIIOpen()` - output to a specified file
673 
674 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
675 @*/
PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  /* default to stdout on the object's communicator when no viewer is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(part, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
    if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n"));
  }
  /* let the type-specific view routine (if any) print its details indented under the header */
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscTryTypeMethod(part, view, viewer);
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
696 
697 /*@C
698   MatPartitioningSetType - Sets the type of partitioner to use
699 
700   Collective
701 
702   Input Parameters:
703 + part - the partitioning context.
704 - type - a known method
705 
706   Options Database Key:
707 . -mat_partitioning_type  <type> - (for instance, parmetis), use -help for a list of available methods or see  `MatPartitioningType`
708 
709   Level: intermediate
710 
711 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
712 @*/
713 PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
714 {
715   PetscBool match;
716   PetscErrorCode (*r)(MatPartitioning);
717 
718   PetscFunctionBegin;
719   PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
720   PetscAssertPointer(type, 2);
721 
722   PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
723   if (match) PetscFunctionReturn(PETSC_SUCCESS);
724 
725   PetscTryTypeMethod(part, destroy);
726   part->ops->destroy = NULL;
727 
728   part->setupcalled = 0;
729   part->data        = NULL;
730   PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));
731 
732   PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
733   PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);
734 
735   PetscCall((*r)(part));
736 
737   PetscCall(PetscFree(((PetscObject)part)->type_name));
738   PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
739   PetscFunctionReturn(PETSC_SUCCESS);
740 }
741 
742 /*@
743   MatPartitioningSetFromOptions - Sets various partitioning options from the
744   options database for the partitioning object
745 
746   Collective
747 
748   Input Parameter:
749 . part - the partitioning context.
750 
751   Options Database Keys:
752 + -mat_partitioning_type  <type> - (for instance, parmetis), use -help for a list of available methods
753 - -mat_partitioning_nparts       - number of subgraphs
754 
755   Level: beginner
756 
757   Note:
758   If the partitioner has not been set by the user it uses one of the installed partitioners such as ParMetis. If there are
759   no installed partitioners it does no repartitioning.
760 
761 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`
762 @*/
763 PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
764 {
765   PetscBool   flag;
766   char        type[256];
767   const char *def;
768 
769   PetscFunctionBegin;
770   PetscObjectOptionsBegin((PetscObject)part);
771   if (!((PetscObject)part)->type_name) {
772 #if defined(PETSC_HAVE_PARMETIS)
773     def = MATPARTITIONINGPARMETIS;
774 #elif defined(PETSC_HAVE_CHACO)
775     def = MATPARTITIONINGCHACO;
776 #elif defined(PETSC_HAVE_PARTY)
777     def = MATPARTITIONINGPARTY;
778 #elif defined(PETSC_HAVE_PTSCOTCH)
779     def = MATPARTITIONINGPTSCOTCH;
780 #else
781     def = MATPARTITIONINGCURRENT;
782 #endif
783   } else {
784     def = ((PetscObject)part)->type_name;
785   }
786   PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
787   if (flag) PetscCall(MatPartitioningSetType(part, type));
788 
789   PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));
790 
791   PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));
792 
793   /*
794     Set the type if it was never set.
795   */
796   if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));
797 
798   PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
799   PetscOptionsEnd();
800   PetscFunctionReturn(PETSC_SUCCESS);
801 }
802 
803 /*@C
804   MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex
805 
806   Not Collective
807 
808   Input Parameters:
809 + partitioning - the partitioning context
810 - ncon         - the number of weights
811 
812   Level: intermediate
813 
814 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
815 @*/
PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
  /* number of balance constraints (weights) carried per vertex; the weights array
     passed to MatPartitioningSetVertexWeights() must be sized accordingly */
  partitioning->ncon = ncon;
  PetscFunctionReturn(PETSC_SUCCESS);
}
823