xref: /petsc/src/mat/graphops/partition/partition.c (revision d9acb416d05abeed0a33bde3a81aeb2ea0364f6a)
#include <petsc/private/matimpl.h> /*I "petscmat.h" I*/

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  if (part->n != size) {
    const char *prefix;
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
    SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
  }
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));

  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Partition the global index space into contiguous chunks of nearly equal size, one
   chunk per subdomain, to rebalance the computation; the adjacency information is ignored.
*/
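/*
   Worked example (for illustration): with M = 10 rows and nparts = 3, parts[] below
   becomes {4, 3, 3} and then the cumulative boundaries {4, 7, 10}, so rows 0-3 are
   assigned to part 0, rows 4-6 to part 1, and rows 7-9 to part 2.
*/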
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
{
  PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;

  PetscFunctionBegin;
  PetscCall(MatGetSize(part->adj, &M, NULL));
  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  nparts = part->n;
  PetscCall(PetscMalloc1(nparts, &parts));
  /* distribute the M rows as evenly as possible: the first M % nparts parts get one extra row */
  d = M / nparts;
  for (i = 0; i < nparts; i++) parts[i] = d;
  r = M % nparts;
  for (i = 0; i < r; i++) parts[i] += 1;
  /* turn the part sizes into cumulative boundaries: rows [parts[j-1], parts[j]) belong to part j (with parts[-1] = 0) */
  for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
  PetscCall(PetscMalloc1(m, &indices));
  PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
  for (i = start; i < end; i++) {
    PetscCall(PetscFindInt(i, nparts, parts, &loc));
    if (loc < 0) loc = -(loc + 1);
    else loc = loc + 1;
    indices[i - start] = loc;
  }
  PetscCall(PetscFree(parts));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
{
  PetscInt    cell, n, N, p, rstart, rend, *color;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");

  PetscCall(MatGetSize(part->adj, &N, NULL));
  n = (PetscInt)PetscSqrtReal((PetscReal)N);
  PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
  PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
  PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
  PetscCall(PetscMalloc1(rend - rstart, &color));
  /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
  /* cell (cell / n, cell % n) of the n x n grid belongs to the enclosing (n/p) x (n/p) block, numbered row-major over the p x p grid of blocks */
  for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
       seps[  0 :         2*p) : the start and end node of each subdomain
       seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
     levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough
*/
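/*
   Worked example (for illustration): with p = 2 and sizes = {3, 3, 1} (7 nodes total: two
   3-node subdomains plus a 1-node root separator), the routine below returns
   seps = {0, 2, 3, 5, 6, 6} (subdomains [0,2] and [3,5], separator [6,6]) and
   level = {-1} for the root separator.
*/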
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt i, pTree, pStartTree;

  PetscFunctionBegin;
  if (!p) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(!(p & (p - 1)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
  PetscCall(PetscArrayzero(seps, 2 * p - 2));
  PetscCall(PetscArrayzero(level, p - 1));
  seps[2 * p - 2] = sizes[2 * p - 2];
  pTree           = p;
  pStartTree      = 0;
  /* bottom-up pass: accumulate the subtree sizes level by level */
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree / 2;
  }
  seps[2 * p - 2] -= sizes[2 * p - 2];

  pStartTree = 2 * p - 2;
  pTree      = 1;
  /* top-down pass: turn the accumulated sizes into starting offsets and record the separator levels */
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2 * i - (pStartTree + 2 * pTree);
      PetscInt n = seps[k + 1];

      seps[k + 1]  = seps[i] - sizes[k + 1];
      seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
      level[i - p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
  /* expand each starting offset into an inclusive [start, end] pair */
  for (i = 2 * p - 2; i >= 0; i--) {
    seps[2 * i]     = seps[i];
    seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
  MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

  Not Collective

  Input Parameters:
+ sname    - name of the partitioning (for example `MATPARTITIONINGCURRENT` or `MATPARTITIONINGPARMETIS`)
- function - function pointer that creates the partitioning type

  Level: developer

  Example Usage:
.vb
   MatPartitioningRegister("my_part", MyPartCreate);
.ve

  Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part, "my_part")
  or at runtime via the option
$     -mat_partitioning_type my_part

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningGetType - Gets the partitioning method type and name (as a string)
  from the partitioning context.

  Not Collective

  Input Parameter:
. partitioning - the partitioning context

  Output Parameter:
. type - partitioner type

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(type, 2);
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetNParts - Sets how many partitions need to be created;
  by default this is one per processor. Certain partitioning schemes may
  in fact only support that option.

  Collective

  Input Parameters:
+ part - the partitioning context
- n    - the number of partitions

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
{
  PetscFunctionBegin;
  part->n = n;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node, a nonnegative value indicates the processor
                   number the node has been assigned to, while a negative value x indicates that the node lies on a separator at level -(x+1).

  Level: intermediate

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.

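  Example Usage:
  A minimal calling sketch, assuming `A` is an assembled adjacency matrix (for example `MATMPIADJ`)
  and the selected partitioning type implements nested dissection (as the ParMetis and PT-Scotch
  types do):
.vb
   MatPartitioning part;
   IS              ndmap;
   PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
   PetscCall(MatPartitioningSetAdjacency(part, A));
   PetscCall(MatPartitioningSetFromOptions(part));
   PetscCall(MatPartitioningApplyND(part, &ndmap));
   /* ... use ndmap ... */
   PetscCall(ISDestroy(&ndmap));
   PetscCall(MatPartitioningDestroy(&part));
.ve
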
.seealso: [](ch_matrices), `Mat`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, applynd, partitioning);
  PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node this tells the processor
                   number that the node is assigned to.

  Options Database Keys:
+ -mat_partitioning_type <type> - set the partitioning package or algorithm to use
- -mat_partitioning_view        - display information about the partitioning object

  Level: beginner

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.

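  Example Usage:
  A sketch of the typical workflow, assuming `A` is an assembled adjacency matrix:
.vb
   MatPartitioning part;
   IS              is;
   PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
   PetscCall(MatPartitioningSetAdjacency(part, A));
   PetscCall(MatPartitioningSetFromOptions(part));
   PetscCall(MatPartitioningApply(part, &is));
   /* is now gives, for each local row of A, the subdomain that row was assigned to */
   PetscCall(ISDestroy(&is));
   PetscCall(MatPartitioningDestroy(&part));
.ve
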
.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
{
  PetscBool viewbalance, improve;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, apply, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
  improve = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
  PetscOptionsEnd();

  if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

  if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningImprove - Improves the quality of a given partition.

  Collective

  Input Parameters:
+ matp         - the matrix partitioning object
- partitioning - the original partitioning. For each local node this tells the processor
                   number that the node is assigned to.

  Options Database Key:
. -mat_partitioning_improve - improve the quality of the given partition

  Level: beginner

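  Example Usage:
  A sketch; only some partitioning types provide an improve operation, and for the others this call
  is a no-op:
.vb
   PetscCall(MatPartitioningApply(part, &is));
   PetscCall(MatPartitioningImprove(part, &is));
.ve
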
.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(partitioning, 2);
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscTryTypeMethod(matp, improve, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningViewImbalance - Display partitioning imbalance information.

  Collective

  Input Parameters:
+ matp         - the matrix partitioning object
- partitioning - the partitioning. For each local node this tells the processor
                   number that the node is assigned to.

  Options Database Key:
. -mat_partitioning_view_imbalance - view the imbalance information of the last partitioning

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
@*/
PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
  const PetscInt *indices;
  PetscViewer     viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
  PetscValidHeaderSpecific(partitioning, IS_CLASSID, 2);
  nparts = matp->n;
  PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
  PetscCall(ISGetLocalSize(partitioning, &nlocal));
  PetscCall(ISGetIndices(partitioning, &indices));
  for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  PetscCall(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
  PetscCall(ISRestoreIndices(partitioning, &indices));
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i = 0; i < nparts; i++) {
    minsub = PetscMin(minsub, subdomainsizes[i]);
    maxsub = PetscMax(maxsub, subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
  PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
  PetscCall(MatPartitioningView(matp, viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
  partitioned.

  Collective

  Input Parameters:
+ part - the partitioning context
- adj  - the adjacency matrix; this can be any `MatType`, but the natural representation is `MATMPIADJ`

  Level: beginner

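  Example Usage:
  A sketch, where `m`, `N`, `ia`, and `ja` are assumed to describe the local rows of the graph in
  CSR form, as expected by `MatCreateMPIAdj()`:
.vb
   Mat adj;
   PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, m, N, ia, ja, NULL, &adj)); /* the MATMPIADJ matrix takes ownership of ia and ja */
   PetscCall(MatPartitioningSetAdjacency(part, adj));
.ve
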
.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidHeaderSpecific(adj, MAT_CLASSID, 2);
  part->adj = adj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningDestroy - Destroys the partitioning context.

  Collective

  Input Parameter:
. part - the partitioning context

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific((*part), MAT_PARTITIONING_CLASSID, 1);
  if (--((PetscObject)(*part))->refct > 0) {
    *part = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscTryTypeMethod(*part, destroy);
  PetscCall(PetscFree((*part)->vertex_weights));
  PetscCall(PetscFree((*part)->part_weights));
  PetscCall(PetscHeaderDestroy(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

  Logically Collective

  Input Parameters:
+ part    - the partitioning context
- weights - the weights; on each process this array must have the same size as the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()`, or
             1 if that is not provided

  Level: beginner

  Notes:
  The array `weights` is freed by PETSc, so the user should not free it. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

  The weights may not be used by some partitioners.

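  Example Usage:
  A sketch assigning one unit weight per local row, where `adj` is assumed to be the adjacency
  matrix passed to `MatPartitioningSetAdjacency()`:
.vb
   PetscInt *weights, i, m;
   PetscCall(MatGetLocalSize(adj, &m, NULL));
   PetscCall(PetscMalloc1(m, &weights)); /* PETSc takes ownership, so do not free weights */
   for (i = 0; i < m; i++) weights[i] = 1;
   PetscCall(MatPartitioningSetVertexWeights(part, weights));
.ve
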
.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscCall(PetscFree(part->vertex_weights));
  part->vertex_weights = (PetscInt *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetPartitionWeights - Sets the weights for each partition.

  Logically Collective

  Input Parameters:
+ part    - the partitioning context
- weights - an array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

  Level: beginner

  Note:
  The array `weights` is freed by PETSc, so the user should not free it. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

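  Example Usage:
  A sketch requesting four equally weighted subdomains:
.vb
   PetscReal *pw;
   PetscInt   i, nparts = 4;
   PetscCall(PetscMalloc1(nparts, &pw)); /* PETSc takes ownership, so do not free pw */
   for (i = 0; i < nparts; i++) pw[i] = 1.0 / nparts;
   PetscCall(MatPartitioningSetNParts(part, nparts));
   PetscCall(MatPartitioningSetPartitionWeights(part, pw));
.ve
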
.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscCall(PetscFree(part->part_weights));
  part->part_weights = (PetscReal *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

  Logically Collective

  Input Parameters:
+ part             - the partitioning context
- use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
                      that is, use_edge_weights is `PETSC_FALSE`. If use_edge_weights is set to `PETSC_TRUE`, users need to make sure legal
                      edge weights are stored in an ADJ matrix.

  Options Database Key:
. -mat_partitioning_use_edge_weights - (true or false)

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
@*/
PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  part->use_edge_weights = use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningGetUseEdgeWeights - Get the flag that indicates whether or not edge weights are used.

  Logically Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. use_edge_weights - the flag indicating whether or not edge weights are used

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
          `MatPartitioningSetUseEdgeWeights`
@*/
PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(use_edge_weights, 2);
  *use_edge_weights = part->use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningCreate - Creates a partitioning context.

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newp - location to put the context

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
          `MatPartitioningSetAdjacency()`
@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = NULL;

  PetscCall(MatInitializePackage());
  PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatGraphOperations", comm, MatPartitioningDestroy, MatPartitioningView));
  part->vertex_weights   = NULL;
  part->part_weights     = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  PetscCallMPI(MPI_Comm_size(comm, &size));
  part->n    = (PetscInt)size;
  part->ncon = 1;

  *newp = part;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningViewFromOptions - View a partitioning context from the options database

  Collective

  Input Parameters:
+ A    - the partitioning context
. obj  - Optional object that provides the prefix used in the options database check
- name - command line option

  Options Database Key:
. -mat_partitioning_view [viewertype]:... - the viewer and its options

  Level: intermediate

  Note:
.vb
    If no value is provided ascii:stdout is used
       ascii[:[filename][:[format][:append]]]    defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
                                                  for example ascii::ascii_info prints just the information about the object not all details
                                                  unless :append is given filename opens in write mode, overwriting what was already there
       binary[:[filename][:[format][:append]]]   defaults to the file binaryoutput
       draw[:drawtype[:filename]]                for example, draw:tikz, draw:tikz:figure.tex  or draw:x
       socket[:port]                             defaults to the standard output port
       saws[:communicatorname]                   publishes object to the Scientific Application Webserver (SAWs)
.ve

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_PARTITIONING_CLASSID, 1);
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningView - Prints the partitioning data structure.

  Collective

  Input Parameters:
+ part   - the partitioning context
- viewer - optional visualization context

  Level: intermediate

  Note:
  The available visualization contexts include
+     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
-     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
  output where only the first processor opens
  the file.  All other processors send their
  data to the first processor to print.

  The user can open alternative visualization contexts with
.     `PetscViewerASCIIOpen()` - output to a specified file

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(part, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
    if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n"));
  }
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscTryTypeMethod(part, view, viewer);
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetType - Sets the type of partitioner to use

  Collective

  Input Parameters:
+ part - the partitioning context
- type - a known method

  Options Database Key:
. -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods or see `MatPartitioningType`

  Level: intermediate

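  Example Usage:
  A sketch; `MATPARTITIONINGPARMETIS` requires PETSc to have been configured with ParMetis:
.vb
   PetscCall(MatPartitioningSetType(part, MATPARTITIONINGPARMETIS));
.ve
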
.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
{
  PetscBool match;
  PetscErrorCode (*r)(MatPartitioning);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(type, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  PetscTryTypeMethod(part, destroy);
  part->ops->destroy = NULL;

  part->setupcalled = 0;
  part->data        = NULL;
  PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));

  PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);

  PetscCall((*r)(part));

  PetscCall(PetscFree(((PetscObject)part)->type_name));
  PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetFromOptions - Sets various partitioning options from the
  options database for the partitioning object

  Collective

  Input Parameter:
. part - the partitioning context

  Options Database Keys:
+ -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods
- -mat_partitioning_nparts      - number of subgraphs

  Level: beginner

  Note:
  If the partitioner has not been set by the user it uses one of the installed partitioners, such as ParMetis. If there are
  no installed partitioners it does no repartitioning.

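  Example Usage:
  A sketch of the typical call order; with it, the partitioner can be chosen at runtime, e.g. with
  -mat_partitioning_type parmetis -mat_partitioning_nparts 8:
.vb
   PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
   PetscCall(MatPartitioningSetAdjacency(part, adj));
   PetscCall(MatPartitioningSetFromOptions(part));
   PetscCall(MatPartitioningApply(part, &is));
.ve
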
.seealso: [](ch_matrices), `Mat`, `MatPartitioning`
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool   flag;
  char        type[256];
  const char *def;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
  if (flag) PetscCall(MatPartitioningSetType(part, type));

  PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));

  PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));

  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));

  PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

  Not Collective

  Input Parameters:
+ partitioning - the partitioning context
- ncon         - the number of weights

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
  partitioning->ncon = ncon;
  PetscFunctionReturn(PETSC_SUCCESS);
}
819