1 #include <petsc/private/matimpl.h> /*I "petscmat.h" I*/
2
3 /* Logging support */
4 PetscClassId MAT_PARTITIONING_CLASSID;
5
6 /*
7 Simplest partitioning, keeps the current partitioning.
8 */
MatPartitioningApply_Current(MatPartitioning part,IS * partitioning)9 static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
10 {
11 PetscInt m;
12 PetscMPIInt rank, size;
13
14 PetscFunctionBegin;
15 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
16 if (part->n != size) {
17 const char *prefix;
18 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
19 SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
20 }
21 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));
22
23 PetscCall(MatGetLocalSize(part->adj, &m, NULL));
24 PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
25 PetscFunctionReturn(PETSC_SUCCESS);
26 }
27
28 /*
29 partition an index to rebalance the computation
30 */
MatPartitioningApply_Average(MatPartitioning part,IS * partitioning)31 static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
32 {
33 PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;
34
35 PetscFunctionBegin;
36 PetscCall(MatGetSize(part->adj, &M, NULL));
37 PetscCall(MatGetLocalSize(part->adj, &m, NULL));
38 nparts = part->n;
39 PetscCall(PetscMalloc1(nparts, &parts));
40 d = M / nparts;
41 for (i = 0; i < nparts; i++) parts[i] = d;
42 r = M % nparts;
43 for (i = 0; i < r; i++) parts[i] += 1;
44 for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
45 PetscCall(PetscMalloc1(m, &indices));
46 PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
47 for (i = start; i < end; i++) {
48 PetscCall(PetscFindInt(i, nparts, parts, &loc));
49 if (loc < 0) loc = -(loc + 1);
50 else loc = loc + 1;
51 indices[i - start] = loc;
52 }
53 PetscCall(PetscFree(parts));
54 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
55 PetscFunctionReturn(PETSC_SUCCESS);
56 }
57
MatPartitioningApply_Square(MatPartitioning part,IS * partitioning)58 static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
59 {
60 PetscInt cell, n, N, p, rstart, rend, *color;
61 PetscMPIInt size;
62
63 PetscFunctionBegin;
64 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
65 PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
66 p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
67 PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");
68
69 PetscCall(MatGetSize(part->adj, &N, NULL));
70 n = (PetscInt)PetscSqrtReal((PetscReal)N);
71 PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
72 PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
73 PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
74 PetscCall(PetscMalloc1(rend - rstart, &color));
75 /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
76 for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
77 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
78 PetscFunctionReturn(PETSC_SUCCESS);
79 }
80
MatPartitioningCreate_Current(MatPartitioning part)81 PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
82 {
83 PetscFunctionBegin;
84 part->ops->apply = MatPartitioningApply_Current;
85 part->ops->view = NULL;
86 part->ops->destroy = NULL;
87 PetscFunctionReturn(PETSC_SUCCESS);
88 }
89
MatPartitioningCreate_Average(MatPartitioning part)90 PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
91 {
92 PetscFunctionBegin;
93 part->ops->apply = MatPartitioningApply_Average;
94 part->ops->view = NULL;
95 part->ops->destroy = NULL;
96 PetscFunctionReturn(PETSC_SUCCESS);
97 }
98
MatPartitioningCreate_Square(MatPartitioning part)99 PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
100 {
101 PetscFunctionBegin;
102 part->ops->apply = MatPartitioningApply_Square;
103 part->ops->view = NULL;
104 part->ops->destroy = NULL;
105 PetscFunctionReturn(PETSC_SUCCESS);
106 }
107
108 /* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
109 seps[ 0 : 2*p) : the start and end node of each subdomain
110 seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
111 levels[ 0 : p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
112 The arrays must be large enough
113 */
MatPartitioningSizesToSep_Private(PetscInt p,PetscInt sizes[],PetscInt seps[],PetscInt level[])114 PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
115 {
116 PetscInt l2p, i, pTree, pStartTree;
117
118 PetscFunctionBegin;
119 l2p = PetscLog2Real(p);
120 PetscCheck(!(l2p - (PetscInt)PetscLog2Real(p)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
121 if (!p) PetscFunctionReturn(PETSC_SUCCESS);
122 PetscCall(PetscArrayzero(seps, 2 * p - 2));
123 PetscCall(PetscArrayzero(level, p - 1));
124 seps[2 * p - 2] = sizes[2 * p - 2];
125 pTree = p;
126 pStartTree = 0;
127 while (pTree != 1) {
128 for (i = pStartTree; i < pStartTree + pTree; i++) {
129 seps[i] += sizes[i];
130 seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
131 }
132 pStartTree += pTree;
133 pTree = pTree / 2;
134 }
135 seps[2 * p - 2] -= sizes[2 * p - 2];
136
137 pStartTree = 2 * p - 2;
138 pTree = 1;
139 while (pStartTree > 0) {
140 for (i = pStartTree; i < pStartTree + pTree; i++) {
141 PetscInt k = 2 * i - (pStartTree + 2 * pTree);
142 PetscInt n = seps[k + 1];
143
144 seps[k + 1] = seps[i] - sizes[k + 1];
145 seps[k] = seps[k + 1] + sizes[k + 1] - n - sizes[k];
146 level[i - p] = -pTree - i + pStartTree;
147 }
148 pTree *= 2;
149 pStartTree -= pTree;
150 }
151 /* I know there should be a formula */
152 PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
153 for (i = 2 * p - 2; i >= 0; i--) {
154 seps[2 * i] = seps[i];
155 seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
156 }
157 PetscFunctionReturn(PETSC_SUCCESS);
158 }
159
160 PetscFunctionList MatPartitioningList = NULL;
161 PetscBool MatPartitioningRegisterAllCalled = PETSC_FALSE;
162
163 /*@C
164 MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.
165
166 Not Collective, No Fortran Support
167
168 Input Parameters:
169 + sname - name of partitioning (for example `MATPARTITIONINGCURRENT`) or `MATPARTITIONINGPARMETIS`
170 - function - function pointer that creates the partitioning type
171
172 Level: developer
173
174 Example Usage:
175 .vb
176 MatPartitioningRegister("my_part", MyPartCreate);
177 .ve
178
179 Then, your partitioner can be chosen with the procedural interface via `MatPartitioningSetType(part, "my_part")` or at runtime via the option
180 `-mat_partitioning_type my_part`
181
182 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
183 @*/
MatPartitioningRegister(const char sname[],PetscErrorCode (* function)(MatPartitioning))184 PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
185 {
186 PetscFunctionBegin;
187 PetscCall(MatInitializePackage());
188 PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
189 PetscFunctionReturn(PETSC_SUCCESS);
190 }
191
192 /*@
193 MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
194 from the partitioning context.
195
196 Not Collective
197
198 Input Parameter:
199 . partitioning - the partitioning context
200
201 Output Parameter:
202 . type - partitioner type
203
204 Level: intermediate
205
206 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
207 @*/
MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType * type)208 PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
209 {
210 PetscFunctionBegin;
211 PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
212 PetscAssertPointer(type, 2);
213 *type = ((PetscObject)partitioning)->type_name;
214 PetscFunctionReturn(PETSC_SUCCESS);
215 }
216
217 /*@
218 MatPartitioningSetNParts - Set how many partitions need to be created;
219 by default this is one per processor. Certain partitioning schemes may
220 in fact only support that option.
221
222 Collective
223
224 Input Parameters:
225 + part - the partitioning context
226 - n - the number of partitions
227
228 Level: intermediate
229
230 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
231 @*/
MatPartitioningSetNParts(MatPartitioning part,PetscInt n)232 PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
233 {
234 PetscFunctionBegin;
235 part->n = n;
236 PetscFunctionReturn(PETSC_SUCCESS);
237 }
238
239 /*@
240 MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.
241
242 Collective
243
244 Input Parameter:
245 . matp - the matrix partitioning object
246
247 Output Parameter:
248 . partitioning - the partitioning. For each local node, a positive value indicates the processor
249 number the node has been assigned to. Negative x values indicate the separator level -(x+1).
250
251 Level: intermediate
252
253 Note:
254 The user can define additional partitionings; see `MatPartitioningRegister()`.
255
256 .seealso: [](ch_matrices), `Mat`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
257 `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
258 `ISPartitioningCount()`
259 @*/
MatPartitioningApplyND(MatPartitioning matp,IS * partitioning)260 PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
261 {
262 PetscFunctionBegin;
263 PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
264 PetscAssertPointer(partitioning, 2);
265 PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
266 PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
267 PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
268 PetscUseTypeMethod(matp, applynd, partitioning);
269 PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));
270
271 PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
272 PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
273 PetscFunctionReturn(PETSC_SUCCESS);
274 }
275
276 /*@
277 MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.
278
279 Collective
280
281 Input Parameter:
282 . matp - the matrix partitioning object
283
284 Output Parameter:
285 . partitioning - the partitioning. For each local node this tells the MPI rank that that node is assigned to.
286
287 Options Database Keys:
288 + -mat_partitioning_type <type> - set the partitioning package or algorithm to use
289 - -mat_partitioning_view - display information about the partitioning object
290
291 Level: beginner
292
293 The user can define additional partitionings; see `MatPartitioningRegister()`.
294
295 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
296 `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
297 `ISPartitioningCount()`
298 @*/
MatPartitioningApply(MatPartitioning matp,IS * partitioning)299 PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
300 {
301 PetscBool viewbalance, improve;
302
303 PetscFunctionBegin;
304 PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
305 PetscAssertPointer(partitioning, 2);
306 PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
307 PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
308 PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
309 PetscUseTypeMethod(matp, apply, partitioning);
310 PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
311
312 PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
313 PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
314
315 PetscObjectOptionsBegin((PetscObject)matp);
316 viewbalance = PETSC_FALSE;
317 PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
318 improve = PETSC_FALSE;
319 PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
320 PetscOptionsEnd();
321
322 if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));
323
324 if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
325 PetscFunctionReturn(PETSC_SUCCESS);
326 }
327
328 /*@
329 MatPartitioningImprove - Improves the quality of a given partition.
330
331 Collective
332
333 Input Parameters:
334 + matp - the matrix partitioning object
335 - partitioning - the original partitioning. For each local node this tells the processor
336 number that that node is assigned to.
337
338 Options Database Key:
339 . -mat_partitioning_improve - improve the quality of the given partition
340
341 Level: beginner
342
343 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
344 `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
345 `ISPartitioningCount()`
346 @*/
MatPartitioningImprove(MatPartitioning matp,IS * partitioning)347 PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
348 {
349 PetscFunctionBegin;
350 PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
351 PetscAssertPointer(partitioning, 2);
352 PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
353 PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
354 PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
355 PetscTryTypeMethod(matp, improve, partitioning);
356 PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
357 PetscFunctionReturn(PETSC_SUCCESS);
358 }
359
360 /*@
361 MatPartitioningViewImbalance - Display partitioning imbalance information.
362
363 Collective
364
365 Input Parameters:
366 + matp - the matrix partitioning object
367 - partitioning - the partitioning. For each local node this tells the MPI rank that that node is assigned to.
368
369 Options Database Key:
370 . -mat_partitioning_view_balance - view the balance information from the last partitioning
371
372 Level: beginner
373
374 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
375 @*/
MatPartitioningViewImbalance(MatPartitioning matp,IS partitioning)376 PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
377 {
378 PetscMPIInt nparts;
379 PetscInt *subdomainsizes, *subdomainsizes_tmp, nlocal, maxsub, minsub, avgsub;
380 const PetscInt *indices;
381 PetscViewer viewer;
382
383 PetscFunctionBegin;
384 PetscValidHeaderSpecific(matp, MAT_PARTITIONING_CLASSID, 1);
385 PetscValidHeaderSpecific(partitioning, IS_CLASSID, 2);
386 PetscCall(PetscMPIIntCast(matp->n, &nparts));
387 PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
388 PetscCall(ISGetLocalSize(partitioning, &nlocal));
389 PetscCall(ISGetIndices(partitioning, &indices));
390 for (PetscInt i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
391 PetscCallMPI(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
392 PetscCall(ISRestoreIndices(partitioning, &indices));
393 minsub = PETSC_INT_MAX, maxsub = PETSC_INT_MIN, avgsub = 0;
394 for (PetscMPIInt i = 0; i < nparts; i++) {
395 minsub = PetscMin(minsub, subdomainsizes[i]);
396 maxsub = PetscMax(maxsub, subdomainsizes[i]);
397 avgsub += subdomainsizes[i];
398 }
399 avgsub /= nparts;
400 PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
401 PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
402 PetscCall(MatPartitioningView(matp, viewer));
403 PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
404 PetscFunctionReturn(PETSC_SUCCESS);
405 }
406
407 /*@
408 MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
409 partitioned.
410
411 Collective
412
413 Input Parameters:
414 + part - the partitioning context
415 - adj - the adjacency matrix, this can be any `MatType` but the natural representation is `MATMPIADJ`
416
417 Level: beginner
418
419 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
420 @*/
MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)421 PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
422 {
423 PetscFunctionBegin;
424 PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
425 PetscValidHeaderSpecific(adj, MAT_CLASSID, 2);
426 part->adj = adj;
427 PetscFunctionReturn(PETSC_SUCCESS);
428 }
429
430 /*@
431 MatPartitioningDestroy - Destroys the partitioning context.
432
433 Collective
434
435 Input Parameter:
436 . part - the partitioning context
437
438 Level: beginner
439
440 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
441 @*/
MatPartitioningDestroy(MatPartitioning * part)442 PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
443 {
444 PetscFunctionBegin;
445 if (!*part) PetscFunctionReturn(PETSC_SUCCESS);
446 PetscValidHeaderSpecific(*part, MAT_PARTITIONING_CLASSID, 1);
447 if (--((PetscObject)*part)->refct > 0) {
448 *part = NULL;
449 PetscFunctionReturn(PETSC_SUCCESS);
450 }
451
452 PetscTryTypeMethod(*part, destroy);
453 PetscCall(PetscFree((*part)->vertex_weights));
454 PetscCall(PetscFree((*part)->part_weights));
455 PetscCall(PetscHeaderDestroy(part));
456 PetscFunctionReturn(PETSC_SUCCESS);
457 }
458
459 /*@C
460 MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.
461
462 Logically Collective
463
464 Input Parameters:
465 + part - the partitioning context
466 - weights - the weights, on each process this array must have the same size as the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()` or
467 1 if that is not provided
468
469 Level: beginner
470
471 Notes:
472 The array weights is freed by PETSc so the user should not free the array. In C/C++
473 the array must be obtained with a call to `PetscMalloc()`, not malloc().
474
475 The weights may not be used by some partitioners
476
477 Fortran Note:
478 The array `weights` is copied during this function call.
479
480 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
481 @*/
MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])482 PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
483 {
484 PetscFunctionBegin;
485 PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
486 PetscCall(PetscFree(part->vertex_weights));
487 part->vertex_weights = (PetscInt *)weights;
488 PetscFunctionReturn(PETSC_SUCCESS);
489 }
490
491 /*@C
492 MatPartitioningSetPartitionWeights - Sets the weights for each partition.
493
494 Logically Collective
495
496 Input Parameters:
497 + part - the partitioning context
498 - weights - An array of size nparts that is used to specify the fraction of
499 vertex weight that should be distributed to each sub-domain for
500 the balance constraint. If all of the sub-domains are to be of
501 the same size, then each of the nparts elements should be set
502 to a value of 1/nparts. Note that the sum of all of the weights
503 should be one.
504
505 Level: beginner
506
507 Note:
508 The array weights is freed by PETSc so the user should not free the array. In C/C++
509 the array must be obtained with a call to `PetscMalloc()`, not malloc().
510
511 Fortran Note:
512 The array `weights` is copied during this function call.
513
514 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
515 @*/
MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])516 PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
517 {
518 PetscFunctionBegin;
519 PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
520 PetscCall(PetscFree(part->part_weights));
521 part->part_weights = (PetscReal *)weights;
522 PetscFunctionReturn(PETSC_SUCCESS);
523 }
524
525 /*@
526 MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.
527
528 Logically Collective
529
530 Input Parameters:
531 + part - the partitioning context
532 - use_edge_weights - the flag indicateing whether or not to use edge weights. By default no edge weights will be used,
533 that is, use_edge_weights is set to FALSE. If set use_edge_weights to TRUE, users need to make sure legal
534 edge weights are stored in an ADJ matrix.
535
536 Options Database Key:
537 . -mat_partitioning_use_edge_weights - (true or false)
538
539 Level: beginner
540
541 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
542 @*/
MatPartitioningSetUseEdgeWeights(MatPartitioning part,PetscBool use_edge_weights)543 PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
544 {
545 PetscFunctionBegin;
546 PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
547 part->use_edge_weights = use_edge_weights;
548 PetscFunctionReturn(PETSC_SUCCESS);
549 }
550
551 /*@
552 MatPartitioningGetUseEdgeWeights - Get a flag that indicates whether or not to edge weights are used.
553
554 Logically Collective
555
556 Input Parameter:
557 . part - the partitioning context
558
559 Output Parameter:
560 . use_edge_weights - the flag indicateing whether or not to edge weights are used.
561
562 Level: beginner
563
564 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
565 `MatPartitioningSetUseEdgeWeights`
566 @*/
MatPartitioningGetUseEdgeWeights(MatPartitioning part,PetscBool * use_edge_weights)567 PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
568 {
569 PetscFunctionBegin;
570 PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
571 PetscAssertPointer(use_edge_weights, 2);
572 *use_edge_weights = part->use_edge_weights;
573 PetscFunctionReturn(PETSC_SUCCESS);
574 }
575
576 /*@
577 MatPartitioningCreate - Creates a partitioning context.
578
579 Collective
580
581 Input Parameter:
582 . comm - MPI communicator
583
584 Output Parameter:
585 . newp - location to put the context
586
587 Level: beginner
588
589 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
590 `MatPartitioningSetAdjacency()`
591 @*/
MatPartitioningCreate(MPI_Comm comm,MatPartitioning * newp)592 PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
593 {
594 MatPartitioning part;
595 PetscMPIInt size;
596
597 PetscFunctionBegin;
598 PetscAssertPointer(newp, 2);
599 PetscCall(MatInitializePackage());
600
601 PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatGraphOperations", comm, MatPartitioningDestroy, MatPartitioningView));
602 part->vertex_weights = NULL;
603 part->part_weights = NULL;
604 part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */
605
606 PetscCallMPI(MPI_Comm_size(comm, &size));
607 part->n = size;
608 part->ncon = 1;
609
610 *newp = part;
611 PetscFunctionReturn(PETSC_SUCCESS);
612 }
613
614 /*@
615 MatPartitioningViewFromOptions - View a partitioning context from the options database
616
617 Collective
618
619 Input Parameters:
620 + A - the partitioning context
621 . obj - Optional object that provides the prefix used in the options database check
622 - name - command line option
623
624 Options Database Key:
625 . -mat_partitioning_view [viewertype]:... - the viewer and its options
626
627 Level: intermediate
628
629 Note:
630 .vb
631 If no value is provided ascii:stdout is used
632 ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
633 for example ascii::ascii_info prints just the information about the object not all details
634 unless :append is given filename opens in write mode, overwriting what was already there
635 binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
636 draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
637 socket[:port] defaults to the standard output port
638 saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
639 .ve
640
641 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
642 @*/
MatPartitioningViewFromOptions(MatPartitioning A,PetscObject obj,const char name[])643 PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
644 {
645 PetscFunctionBegin;
646 PetscValidHeaderSpecific(A, MAT_PARTITIONING_CLASSID, 1);
647 PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
648 PetscFunctionReturn(PETSC_SUCCESS);
649 }
650
651 /*@
652 MatPartitioningView - Prints the partitioning data structure.
653
654 Collective
655
656 Input Parameters:
657 + part - the partitioning context
658 - viewer - optional visualization context
659
660 Level: intermediate
661
662 Note:
663 The available visualization contexts include
664 + `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
665 - `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
666 output where only the first processor opens
667 the file. All other processors send their
668 data to the first processor to print.
669
670 The user can open alternative visualization contexts with
671 . `PetscViewerASCIIOpen()` - output to a specified file
672
673 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
674 @*/
MatPartitioningView(MatPartitioning part,PetscViewer viewer)675 PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
676 {
677 PetscBool isascii;
678
679 PetscFunctionBegin;
680 PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
681 if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
682 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
683 PetscCheckSameComm(part, 1, viewer, 2);
684
685 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
686 if (isascii) {
687 PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
688 if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, " Using vertex weights\n"));
689 }
690 PetscCall(PetscViewerASCIIPushTab(viewer));
691 PetscTryTypeMethod(part, view, viewer);
692 PetscCall(PetscViewerASCIIPopTab(viewer));
693 PetscFunctionReturn(PETSC_SUCCESS);
694 }
695
696 /*@
697 MatPartitioningSetType - Sets the type of partitioner to use
698
699 Collective
700
701 Input Parameters:
702 + part - the partitioning context.
703 - type - a known method
704
705 Options Database Key:
706 . -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods or see `MatPartitioningType`
707
708 Level: intermediate
709
710 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
711 @*/
MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)712 PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
713 {
714 PetscBool match;
715 PetscErrorCode (*r)(MatPartitioning);
716
717 PetscFunctionBegin;
718 PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
719 PetscAssertPointer(type, 2);
720
721 PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
722 if (match) PetscFunctionReturn(PETSC_SUCCESS);
723
724 PetscTryTypeMethod(part, destroy);
725 part->ops->destroy = NULL;
726
727 part->data = NULL;
728 PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));
729
730 PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
731 PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);
732
733 PetscCall((*r)(part));
734
735 PetscCall(PetscFree(((PetscObject)part)->type_name));
736 PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
737 PetscFunctionReturn(PETSC_SUCCESS);
738 }
739
740 /*@
741 MatPartitioningSetFromOptions - Sets various partitioning options from the
742 options database for the partitioning object
743
744 Collective
745
746 Input Parameter:
747 . part - the partitioning context.
748
749 Options Database Keys:
750 + -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods
751 - -mat_partitioning_nparts - number of subgraphs
752
753 Level: beginner
754
755 Note:
756 If the partitioner has not been set by the user it uses one of the installed partitioner such as ParMetis. If there are
757 no installed partitioners it does no repartioning.
758
759 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`
760 @*/
MatPartitioningSetFromOptions(MatPartitioning part)761 PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
762 {
763 PetscBool flag;
764 char type[256];
765 const char *def;
766
767 PetscFunctionBegin;
768 PetscObjectOptionsBegin((PetscObject)part);
769 if (!((PetscObject)part)->type_name) {
770 #if defined(PETSC_HAVE_PARMETIS)
771 def = MATPARTITIONINGPARMETIS;
772 #elif defined(PETSC_HAVE_CHACO)
773 def = MATPARTITIONINGCHACO;
774 #elif defined(PETSC_HAVE_PARTY)
775 def = MATPARTITIONINGPARTY;
776 #elif defined(PETSC_HAVE_PTSCOTCH)
777 def = MATPARTITIONINGPTSCOTCH;
778 #else
779 def = MATPARTITIONINGCURRENT;
780 #endif
781 } else {
782 def = ((PetscObject)part)->type_name;
783 }
784 PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
785 if (flag) PetscCall(MatPartitioningSetType(part, type));
786
787 PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));
788
789 PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));
790
791 /*
792 Set the type if it was never set.
793 */
794 if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));
795
796 PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
797 PetscOptionsEnd();
798 PetscFunctionReturn(PETSC_SUCCESS);
799 }
800
801 /*@
802 MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex
803
804 Not Collective
805
806 Input Parameters:
807 + partitioning - the partitioning context
808 - ncon - the number of weights
809
810 Level: intermediate
811
812 .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
813 @*/
MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning,PetscInt ncon)814 PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
815 {
816 PetscFunctionBegin;
817 PetscValidHeaderSpecific(partitioning, MAT_PARTITIONING_CLASSID, 1);
818 partitioning->ncon = ncon;
819 PetscFunctionReturn(PETSC_SUCCESS);
820 }
821