1 #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
2
3 EXTERN_C_BEGIN
4 #include <ptscotch.h>
5 #if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
6 /* we define the prototype instead of include SCOTCH's parmetis.h */
7 void SCOTCH_ParMETIS_V3_NodeND(const SCOTCH_Num *const, SCOTCH_Num *const, SCOTCH_Num *const, const SCOTCH_Num *const, const SCOTCH_Num *const, SCOTCH_Num *const, SCOTCH_Num *const, MPI_Comm *);
8 #endif
9 EXTERN_C_END
10
/* Private context for the PT-Scotch partitioner */
typedef struct {
  double     imbalance; /* load imbalance ratio in [0,1]; default 0.01 (see MatPartitioningPTScotchSetImbalance_PTScotch()) */
  SCOTCH_Num strategy;  /* SCOTCH_STRAT* flag handed to SCOTCH_strat*MapBuild(); default SCOTCH_STRATDEFAULT */
} MatPartitioning_PTScotch;
15
16 /*@
17 MatPartitioningPTScotchSetImbalance - Sets the value of the load imbalance
18 ratio to be used during strategy selection.
19
20 Collective
21
22 Input Parameters:
23 + part - the partitioning context
24 - imb - the load imbalance ratio
25
26 Options Database Key:
27 . -mat_partitioning_ptscotch_imbalance <imb> - set load imbalance ratio
28
29 Note:
30 Must be in the range [0,1]. The default value is 0.01.
31
32 Level: advanced
33
.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetStrategy()`, `MatPartitioningPTScotchGetImbalance()`
35 @*/
PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning part, PetscReal imb)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, imb, 2);
  /* dispatch to the PT-Scotch implementation if composed; silently ignored for other types */
  PetscTryMethod(part, "MatPartitioningPTScotchSetImbalance_C", (MatPartitioning, PetscReal), (part, imb));
  PetscFunctionReturn(PETSC_SUCCESS);
}
44
MatPartitioningPTScotchSetImbalance_PTScotch(MatPartitioning part,PetscReal imb)45 static PetscErrorCode MatPartitioningPTScotchSetImbalance_PTScotch(MatPartitioning part, PetscReal imb)
46 {
47 MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
48
49 PetscFunctionBegin;
50 if (imb == PETSC_DEFAULT) scotch->imbalance = 0.01;
51 else {
52 PetscCheck(imb >= 0.0 && imb <= 1.0, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Illegal value of imb. Must be in range [0,1]");
53 scotch->imbalance = (double)imb;
54 }
55 PetscFunctionReturn(PETSC_SUCCESS);
56 }
57
58 /*@
59 MatPartitioningPTScotchGetImbalance - Gets the value of the load imbalance
60 ratio used during strategy selection.
61
62 Not Collective
63
64 Input Parameter:
65 . part - the partitioning context
66
67 Output Parameter:
68 . imb - the load imbalance ratio
69
70 Level: advanced
71
.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetImbalance()`
73 @*/
PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning part, PetscReal *imb)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(imb, 2);
  /* PetscUseMethod (unlike PetscTryMethod) errors if the type does not compose this method */
  PetscUseMethod(part, "MatPartitioningPTScotchGetImbalance_C", (MatPartitioning, PetscReal *), (part, imb));
  PetscFunctionReturn(PETSC_SUCCESS);
}
82
/* Type-specific getter: report the imbalance ratio stored in the PT-Scotch context. */
static PetscErrorCode MatPartitioningPTScotchGetImbalance_PTScotch(MatPartitioning part, PetscReal *imb)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  *imb = scotch->imbalance;
  PetscFunctionReturn(PETSC_SUCCESS);
}
91
92 /*@
93 MatPartitioningPTScotchSetStrategy - Sets the strategy to be used in PTScotch.
94
95 Collective
96
97 Input Parameters:
98 + part - the partitioning context
99 - strategy - the strategy, one of
100 .vb
101 MP_PTSCOTCH_DEFAULT - Default behavior
102 MP_PTSCOTCH_QUALITY - Prioritize quality over speed
103 MP_PTSCOTCH_SPEED - Prioritize speed over quality
104 MP_PTSCOTCH_BALANCE - Enforce load balance
105 MP_PTSCOTCH_SAFETY - Avoid methods that may fail
106 MP_PTSCOTCH_SCALABILITY - Favor scalability as much as possible
107 .ve
108
109 Options Database Key:
110 . -mat_partitioning_ptscotch_strategy [quality,speed,balance,safety,scalability] - strategy
111
112 Level: advanced
113
  Note:
  The default is `MP_PTSCOTCH_DEFAULT`, which lets PT-Scotch choose its own default strategy. See the PTScotch documentation for more information.
116
.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetImbalance()`, `MatPartitioningPTScotchGetStrategy()`
118 @*/
PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning part, MPPTScotchStrategyType strategy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, strategy, 2);
  /* dispatch to the PT-Scotch implementation if composed; silently ignored for other types */
  PetscTryMethod(part, "MatPartitioningPTScotchSetStrategy_C", (MatPartitioning, MPPTScotchStrategyType), (part, strategy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
127
MatPartitioningPTScotchSetStrategy_PTScotch(MatPartitioning part,MPPTScotchStrategyType strategy)128 static PetscErrorCode MatPartitioningPTScotchSetStrategy_PTScotch(MatPartitioning part, MPPTScotchStrategyType strategy)
129 {
130 MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
131
132 PetscFunctionBegin;
133 switch (strategy) {
134 case MP_PTSCOTCH_QUALITY:
135 scotch->strategy = SCOTCH_STRATQUALITY;
136 break;
137 case MP_PTSCOTCH_SPEED:
138 scotch->strategy = SCOTCH_STRATSPEED;
139 break;
140 case MP_PTSCOTCH_BALANCE:
141 scotch->strategy = SCOTCH_STRATBALANCE;
142 break;
143 case MP_PTSCOTCH_SAFETY:
144 scotch->strategy = SCOTCH_STRATSAFETY;
145 break;
146 case MP_PTSCOTCH_SCALABILITY:
147 scotch->strategy = SCOTCH_STRATSCALABILITY;
148 break;
149 default:
150 scotch->strategy = SCOTCH_STRATDEFAULT;
151 break;
152 }
153 PetscFunctionReturn(PETSC_SUCCESS);
154 }
155
156 /*@
157 MatPartitioningPTScotchGetStrategy - Gets the strategy used in PTScotch.
158
159 Not Collective
160
161 Input Parameter:
162 . part - the partitioning context
163
164 Output Parameter:
165 . strategy - the strategy
166
167 Level: advanced
168
.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetStrategy()`
170 @*/
PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning part, MPPTScotchStrategyType *strategy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(strategy, 2);
  /* PetscUseMethod (unlike PetscTryMethod) errors if the type does not compose this method */
  PetscUseMethod(part, "MatPartitioningPTScotchGetStrategy_C", (MatPartitioning, MPPTScotchStrategyType *), (part, strategy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
179
MatPartitioningPTScotchGetStrategy_PTScotch(MatPartitioning part,MPPTScotchStrategyType * strategy)180 static PetscErrorCode MatPartitioningPTScotchGetStrategy_PTScotch(MatPartitioning part, MPPTScotchStrategyType *strategy)
181 {
182 MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
183
184 PetscFunctionBegin;
185 switch (scotch->strategy) {
186 case SCOTCH_STRATQUALITY:
187 *strategy = MP_PTSCOTCH_QUALITY;
188 break;
189 case SCOTCH_STRATSPEED:
190 *strategy = MP_PTSCOTCH_SPEED;
191 break;
192 case SCOTCH_STRATBALANCE:
193 *strategy = MP_PTSCOTCH_BALANCE;
194 break;
195 case SCOTCH_STRATSAFETY:
196 *strategy = MP_PTSCOTCH_SAFETY;
197 break;
198 case SCOTCH_STRATSCALABILITY:
199 *strategy = MP_PTSCOTCH_SCALABILITY;
200 break;
201 default:
202 *strategy = MP_PTSCOTCH_DEFAULT;
203 break;
204 }
205 PetscFunctionReturn(PETSC_SUCCESS);
206 }
207
MatPartitioningView_PTScotch(MatPartitioning part,PetscViewer viewer)208 static PetscErrorCode MatPartitioningView_PTScotch(MatPartitioning part, PetscViewer viewer)
209 {
210 MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
211 PetscBool isascii;
212 const char *str = NULL;
213
214 PetscFunctionBegin;
215 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
216 if (isascii) {
217 switch (scotch->strategy) {
218 case SCOTCH_STRATQUALITY:
219 str = "Prioritize quality over speed";
220 break;
221 case SCOTCH_STRATSPEED:
222 str = "Prioritize speed over quality";
223 break;
224 case SCOTCH_STRATBALANCE:
225 str = "Enforce load balance";
226 break;
227 case SCOTCH_STRATSAFETY:
228 str = "Avoid methods that may fail";
229 break;
230 case SCOTCH_STRATSCALABILITY:
231 str = "Favor scalability as much as possible";
232 break;
233 default:
234 str = "Default behavior";
235 break;
236 }
237 PetscCall(PetscViewerASCIIPrintf(viewer, " Strategy=%s\n", str));
238 PetscCall(PetscViewerASCIIPrintf(viewer, " Load imbalance ratio=%g\n", scotch->imbalance));
239 }
240 PetscFunctionReturn(PETSC_SUCCESS);
241 }
242
/* Process -mat_partitioning_ptscotch_* options for this partitioning context. */
static PetscErrorCode MatPartitioningSetFromOptions_PTScotch(MatPartitioning part, PetscOptionItems PetscOptionsObject)
{
  PetscBool                 flag;
  PetscReal                 r;
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
  MPPTScotchStrategyType    strat;

  PetscFunctionBegin;
  /* seed the enum option with the currently stored strategy so it is shown as the default */
  PetscCall(MatPartitioningPTScotchGetStrategy(part, &strat));
  PetscOptionsHeadBegin(PetscOptionsObject, "PTScotch partitioning options");
  PetscCall(PetscOptionsEnum("-mat_partitioning_ptscotch_strategy", "Strategy", "MatPartitioningPTScotchSetStrategy", MPPTScotchStrategyTypes, (PetscEnum)strat, (PetscEnum *)&strat, &flag));
  if (flag) PetscCall(MatPartitioningPTScotchSetStrategy(part, strat));
  PetscCall(PetscOptionsReal("-mat_partitioning_ptscotch_imbalance", "Load imbalance ratio", "MatPartitioningPTScotchSetImbalance", scotch->imbalance, &r, &flag));
  if (flag) PetscCall(MatPartitioningPTScotchSetImbalance(part, r));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
260
MatPartitioningApply_PTScotch_Private(MatPartitioning part,PetscBool useND,IS * partitioning)261 static PetscErrorCode MatPartitioningApply_PTScotch_Private(MatPartitioning part, PetscBool useND, IS *partitioning)
262 {
263 MPI_Comm pcomm, comm;
264 MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
265 PetscMPIInt rank;
266 Mat mat = part->adj;
267 Mat_MPIAdj *adj = (Mat_MPIAdj *)mat->data;
268 PetscBool flg, distributed;
269 PetscBool proc_weight_flg;
270 PetscInt i, j, p, bs = 1, nold;
271 PetscInt *NDorder = NULL;
272 PetscReal *vwgttab, deltval;
273 SCOTCH_Num *locals, *velotab, *veloloctab, *edloloctab, vertlocnbr, edgelocnbr, nparts = part->n;
274
275 PetscFunctionBegin;
276 PetscCall(PetscObjectGetComm((PetscObject)part, &pcomm));
277 /* Duplicate the communicator to be sure that PTSCOTCH attribute caching does not interfere with PETSc. */
278 PetscCallMPI(MPI_Comm_dup(pcomm, &comm));
279 PetscCallMPI(MPI_Comm_rank(comm, &rank));
280 PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
281 if (!flg) {
282 /* bs indicates if the converted matrix is "reduced" from the original and hence the
283 resulting partition results need to be stretched to match the original matrix */
284 nold = mat->rmap->n;
285 PetscCall(MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &mat));
286 if (mat->rmap->n > 0) bs = nold / mat->rmap->n;
287 adj = (Mat_MPIAdj *)mat->data;
288 }
289
290 proc_weight_flg = part->part_weights ? PETSC_TRUE : PETSC_FALSE;
291 PetscCall(PetscOptionsGetBool(NULL, NULL, "-mat_partitioning_ptscotch_proc_weight", &proc_weight_flg, NULL));
292
293 PetscCall(PetscMalloc1(mat->rmap->n + 1, &locals));
294
295 if (useND) {
296 #if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
297 PetscInt *sizes, *seps, log2size, subd, *level, base = 0;
298 PetscMPIInt size;
299
300 PetscCallMPI(MPI_Comm_size(comm, &size));
301 log2size = PetscLog2Real(size);
302 subd = PetscPowInt(2, log2size);
303 PetscCheck(subd == size, comm, PETSC_ERR_SUP, "Only power of 2 communicator sizes");
304 PetscCall(PetscMalloc1(mat->rmap->n, &NDorder));
305 PetscCall(PetscMalloc3(2 * size, &sizes, 4 * size, &seps, size, &level));
306 SCOTCH_ParMETIS_V3_NodeND(mat->rmap->range, adj->i, adj->j, &base, NULL, NDorder, sizes, &comm);
307 PetscCall(MatPartitioningSizesToSep_Private(subd, sizes, seps, level));
308 for (i = 0; i < mat->rmap->n; i++) {
309 PetscInt loc;
310
311 PetscCall(PetscFindInt(NDorder[i], 2 * subd, seps, &loc));
312 if (loc < 0) {
313 loc = -(loc + 1);
314 if (loc % 2) { /* part of subdomain */
315 locals[i] = loc / 2;
316 } else {
317 PetscCall(PetscFindInt(NDorder[i], 2 * (subd - 1), seps + 2 * subd, &loc));
318 loc = loc < 0 ? -(loc + 1) / 2 : loc / 2;
319 locals[i] = level[loc];
320 }
321 } else locals[i] = loc / 2;
322 }
323 PetscCall(PetscFree3(sizes, seps, level));
324 #else
325 SETERRQ(pcomm, PETSC_ERR_SUP, "Need libptscotchparmetis.a compiled with -DSCOTCH_METIS_PREFIX");
326 #endif
327 } else {
328 velotab = NULL;
329 if (proc_weight_flg) {
330 PetscCall(PetscMalloc1(nparts, &vwgttab));
331 PetscCall(PetscMalloc1(nparts, &velotab));
332 for (j = 0; j < nparts; j++) {
333 if (part->part_weights) vwgttab[j] = part->part_weights[j] * nparts;
334 else vwgttab[j] = 1.0;
335 }
336 for (i = 0; i < nparts; i++) {
337 deltval = PetscAbsReal(vwgttab[i] - PetscFloorReal(vwgttab[i] + 0.5));
338 if (deltval > 0.01) {
339 for (j = 0; j < nparts; j++) vwgttab[j] /= deltval;
340 }
341 }
342 for (i = 0; i < nparts; i++) velotab[i] = (SCOTCH_Num)(vwgttab[i] + 0.5);
343 PetscCall(PetscFree(vwgttab));
344 }
345
346 vertlocnbr = mat->rmap->range[rank + 1] - mat->rmap->range[rank];
347 edgelocnbr = adj->i[vertlocnbr];
348 veloloctab = part->vertex_weights;
349 edloloctab = part->use_edge_weights ? adj->values : NULL;
350
351 /* detect whether all vertices are located at the same process in original graph */
352 for (p = 0; !mat->rmap->range[p + 1] && p < nparts; ++p);
353 distributed = (mat->rmap->range[p + 1] == mat->rmap->N) ? PETSC_FALSE : PETSC_TRUE;
354 if (distributed) {
355 SCOTCH_Arch archdat;
356 SCOTCH_Dgraph grafdat;
357 SCOTCH_Dmapping mappdat;
358 SCOTCH_Strat stradat;
359
360 PetscCallExternal(SCOTCH_dgraphInit, &grafdat, comm);
361 PetscCallExternal(SCOTCH_dgraphBuild, &grafdat, 0, vertlocnbr, vertlocnbr, adj->i, adj->i + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adj->j, NULL, edloloctab);
362
363 if (PetscDefined(USE_DEBUG)) PetscCallExternal(SCOTCH_dgraphCheck, &grafdat);
364
365 PetscCallExternal(SCOTCH_archInit, &archdat);
366 PetscCallExternal(SCOTCH_stratInit, &stradat);
367 PetscCallExternal(SCOTCH_stratDgraphMapBuild, &stradat, scotch->strategy, nparts, nparts, scotch->imbalance);
368
369 if (velotab) {
370 PetscCallExternal(SCOTCH_archCmpltw, &archdat, nparts, velotab);
371 } else {
372 PetscCallExternal(SCOTCH_archCmplt, &archdat, nparts);
373 }
374 PetscCallExternal(SCOTCH_dgraphMapInit, &grafdat, &mappdat, &archdat, locals);
375 PetscCallExternal(SCOTCH_dgraphMapCompute, &grafdat, &mappdat, &stradat);
376
377 SCOTCH_dgraphMapExit(&grafdat, &mappdat);
378 SCOTCH_archExit(&archdat);
379 SCOTCH_stratExit(&stradat);
380 SCOTCH_dgraphExit(&grafdat);
381
382 } else if (rank == p) {
383 SCOTCH_Graph grafdat;
384 SCOTCH_Strat stradat;
385
386 PetscCallExternal(SCOTCH_graphInit, &grafdat);
387 PetscCallExternal(SCOTCH_graphBuild, &grafdat, 0, vertlocnbr, adj->i, adj->i + 1, veloloctab, NULL, edgelocnbr, adj->j, edloloctab);
388 if (PetscDefined(USE_DEBUG)) PetscCallExternal(SCOTCH_graphCheck, &grafdat);
389 PetscCallExternal(SCOTCH_stratInit, &stradat);
390 PetscCallExternal(SCOTCH_stratGraphMapBuild, &stradat, scotch->strategy, nparts, scotch->imbalance);
391 if (velotab) {
392 SCOTCH_Arch archdat;
393 PetscCallExternal(SCOTCH_archInit, &archdat);
394 PetscCallExternal(SCOTCH_archCmpltw, &archdat, nparts, velotab);
395 PetscCallExternal(SCOTCH_graphMap, &grafdat, &archdat, &stradat, locals);
396 SCOTCH_archExit(&archdat);
397 } else {
398 PetscCallExternal(SCOTCH_graphPart, &grafdat, nparts, &stradat, locals);
399 }
400 SCOTCH_stratExit(&stradat);
401 SCOTCH_graphExit(&grafdat);
402 }
403
404 PetscCall(PetscFree(velotab));
405 }
406 PetscCallMPI(MPI_Comm_free(&comm));
407
408 if (bs > 1) {
409 PetscInt *newlocals;
410 PetscCall(PetscMalloc1(bs * mat->rmap->n, &newlocals));
411 for (i = 0; i < mat->rmap->n; i++) {
412 for (j = 0; j < bs; j++) newlocals[bs * i + j] = locals[i];
413 }
414 PetscCall(PetscFree(locals));
415 PetscCall(ISCreateGeneral(pcomm, bs * mat->rmap->n, newlocals, PETSC_OWN_POINTER, partitioning));
416 } else {
417 PetscCall(ISCreateGeneral(pcomm, mat->rmap->n, locals, PETSC_OWN_POINTER, partitioning));
418 }
419 if (useND) {
420 IS ndis;
421
422 if (bs > 1) {
423 PetscCall(ISCreateBlock(pcomm, bs, mat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
424 } else {
425 PetscCall(ISCreateGeneral(pcomm, mat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
426 }
427 PetscCall(ISSetPermutation(ndis));
428 PetscCall(PetscObjectCompose((PetscObject)*partitioning, "_petsc_matpartitioning_ndorder", (PetscObject)ndis));
429 PetscCall(ISDestroy(&ndis));
430 }
431
432 if (!flg) PetscCall(MatDestroy(&mat));
433 PetscFunctionReturn(PETSC_SUCCESS);
434 }
435
/* apply() entry point: plain partitioning (no nested-dissection ordering) */
static PetscErrorCode MatPartitioningApply_PTScotch(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_PTScotch_Private(part, PETSC_FALSE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
442
/* applynd() entry point: partitioning with nested-dissection ordering */
static PetscErrorCode MatPartitioningApplyND_PTScotch(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_PTScotch_Private(part, PETSC_TRUE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
449
/* Free the PT-Scotch context and unregister the composed methods installed by Create. */
static PetscErrorCode MatPartitioningDestroy_PTScotch(MatPartitioning part)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(scotch));
  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetImbalance_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetImbalance_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetStrategy_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetStrategy_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
463
464 /*MC
465 MATPARTITIONINGPTSCOTCH - Creates a partitioning context that uses the external package SCOTCH <http://www.labri.fr/perso/pelegrin/scotch/>.
466
467 Level: beginner
468
469 .seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MatPartitioningPTScotchSetImbalance()`, `MatPartitioningPTScotchGetImbalance()`,
470 `MatPartitioningPTScotchSetStrategy()`, `MatPartitioningPTScotchGetStrategy()`, `MatPartitioning`
471 M*/
472
/* Constructor for the PT-Scotch partitioning type: allocate the context, set defaults,
   fill the ops vtable, and compose the type-specific setter/getter methods. */
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_PTScotch(MatPartitioning part)
{
  MatPartitioning_PTScotch *scotch;

  PetscFunctionBegin;
  PetscCall(PetscNew(&scotch));
  part->data = (void *)scotch;

  /* defaults: 1% load imbalance, PT-Scotch's own default strategy */
  scotch->imbalance = 0.01;
  scotch->strategy  = SCOTCH_STRATDEFAULT;

  part->ops->apply          = MatPartitioningApply_PTScotch;
  part->ops->applynd        = MatPartitioningApplyND_PTScotch;
  part->ops->view           = MatPartitioningView_PTScotch;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_PTScotch;
  part->ops->destroy        = MatPartitioningDestroy_PTScotch;

  /* composed methods let the generic MatPartitioningPTScotch{Set,Get}* wrappers dispatch here */
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetImbalance_C", MatPartitioningPTScotchSetImbalance_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetImbalance_C", MatPartitioningPTScotchGetImbalance_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetStrategy_C", MatPartitioningPTScotchSetStrategy_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetStrategy_C", MatPartitioningPTScotchGetStrategy_PTScotch));
  PetscFunctionReturn(PETSC_SUCCESS);
}
496