#include <petsc/private/isimpl.h> /*I "petscis.h" I*/
#include <petscviewer.h>
#include <petscsf.h>

const char *const ISColoringTypes[] = {"global", "ghosted", "ISColoringType", "IS_COLORING_", NULL};

PetscErrorCode ISColoringReference(ISColoring coloring)
{
  PetscFunctionBegin;
  coloring->refct++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISColoringSetType - sets whether the coloring is for the local representation (including ghost points) or the global representation of a `Mat`

  Collective

  Input Parameters:
+ coloring - the coloring object
- type     - either `IS_COLORING_LOCAL` or `IS_COLORING_GLOBAL`

  Level: intermediate

  Notes:
  `IS_COLORING_LOCAL` can lead to faster computations since parallel ghost point updates are not needed for each color

  With `IS_COLORING_LOCAL` the coloring is in the numbering of the local vector, for `IS_COLORING_GLOBAL` it is in the numbering of the global vector

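  Example Usage:
  A minimal sketch; `mc` is assumed to be a previously configured `MatColoring`:
.vb
  ISColoring coloring;

  PetscCall(MatColoringApply(mc, &coloring));
  PetscCall(ISColoringSetType(coloring, IS_COLORING_LOCAL));
.ve
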
.seealso: `MatFDColoringCreate()`, `ISColoring`, `ISColoringType`, `ISColoringCreate()`, `IS_COLORING_LOCAL`, `IS_COLORING_GLOBAL`, `ISColoringGetType()`
@*/
PetscErrorCode ISColoringSetType(ISColoring coloring, ISColoringType type)
{
  PetscFunctionBegin;
  coloring->ctype = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISColoringGetType - gets whether the coloring is for the local representation (including ghost points) or the global representation

  Collective

  Input Parameter:
. coloring - the coloring object

  Output Parameter:
. type - either `IS_COLORING_LOCAL` or `IS_COLORING_GLOBAL`

  Level: intermediate

.seealso: `MatFDColoringCreate()`, `ISColoring`, `ISColoringType`, `ISColoringCreate()`, `IS_COLORING_LOCAL`, `IS_COLORING_GLOBAL`, `ISColoringSetType()`
@*/
PetscErrorCode ISColoringGetType(ISColoring coloring, ISColoringType *type)
{
  PetscFunctionBegin;
  *type = coloring->ctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISColoringDestroy - Destroys an `ISColoring` coloring context.

  Collective

  Input Parameter:
. iscoloring - the coloring context

  Level: advanced

.seealso: `ISColoring`, `ISColoringView()`, `MatColoring`
@*/
PetscErrorCode ISColoringDestroy(ISColoring *iscoloring)
{
  PetscInt i;

  PetscFunctionBegin;
  if (!*iscoloring) PetscFunctionReturn(PETSC_SUCCESS);
  PetscAssertPointer(*iscoloring, 1);
  if (--(*iscoloring)->refct > 0) {
    *iscoloring = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if ((*iscoloring)->is) {
    for (i = 0; i < (*iscoloring)->n; i++) PetscCall(ISDestroy(&(*iscoloring)->is[i]));
    PetscCall(PetscFree((*iscoloring)->is));
  }
  if ((*iscoloring)->allocated) PetscCall(PetscFree((*iscoloring)->colors));
  PetscCall(PetscCommDestroy(&(*iscoloring)->comm));
  PetscCall(PetscFree(*iscoloring));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISColoringViewFromOptions - Processes command line options to determine if/how an `ISColoring` object is to be viewed.

  Collective

  Input Parameters:
+ obj        - the `ISColoring` object
. bobj       - optional object that supplies the options prefix to use, or `NULL`
- optionname - option to activate viewing

  Level: intermediate

  Developer Notes:
  This cannot use `PetscObjectViewFromOptions()` because `ISColoring` is not a `PetscObject`

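  Example Usage:
  A minimal sketch; `coloring` is an assumed, previously created `ISColoring`, and the object is displayed when the program is run with `-is_coloring_view`:
.vb
  PetscCall(ISColoringViewFromOptions(coloring, NULL, "-is_coloring_view"));
.ve
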
.seealso: `ISColoring`, `ISColoringView()`
@*/
PetscErrorCode ISColoringViewFromOptions(ISColoring obj, PetscObject bobj, const char optionname[])
{
  PetscViewer       viewer;
  PetscBool         flg;
  PetscViewerFormat format;
  char             *prefix;

  PetscFunctionBegin;
  prefix = bobj ? bobj->prefix : NULL;
  PetscCall(PetscOptionsCreateViewer(obj->comm, NULL, prefix, optionname, &viewer, &format, &flg));
  if (flg) {
    PetscCall(PetscViewerPushFormat(viewer, format));
    PetscCall(ISColoringView(obj, viewer));
    PetscCall(PetscViewerPopFormat(viewer));
    PetscCall(PetscViewerDestroy(&viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISColoringView - Views an `ISColoring` coloring context.

  Collective

  Input Parameters:
+ iscoloring - the coloring context
- viewer     - the viewer

  Level: advanced

.seealso: `ISColoring`, `ISColoringViewFromOptions()`, `ISColoringDestroy()`, `ISColoringGetIS()`, `MatColoring`
@*/
PetscErrorCode ISColoringView(ISColoring iscoloring, PetscViewer viewer)
{
  PetscInt  i;
  PetscBool isascii;
  IS       *is;

  PetscFunctionBegin;
  PetscAssertPointer(iscoloring, 1);
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(iscoloring->comm, &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    MPI_Comm    comm;
    PetscMPIInt size, rank;

    PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    PetscCall(PetscViewerASCIIPrintf(viewer, "ISColoring Object: %d MPI processes\n", size));
    PetscCall(PetscViewerASCIIPrintf(viewer, "ISColoringType: %s\n", ISColoringTypes[iscoloring->ctype]));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Number of colors %" PetscInt_FMT "\n", rank, iscoloring->n));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }

  PetscCall(ISColoringGetIS(iscoloring, PETSC_USE_POINTER, PETSC_IGNORE, &is));
  for (i = 0; i < iscoloring->n; i++) PetscCall(ISView(iscoloring->is[i], viewer));
  PetscCall(ISColoringRestoreIS(iscoloring, PETSC_USE_POINTER, &is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISColoringGetColors - Returns an array with the color for each local node

  Not Collective

  Input Parameter:
. iscoloring - the coloring context

  Output Parameters:
+ n      - number of nodes
. nc     - number of colors
- colors - color for each node

  Level: advanced

  Notes:
  Do not free the `colors` array.

  The `colors` array will only be valid for the lifetime of the `ISColoring`

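  Example Usage:
  A minimal sketch that prints the color of each local node; `coloring` is an assumed, previously created `ISColoring`:
.vb
  PetscInt               n, nc;
  const ISColoringValue *colors;

  PetscCall(ISColoringGetColors(coloring, &n, &nc, &colors));
  for (PetscInt i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "node %" PetscInt_FMT " has color %d\n", i, (int)colors[i]));
.ve
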
.seealso: `ISColoring`, `ISColoringValue`, `ISColoringRestoreIS()`, `ISColoringView()`, `ISColoringGetIS()`
@*/
PetscErrorCode ISColoringGetColors(ISColoring iscoloring, PetscInt *n, PetscInt *nc, const ISColoringValue **colors)
{
  PetscFunctionBegin;
  PetscAssertPointer(iscoloring, 1);

  if (n) *n = iscoloring->N;
  if (nc) *nc = iscoloring->n;
  if (colors) *colors = iscoloring->colors;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISColoringGetIS - Extracts index sets from the coloring context. Each `IS` contains the nodes of one color

  Collective

  Input Parameters:
+ iscoloring - the coloring context
- mode       - if this value is `PETSC_OWN_POINTER` then the caller owns the pointer and must free the array of `IS` and each `IS` in the array

  Output Parameters:
+ nn   - number of index sets in the coloring context
- isis - array of index sets

  Level: advanced

  Note:
  If `mode` is `PETSC_USE_POINTER` then `ISColoringRestoreIS()` must be called when the `IS` are no longer needed

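  Example Usage:
  A minimal sketch that views the nodes of each color in turn; `coloring` is an assumed, previously created `ISColoring`:
.vb
  PetscInt nc;
  IS      *isc;

  PetscCall(ISColoringGetIS(coloring, PETSC_USE_POINTER, &nc, &isc));
  for (PetscInt c = 0; c < nc; c++) PetscCall(ISView(isc[c], PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISColoringRestoreIS(coloring, PETSC_USE_POINTER, &isc));
.ve
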
.seealso: `ISColoring`, `IS`, `ISColoringRestoreIS()`, `ISColoringView()`, `ISColoringGetColoring()`, `ISColoringGetColors()`
@*/
PetscErrorCode ISColoringGetIS(ISColoring iscoloring, PetscCopyMode mode, PetscInt *nn, IS *isis[])
{
  PetscFunctionBegin;
  PetscAssertPointer(iscoloring, 1);

  if (nn) *nn = iscoloring->n;
  if (isis) {
    if (!iscoloring->is) {
      PetscInt        *mcolors, **ii, nc = iscoloring->n, i, base, n = iscoloring->N;
      ISColoringValue *colors = iscoloring->colors;
      IS              *is;

      if (PetscDefined(USE_DEBUG)) {
        for (i = 0; i < n; i++) PetscCheck(((PetscInt)colors[i]) < nc, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Coloring is out of range: index %" PetscInt_FMT " value %d number colors %" PetscInt_FMT, i, (int)colors[i], nc);
      }

      /* generate the lists of nodes for each color */
      PetscCall(PetscCalloc1(nc, &mcolors));
      for (i = 0; i < n; i++) mcolors[colors[i]]++;

      PetscCall(PetscMalloc1(nc, &ii));
      PetscCall(PetscMalloc1(n, &ii[0]));
      for (i = 1; i < nc; i++) ii[i] = ii[i - 1] + mcolors[i - 1];
      PetscCall(PetscArrayzero(mcolors, nc));

      if (iscoloring->ctype == IS_COLORING_GLOBAL) {
        PetscCallMPI(MPI_Scan(&iscoloring->N, &base, 1, MPIU_INT, MPI_SUM, iscoloring->comm));
        base -= iscoloring->N;
        for (i = 0; i < n; i++) ii[colors[i]][mcolors[colors[i]]++] = i + base; /* global idx */
      } else if (iscoloring->ctype == IS_COLORING_LOCAL) {
        for (i = 0; i < n; i++) ii[colors[i]][mcolors[colors[i]]++] = i; /* local idx */
      } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Not provided for this ISColoringType");

      PetscCall(PetscMalloc1(nc, &is));
      for (i = 0; i < nc; i++) PetscCall(ISCreateGeneral(iscoloring->comm, mcolors[i], ii[i], PETSC_COPY_VALUES, is + i));

      if (mode != PETSC_OWN_POINTER) iscoloring->is = is;
      *isis = is;
      PetscCall(PetscFree(ii[0]));
      PetscCall(PetscFree(ii));
      PetscCall(PetscFree(mcolors));
    } else {
      *isis = iscoloring->is;
      if (mode == PETSC_OWN_POINTER) iscoloring->is = NULL;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISColoringRestoreIS - Restores the index sets extracted from the coloring context with `ISColoringGetIS()` using `PETSC_USE_POINTER`

  Collective

  Input Parameters:
+ iscoloring - the coloring context
. mode       - who retains ownership of the `IS` array
- is         - array of index sets

  Level: advanced

.seealso: `ISColoring`, `IS`, `ISColoringGetIS()`, `ISColoringView()`, `PetscCopyMode`
@*/
PetscErrorCode ISColoringRestoreIS(ISColoring iscoloring, PetscCopyMode mode, IS *is[])
{
  PetscFunctionBegin;
  PetscAssertPointer(iscoloring, 1);

  /* currently nothing is done here */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISColoringCreate - Generates an `ISColoring` context from lists (provided by each MPI process) of colors for each node.

  Collective

  Input Parameters:
+ comm    - communicator for the processors creating the coloring
. ncolors - maximum color value, or `PETSC_DECIDE`
. n       - number of nodes on this processor
. colors  - array containing the colors for this MPI rank, color numbers begin at 0, for each local node
- mode    - see `PetscCopyMode` for the meaning of this flag

  Output Parameter:
. iscoloring - the resulting coloring data structure

  Options Database Key:
. -is_coloring_view - Activates `ISColoringView()`

  Level: advanced

  Notes:
  By default sets the coloring type to `IS_COLORING_GLOBAL`

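  Example Usage:
  A minimal sketch that colors four local nodes with two alternating colors; the values are illustrative:
.vb
  ISColoringValue colors[4] = {0, 1, 0, 1};
  ISColoring      coloring;

  PetscCall(ISColoringCreate(PETSC_COMM_WORLD, 2, 4, colors, PETSC_COPY_VALUES, &coloring));
  PetscCall(ISColoringDestroy(&coloring));
.ve
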
.seealso: `ISColoring`, `ISColoringValue`, `MatColoringCreate()`, `ISColoringView()`, `ISColoringDestroy()`, `ISColoringSetType()`
@*/
PetscErrorCode ISColoringCreate(MPI_Comm comm, PetscInt ncolors, PetscInt n, const ISColoringValue colors[], PetscCopyMode mode, ISColoring *iscoloring)
{
  PetscMPIInt size, rank, tag;
  PetscInt    base, top, i;
  PetscInt    nc, ncwork;
  MPI_Status  status;

  PetscFunctionBegin;
  if (ncolors != PETSC_DECIDE && ncolors > IS_COLORING_MAX) {
    PetscCheck(ncolors <= PETSC_UINT16_MAX, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Max color value exceeds %d limit. This number is unrealistic. Perhaps a bug in the code? Current max: %d, user requested: %" PetscInt_FMT, PETSC_UINT16_MAX, PETSC_IS_COLORING_MAX, ncolors);
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Max color value exceeds limit. Perhaps reconfigure PETSc with --with-is-color-value-type=short? Current max: %d, user requested: %" PetscInt_FMT, PETSC_IS_COLORING_MAX, ncolors);
  }
  PetscCall(PetscNew(iscoloring));
  PetscCall(PetscCommDuplicate(comm, &(*iscoloring)->comm, &tag));
  comm = (*iscoloring)->comm;

  /* compute the number of the first node on my processor */
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* should use MPI_Scan() */
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (rank == 0) {
    base = 0;
    top  = n;
  } else {
    PetscCallMPI(MPI_Recv(&base, 1, MPIU_INT, rank - 1, tag, comm, &status));
    top = base + n;
  }
  if (rank < size - 1) PetscCallMPI(MPI_Send(&top, 1, MPIU_INT, rank + 1, tag, comm));

  /* compute the total number of colors */
  ncwork = 0;
  for (i = 0; i < n; i++) {
    if (ncwork < colors[i]) ncwork = colors[i];
  }
  ncwork++;
  PetscCallMPI(MPIU_Allreduce(&ncwork, &nc, 1, MPIU_INT, MPI_MAX, comm));
  PetscCheck(nc <= ncolors, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of colors passed in %" PetscInt_FMT " is less than the actual number of colors in array %" PetscInt_FMT, ncolors, nc);
  (*iscoloring)->n     = nc;
  (*iscoloring)->is    = NULL;
  (*iscoloring)->N     = n;
  (*iscoloring)->refct = 1;
  (*iscoloring)->ctype = IS_COLORING_GLOBAL;
  if (mode == PETSC_COPY_VALUES) {
    PetscCall(PetscMalloc1(n, &(*iscoloring)->colors));
    PetscCall(PetscArraycpy((*iscoloring)->colors, colors, n));
    (*iscoloring)->allocated = PETSC_TRUE;
  } else if (mode == PETSC_OWN_POINTER) {
    (*iscoloring)->colors    = (ISColoringValue *)colors;
    (*iscoloring)->allocated = PETSC_TRUE;
  } else {
    (*iscoloring)->colors    = (ISColoringValue *)colors;
    (*iscoloring)->allocated = PETSC_FALSE;
  }
  PetscCall(ISColoringViewFromOptions(*iscoloring, NULL, "-is_coloring_view"));
  PetscCall(PetscInfo(0, "Number of colors %" PetscInt_FMT "\n", nc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISBuildTwoSided - Given an `IS` that assigns a target MPI rank to each local entry, generates on each rank an `IS`
  containing the indices that the other ranks assigned to it.

  Collective

  Input Parameters:
+ ito    - an `IS` describing to which rank each local entry will be sent; entries with a negative target rank are ignored
- toindx - an `IS` giving the index values to send; `NULL` means the natural (global) numbering is sent

  Output Parameter:
. rows - on each rank, the sorted indices received from all the ranks

  Level: advanced

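  Example Usage:
  A sketch based on the implementation: each rank assigns a target rank to its local entries and receives the sorted indices that the other ranks assigned to it:
.vb
  IS       ito, rows;
  PetscInt targets[2] = {0, 1}; /* illustrative: send entry 0 to rank 0, entry 1 to rank 1 */

  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 2, targets, PETSC_COPY_VALUES, &ito));
  PetscCall(ISBuildTwoSided(ito, NULL, &rows));
  PetscCall(ISDestroy(&ito));
  PetscCall(ISDestroy(&rows));
.ve
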
  Developer Notes:
  This manual page is incomprehensible and still needs to be fixed

.seealso: [](sec_scatter), `IS`, `MatPartitioningCreate()`, `ISPartitioningToNumbering()`, `ISPartitioningCount()`
@*/
PetscErrorCode ISBuildTwoSided(IS ito, IS toindx, IS *rows)
{
  const PetscInt *ito_indices, *toindx_indices;
  PetscInt       *send_indices, rstart, *recv_indices, nrecvs, nsends;
  PetscInt       *tosizes, *fromsizes, j, *tosizes_tmp, *tooffsets_tmp, ito_ln;
  PetscMPIInt    *toranks, *fromranks, size, target_rank, *fromperm_newtoold, nto, nfrom;
  PetscLayout     isrmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ito, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(ISGetLocalSize(ito, &ito_ln));
  PetscCall(ISGetLayout(ito, &isrmap));
  PetscCall(PetscLayoutGetRange(isrmap, &rstart, NULL));
  PetscCall(ISGetIndices(ito, &ito_indices));
  PetscCall(PetscCalloc2(size, &tosizes_tmp, size + 1, &tooffsets_tmp));
  for (PetscInt i = 0; i < ito_ln; i++) {
    if (ito_indices[i] < 0) continue;
    else PetscCheck(ito_indices[i] < size, comm, PETSC_ERR_ARG_OUTOFRANGE, "target rank %" PetscInt_FMT " is larger than communicator size %d", ito_indices[i], size);
    tosizes_tmp[ito_indices[i]]++;
  }
  nto = 0;
  for (PetscMPIInt i = 0; i < size; i++) {
    tooffsets_tmp[i + 1] = tooffsets_tmp[i] + tosizes_tmp[i];
    if (tosizes_tmp[i] > 0) nto++;
  }
  PetscCall(PetscCalloc2(nto, &toranks, 2 * nto, &tosizes));
  nto = 0;
  for (PetscMPIInt i = 0; i < size; i++) {
    if (tosizes_tmp[i] > 0) {
      toranks[nto]         = i;
      tosizes[2 * nto]     = tosizes_tmp[i];   /* size */
      tosizes[2 * nto + 1] = tooffsets_tmp[i]; /* offset */
      nto++;
    }
  }
  nsends = tooffsets_tmp[size];
  PetscCall(PetscCalloc1(nsends, &send_indices));
  if (toindx) PetscCall(ISGetIndices(toindx, &toindx_indices));
  for (PetscInt i = 0; i < ito_ln; i++) {
    if (ito_indices[i] < 0) continue;
    PetscCall(PetscMPIIntCast(ito_indices[i], &target_rank));
    send_indices[tooffsets_tmp[target_rank]] = toindx ? toindx_indices[i] : (i + rstart);
    tooffsets_tmp[target_rank]++;
  }
  if (toindx) PetscCall(ISRestoreIndices(toindx, &toindx_indices));
  PetscCall(ISRestoreIndices(ito, &ito_indices));
  PetscCall(PetscFree2(tosizes_tmp, tooffsets_tmp));
  PetscCall(PetscCommBuildTwoSided(comm, 2, MPIU_INT, nto, toranks, tosizes, &nfrom, &fromranks, &fromsizes));
  PetscCall(PetscFree2(toranks, tosizes));
  PetscCall(PetscMalloc1(nfrom, &fromperm_newtoold));
  for (PetscMPIInt i = 0; i < nfrom; i++) fromperm_newtoold[i] = i;
  PetscCall(PetscSortMPIIntWithArray(nfrom, fromranks, fromperm_newtoold));
  nrecvs = 0;
  for (PetscMPIInt i = 0; i < nfrom; i++) nrecvs += fromsizes[i * 2];
  PetscCall(PetscCalloc1(nrecvs, &recv_indices));
  PetscCall(PetscMalloc1(nrecvs, &iremote));
  nrecvs = 0;
  for (PetscMPIInt i = 0; i < nfrom; i++) {
    for (j = 0; j < fromsizes[2 * fromperm_newtoold[i]]; j++) {
      iremote[nrecvs].rank    = fromranks[i];
      iremote[nrecvs++].index = fromsizes[2 * fromperm_newtoold[i] + 1] + j;
    }
  }
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nsends, nrecvs, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  /* how to put a prefix ? */
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, send_indices, recv_indices, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, send_indices, recv_indices, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(PetscFree(fromranks));
  PetscCall(PetscFree(fromsizes));
  PetscCall(PetscFree(fromperm_newtoold));
  PetscCall(PetscFree(send_indices));
  if (rows) {
    PetscCall(PetscSortInt(nrecvs, recv_indices));
    PetscCall(ISCreateGeneral(comm, nrecvs, recv_indices, PETSC_OWN_POINTER, rows));
  } else {
    PetscCall(PetscFree(recv_indices));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISPartitioningToNumbering - Takes an `IS` that represents a partitioning (the MPI rank that each local entry belongs to) and on each MPI process
  generates an `IS` that contains a new global node number in the new ordering for each entry

  Collective

  Input Parameter:
. part - a partitioning as generated by `MatPartitioningApply()` or `MatPartitioningApplyND()`

  Output Parameter:
. is - on each processor the index set that defines the global numbers
       (in the new numbering) for all the nodes currently (before the partitioning)
       on that processor

  Level: advanced

  Note:
  The resulting `IS` tells where each local entry is mapped to in a new global ordering

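  Example Usage:
  A minimal sketch; `mp` is an assumed, previously set up `MatPartitioning`:
.vb
  IS part, is;

  PetscCall(MatPartitioningApply(mp, &part));
  PetscCall(ISPartitioningToNumbering(part, &is));
  PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&part));
.ve
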
.seealso: [](sec_scatter), `IS`, `MatPartitioningCreate()`, `AOCreateBasic()`, `ISPartitioningCount()`
@*/
PetscErrorCode ISPartitioningToNumbering(IS part, IS *is)
{
  MPI_Comm        comm;
  IS              ndorder;
  PetscInt        n, *starts = NULL, *sums = NULL, *lsizes = NULL, *newi = NULL;
  const PetscInt *indices = NULL;
  PetscMPIInt     np, npt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, IS_CLASSID, 1);
  PetscAssertPointer(is, 2);
  /* see if the partitioning comes from nested dissection */
  PetscCall(PetscObjectQuery((PetscObject)part, "_petsc_matpartitioning_ndorder", (PetscObject *)&ndorder));
  if (ndorder) {
    PetscCall(PetscObjectReference((PetscObject)ndorder));
    *is = ndorder;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  /* count the number of partitions, i.e., virtual processors */
  PetscCall(ISGetLocalSize(part, &n));
  PetscCall(ISGetIndices(part, &indices));
  np = 0;
  for (PetscInt i = 0; i < n; i++) PetscCall(PetscMPIIntCast(PetscMax(np, indices[i]), &np));
  PetscCallMPI(MPIU_Allreduce(&np, &npt, 1, MPI_INT, MPI_MAX, comm));
  np = npt + 1; /* so that it looks like a MPI_Comm_size output */

  /*
     lsizes - number of elements of each partition on this particular processor
     sums   - total number of "previous" nodes for any particular partition
     starts - global number of first element in each partition on this processor
  */
  PetscCall(PetscMalloc3(np, &lsizes, np, &starts, np, &sums));
  PetscCall(PetscArrayzero(lsizes, np));
  for (PetscInt i = 0; i < n; i++) lsizes[indices[i]]++;
  PetscCallMPI(MPIU_Allreduce(lsizes, sums, np, MPIU_INT, MPI_SUM, comm));
  PetscCallMPI(MPI_Scan(lsizes, starts, np, MPIU_INT, MPI_SUM, comm));
  for (PetscMPIInt i = 0; i < np; i++) starts[i] -= lsizes[i];
  for (PetscMPIInt i = 1; i < np; i++) {
    sums[i] += sums[i - 1];
    starts[i] += sums[i - 1];
  }

  /*
     For each local index give it the new global number
  */
  PetscCall(PetscMalloc1(n, &newi));
  for (PetscInt i = 0; i < n; i++) newi[i] = starts[indices[i]]++;
  PetscCall(PetscFree3(lsizes, starts, sums));

  PetscCall(ISRestoreIndices(part, &indices));
  PetscCall(ISCreateGeneral(comm, n, newi, PETSC_OWN_POINTER, is));
  PetscCall(ISSetPermutation(*is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISPartitioningCount - Takes an `IS` that represents a partitioning (the MPI rank that each local entry belongs to) and determines the number of
  resulting elements on each (partition) rank

  Collective

  Input Parameters:
+ part - a partitioning as generated by `MatPartitioningApply()` or `MatPartitioningApplyND()`
- len  - length of the array `count`, this is the total number of partitions

  Output Parameter:
. count - array of length `len`, to contain the number of elements assigned
          to each partition (see notes below)

  Level: advanced

  Notes:
  By default the number of partitions generated (and thus the length
  of `count`) is the size of the communicator associated with the `IS`,
  but it can be set by `MatPartitioningSetNParts()`.

  The resulting array of lengths can for instance serve as input of `PCBJacobiSetTotalBlocks()`.

  If the partitioning has been obtained by `MatPartitioningApplyND()`, the returned count does not include the separators.

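  Example Usage:
  A minimal sketch that obtains one count per rank of the communicator; `part` is an assumed partitioning `IS` living on `PETSC_COMM_WORLD`:
.vb
  PetscMPIInt size;
  PetscInt   *count;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscMalloc1(size, &count));
  PetscCall(ISPartitioningCount(part, size, count));
  PetscCall(PetscFree(count));
.ve
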
.seealso: [](sec_scatter), `IS`, `MatPartitioningCreate()`, `AOCreateBasic()`, `ISPartitioningToNumbering()`,
          `MatPartitioningSetNParts()`, `MatPartitioningApply()`, `MatPartitioningApplyND()`
@*/
PetscErrorCode ISPartitioningCount(IS part, PetscInt len, PetscInt count[])
{
  MPI_Comm        comm;
  PetscInt        i, n, *lsizes;
  const PetscInt *indices;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  if (len == PETSC_DEFAULT) {
    PetscMPIInt size;

    PetscCallMPI(MPI_Comm_size(comm, &size));
    len = size;
  }

  /* count the number of partitions */
  PetscCall(ISGetLocalSize(part, &n));
  PetscCall(ISGetIndices(part, &indices));
  if (PetscDefined(USE_DEBUG)) {
    PetscInt np = 0, npt;
    for (i = 0; i < n; i++) np = PetscMax(np, indices[i]);
    PetscCallMPI(MPIU_Allreduce(&np, &npt, 1, MPIU_INT, MPI_MAX, comm));
    np = npt + 1; /* so that it looks like a MPI_Comm_size output */
    PetscCheck(np <= len, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Length of count array %" PetscInt_FMT " is less than number of partitions %" PetscInt_FMT, len, np);
  }

  /* lsizes - number of elements of each partition on this particular processor */
  PetscCall(PetscCalloc1(len, &lsizes));
  for (i = 0; i < n; i++) {
    if (indices[i] > -1) lsizes[indices[i]]++;
  }
  PetscCall(ISRestoreIndices(part, &indices));
  PetscCallMPI(MPIU_Allreduce(lsizes, count, len, MPIU_INT, MPI_SUM, comm));
  PetscCall(PetscFree(lsizes));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISAllGather - Given an index set `IS` on each processor, generates a large
  index set (same on each processor) by concatenating together each
  processor's index set.

  Collective

  Input Parameter:
. is - the distributed index set

  Output Parameter:
. isout - the concatenated index set (same on all processors)

  Level: intermediate

  Notes:
  `ISAllGather()` is clearly not scalable for large index sets.

  The `IS` created on each processor must be created with a common
  communicator (e.g., `PETSC_COMM_WORLD`). If the index sets were created
  with `PETSC_COMM_SELF`, this routine will not work as expected, since
  each process will generate its own new `IS` that consists only of
  itself.

  The communicator for this new `IS` is `PETSC_COMM_SELF`

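  Example Usage:
  A minimal sketch; `is` is an assumed, previously created parallel `IS`, and `isout` lives on `PETSC_COMM_SELF`:
.vb
  IS isout;

  PetscCall(ISAllGather(is, &isout));
  PetscCall(ISView(isout, PETSC_VIEWER_STDOUT_SELF));
  PetscCall(ISDestroy(&isout));
.ve
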
.seealso: [](sec_scatter), `IS`, `ISCreateGeneral()`, `ISCreateStride()`, `ISCreateBlock()`
@*/
PetscErrorCode ISAllGather(IS is, IS *isout)
{
  PetscInt       *indices, n, i, N, step, first;
  const PetscInt *lindices;
  MPI_Comm        comm;
  PetscMPIInt     size, *sizes = NULL, *offsets = NULL, nn;
  PetscBool       stride;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is, IS_CLASSID, 1);
  PetscAssertPointer(isout, 2);

  PetscCall(PetscObjectGetComm((PetscObject)is, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscObjectTypeCompare((PetscObject)is, ISSTRIDE, &stride));
  if (size == 1 && stride) { /* should handle parallel ISStride also */
    PetscCall(ISStrideGetInfo(is, &first, &step));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, n, first, step, isout));
  } else {
    PetscCall(PetscMalloc2(size, &sizes, size, &offsets));

    PetscCall(PetscMPIIntCast(n, &nn));
    PetscCallMPI(MPI_Allgather(&nn, 1, MPI_INT, sizes, 1, MPI_INT, comm));
    offsets[0] = 0;
    for (i = 1; i < size; i++) {
      PetscInt s = offsets[i - 1] + sizes[i - 1];
      PetscCall(PetscMPIIntCast(s, &offsets[i]));
    }
    N = offsets[size - 1] + sizes[size - 1];

    PetscCall(PetscMalloc1(N, &indices));
    PetscCall(ISGetIndices(is, &lindices));
    PetscCallMPI(MPI_Allgatherv((void *)lindices, nn, MPIU_INT, indices, sizes, offsets, MPIU_INT, comm));
    PetscCall(ISRestoreIndices(is, &lindices));
    PetscCall(PetscFree2(sizes, offsets));

    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, N, indices, PETSC_OWN_POINTER, isout));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISAllGatherColors - Given a set of colors on each processor, generates a large
  set (same on each processor) by concatenating together each processor's colors

  Collective

  Input Parameters:
+ comm     - communicator to share the indices
. n        - local size of set
- lindices - local colors

  Output Parameters:
+ outN       - total number of indices
- outindices - all of the colors

  Level: intermediate

  Note:
  `ISAllGatherColors()` is clearly not scalable for large index sets.

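  Example Usage:
  A minimal sketch; `n` and `lcolors` are assumed local data on each rank, and the gathered array must be freed with `PetscFree()`:
.vb
  PetscInt         N;
  ISColoringValue *allcolors;

  PetscCall(ISAllGatherColors(PETSC_COMM_WORLD, n, lcolors, &N, &allcolors));
  /* ... use the N gathered colors ... */
  PetscCall(PetscFree(allcolors));
.ve
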
.seealso: `ISColoringValue`, `ISColoring`, `ISCreateGeneral()`, `ISCreateStride()`, `ISCreateBlock()`, `ISAllGather()`
@*/
PetscErrorCode ISAllGatherColors(MPI_Comm comm, PetscInt n, ISColoringValue lindices[], PetscInt *outN, ISColoringValue *outindices[])
{
  ISColoringValue *indices;
  PetscInt         N;
  PetscMPIInt      size, *offsets = NULL, *sizes = NULL, nn;

  PetscFunctionBegin;
  PetscCall(PetscMPIIntCast(n, &nn));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscMalloc2(size, &sizes, size, &offsets));

  PetscCallMPI(MPI_Allgather(&nn, 1, MPI_INT, sizes, 1, MPI_INT, comm));
  offsets[0] = 0;
  for (PetscMPIInt i = 1; i < size; i++) offsets[i] = offsets[i - 1] + sizes[i - 1];
  N = offsets[size - 1] + sizes[size - 1];

  PetscCall(PetscMalloc1(N + 1, &indices));
  PetscCallMPI(MPI_Allgatherv(lindices, nn, MPIU_COLORING_VALUE, indices, sizes, offsets, MPIU_COLORING_VALUE, comm));
  /* sizes and offsets must remain valid until after the MPI_Allgatherv() that uses them */
  PetscCall(PetscFree2(sizes, offsets));

  *outindices = indices;
  if (outN) *outN = N;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISComplement - Given an index set `IS`, generates the complement index set, that is,
  all indices that are NOT in the given set.

  Collective

  Input Parameters:
+ is   - the index set
. nmin - the first index desired in the local part of the complement
- nmax - the largest index desired in the local part of the complement (note that all indices in `is` must be greater than or equal to `nmin` and less than `nmax`)

  Output Parameter:
. isout - the complement

  Level: intermediate

  Notes:
  The communicator for `isout` is the same as for the input `is`

  For a parallel `is`, this will generate the local part of the complement on each process

  To generate the entire complement (on each process) of a parallel `is`, first call `ISAllGather()` and then
  call this routine.

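  Example Usage:
  A minimal sketch: the complement of {1, 3} within [0, 5) is {0, 2, 4}:
.vb
  IS       is, isout;
  PetscInt idx[2] = {1, 3};

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, idx, PETSC_COPY_VALUES, &is));
  PetscCall(ISComplement(is, 0, 5, &isout));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isout));
.ve
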
.seealso: [](sec_scatter), `IS`, `ISCreateGeneral()`, `ISCreateStride()`, `ISCreateBlock()`, `ISAllGather()`
@*/
PetscErrorCode ISComplement(IS is, PetscInt nmin, PetscInt nmax, IS *isout)
{
  const PetscInt *indices;
  PetscInt        n, i, j, unique, cnt, *nindices;
  PetscBool       sorted;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is, IS_CLASSID, 1);
  PetscAssertPointer(isout, 4);
  PetscCheck(nmin >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nmin %" PetscInt_FMT " cannot be negative", nmin);
  PetscCheck(nmin <= nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nmin %" PetscInt_FMT " cannot be greater than nmax %" PetscInt_FMT, nmin, nmax);
  PetscCall(ISSorted(is, &sorted));
  PetscCheck(sorted, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Index set must be sorted");

  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(ISGetIndices(is, &indices));
  if (PetscDefined(USE_DEBUG)) {
    for (i = 0; i < n; i++) {
      PetscCheck(indices[i] >= nmin, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT "'s value %" PetscInt_FMT " is smaller than minimum given %" PetscInt_FMT, i, indices[i], nmin);
      PetscCheck(indices[i] < nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT "'s value %" PetscInt_FMT " is larger than maximum given %" PetscInt_FMT, i, indices[i], nmax);
    }
  }
  /* count number of unique entries */
  unique = (n > 0);
  for (i = 0; i < n - 1; i++) {
    if (indices[i + 1] != indices[i]) unique++;
  }
  PetscCall(PetscMalloc1(nmax - nmin - unique, &nindices));
  cnt = 0;
  for (i = nmin, j = 0; i < nmax; i++) {
    if (j < n && i == indices[j]) do {
        j++;
      } while (j < n && i == indices[j]);
    else nindices[cnt++] = i;
  }
  PetscCheck(cnt == nmax - nmin - unique, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of entries found in complement %" PetscInt_FMT " does not match expected %" PetscInt_FMT, cnt, nmax - nmin - unique);
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), cnt, nindices, PETSC_OWN_POINTER, isout));
  PetscCall(ISSetInfo(*isout, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
  PetscCall(ISRestoreIndices(is, &indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}