#include <petscconf.h>
// We need to define this ahead of any other includes to make sure mkstemp is actually defined
#if defined(PETSC_HAVE_MKSTEMP)
  #define _XOPEN_SOURCE 600
#endif
#include "petsc/private/petscimpl.h"
#include "petscerror.h"
#include "petscis.h"
#include "petscstring.h"
#include "petscsys.h"
#include "petscsystypes.h"
#include <petsc/private/dmnetworkimpl.h> /*I  "petscdmnetwork.h"  I*/
#include <petscdraw.h>

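// Writes the network to the given ASCII viewer in a simple CSV form: a single header row
// (Type,Rank,ID,X,Y,Z,Name,Color) followed by one "Node" row per vertex and one "Edge" row
// per edge, printed synchronized across all MPI ranks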
static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge, gidx, ncomp;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2], *color_ptr, color;
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the coordinate information from dmcoords
  PetscCheck(dm->coordinates[0].dm, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_NULL, "CoordinateDM not created");
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));

  PetscCall(DMGetCoordinateDim(dmcoords, &i));
  PetscCheck(i == 2, PETSC_COMM_WORLD, PETSC_ERR_SUP, "dim %" PetscInt_FMT " != 2 is not supported yet", i);

  // Get the coordinate vector from dm
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));

  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dmcoords, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));
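  // Illustrative output (values are made up): a vertex on rank 0 with global index 3 at (1.5, 2.25)
  // and an edge on rank 0 with global index 7 connecting global vertices 3 and 4 appear as
  //   Node,0,3,1.500000,2.250000,0,3,0.000000
  //   Edge,0,7,3,4,0,7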

  // Iterate over each subnetwork (note: this requires the global number of subnetworks)
  PetscCall(DMNetworkGetNumSubNetworks(dmcoords, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dmcoords, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];

      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      vertexOffsets[1] = vertexOffsets[0] + 1;
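      // (the offset for y is x's offset + 1 because each vertex stores its (x, y) pair contiguously in the coordinate vector)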
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, vertex, &gidx));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // Get vertex color; TODO: name
      PetscCall(DMNetworkGetNumComponents(dmcoords, vertex, &ncomp));
      PetscCheck(ncomp <= 1, PETSC_COMM_WORLD, PETSC_ERR_SUP, "num of components %" PetscInt_FMT " must be <= 1", ncomp);
      color = 0.0;
      if (ncomp == 1) {
        PetscCall(DMNetworkGetComponent(dmcoords, vertex, 0, NULL, (void **)&color_ptr, NULL));
        color = *color_ptr;
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT ",%lf\n", (PetscInt)rank, gidx, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), gidx, (double)PetscRealPart(color)));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      PetscCall(DMNetworkGetConnectedVertices(dmcoords, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dmcoords, edge, &edge));

      // TODO: Determine edge color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

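// Renders the network by writing it to a temporary CSV file (via DMView_Network_CSV) and then
// invoking the matplotlib-based script $PETSC_DIR/share/petsc/bin/dmnetwork_view.py on that file,
// forwarding the viewer options below as command-line flags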
static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;
  char        filename[PETSC_MAX_PATH_LEN + 1], options[512], proccall[PETSC_MAX_PATH_LEN + 512], scriptFile[PETSC_MAX_PATH_LEN + 1], buffer[256];
  PetscViewer csvViewer;
  FILE       *processFile = NULL;
  PetscBool   isnull, optionShowRanks = PETSC_FALSE, optionRankIsSet = PETSC_FALSE, showNoNodes = PETSC_FALSE, showNoNumbering = PETSC_FALSE;
  PetscDraw   draw;
  DM_Network *network = (DM_Network *)dm->data;
  PetscReal   drawPause;
  PetscInt    i;
#if defined(PETSC_HAVE_MKSTEMP)
  PetscBool isSharedTmp;
#endif

  PetscFunctionBegin;
  // Deal with the PetscDraw we are given
  PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));

  // Clear the file name buffer so all communicated bytes are well-defined
  PetscCall(PetscMemzero(filename, sizeof(filename)));

  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

#if defined(PETSC_HAVE_MKSTEMP)
  // Get whether the temporary directory is shared
  // Note: This must be done collectively on every rank; it cannot be done on a single rank
  PetscCall(PetscSharedTmp(comm, &isSharedTmp));
#endif

  /* Process Options */
  optionShowRanks = network->vieweroptions.showallranks;
  showNoNodes     = network->vieweroptions.shownovertices;
  showNoNumbering = network->vieweroptions.shownonumbering;

  /*
    TODO: if the option -dmnetwork_view_tmpdir can be moved up here that would be good as well.
  */
  PetscOptionsBegin(PetscObjectComm((PetscObject)dm), ((PetscObject)dm)->prefix, "MatPlotLib PetscViewer DMNetwork Options", "PetscViewer");
  PetscCall(PetscOptionsBool("-dmnetwork_view_all_ranks", "View all ranks in the DMNetwork", NULL, optionShowRanks, &optionShowRanks, NULL));
  PetscCall(PetscOptionsString("-dmnetwork_view_rank_range", "Set of ranks to view the DMNetwork on", NULL, buffer, buffer, sizeof(buffer), &optionRankIsSet));
  PetscCall(PetscOptionsBool("-dmnetwork_view_no_vertices", "Do not view vertices", NULL, showNoNodes, &showNoNodes, NULL));
  PetscCall(PetscOptionsBool("-dmnetwork_view_no_numbering", "Do not view edge and vertex numbering", NULL, showNoNumbering, &showNoNumbering, NULL));
  PetscOptionsEnd();
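  // For example (illustrative), running an application with
  //   -dmnetwork_view_all_ranks -dmnetwork_view_no_numbering
  // plots every rank's subnetwork and suppresses the vertex and edge labels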

  // Generate and broadcast the temporary file name from rank 0
  if (rank == 0) {
#if defined(PETSC_HAVE_TMPNAM_S)
    // Acquire a temporary file to write to and open an ASCII/CSV viewer
    PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#elif defined(PETSC_HAVE_MKSTEMP)
    PetscBool isTmpOverridden;
    size_t    numChars;
    // Same thing, but for POSIX systems on which tmpnam is deprecated
    // Note: Configure may detect mkstemp, but it will not be defined when compiling for C99, so check additional defines to see if we can use it
    // mkstemp requires us to explicitly specify part of the path, but some systems may not like putting files in /tmp/, so provide an option for it
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), &isTmpOverridden));
    // If not specified by option, try using a shared tmp on the system
    if (!isTmpOverridden) {
      // Validate that if tmp is not overridden it is at least shared
      PetscCheck(isSharedTmp, comm, PETSC_ERR_SUP_SYS, "Temporary file directory is not shared between ranks, try using -dmnetwork_view_tmpdir to specify a shared directory");
      PetscCall(PetscGetTmp(PETSC_COMM_SELF, filename, sizeof(filename)));
    }
    // Make sure the filename ends with a '/'
    PetscCall(PetscStrlen(filename, &numChars));
    if (filename[numChars - 1] != '/') {
      filename[numChars]     = '/';
      filename[numChars + 1] = 0;
    }
    // Perform the actual temporary file creation
    PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
    PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#else
    // Same thing, but for older C versions which don't have the safe form
    PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#endif
  }

  // Broadcast the filename to all other MPI ranks
  PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));

  PetscCall(PetscViewerASCIIOpen(comm, filename, &csvViewer));
  PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));

  // Use the CSV viewer to write out the local network
  PetscCall(DMView_Network_CSV(dm, csvViewer));

  // Close the viewer
  PetscCall(PetscViewerDestroy(&csvViewer));

  // Generate options string
  PetscCall(PetscMemzero(options, sizeof(options)));
  // If the draw is null, run as a "test execute", i.e. do nothing, just test that the script was called correctly
  PetscCall(PetscStrlcat(options, isnull ? " -tx " : " ", sizeof(options)));
  PetscCall(PetscDrawGetPause(draw, &drawPause));
  if (drawPause > 0) {
    char pausebuffer[64];
    PetscCall(PetscSNPrintf(pausebuffer, sizeof(pausebuffer), "%f", (double)drawPause));
    PetscCall(PetscStrlcat(options, " -dt ", sizeof(options)));
    PetscCall(PetscStrlcat(options, pausebuffer, sizeof(options)));
  }
  if (optionShowRanks || optionRankIsSet) {
    // Show all ranks only if the option is set in code or by the user AND not showing specific ranks AND there is more than one process
    if (optionShowRanks && !optionRankIsSet && size != 1) PetscCall(PetscStrlcat(options, " -dar ", sizeof(options)));
    // Do not show the global plot if the user requests it OR if one specific rank is requested
    if (network->vieweroptions.dontshowglobal || optionRankIsSet) PetscCall(PetscStrlcat(options, " -ncp ", sizeof(options)));

    if (optionRankIsSet) {
      // If a range of ranks to draw is specified, append it
      PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
      PetscCall(PetscStrlcat(options, buffer, sizeof(options)));
    } else {
      // Otherwise, use the options provided in code
      if (network->vieweroptions.viewranks) {
        const PetscInt *viewranks;
        PetscInt        viewrankssize;
        char            rankbuffer[64];
        PetscCall(ISGetTotalIndices(network->vieweroptions.viewranks, &viewranks));
        PetscCall(ISGetSize(network->vieweroptions.viewranks, &viewrankssize));
        PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
        for (i = 0; i < viewrankssize; i++) {
          PetscCall(PetscSNPrintf(rankbuffer, sizeof(rankbuffer), "%" PetscInt_FMT, viewranks[i]));
          PetscCall(PetscStrlcat(options, rankbuffer, sizeof(options)));
        }
        PetscCall(ISRestoreTotalIndices(network->vieweroptions.viewranks, &viewranks));
      } // if not provided an IS of viewing ranks, skip viewing
    }
  }

  // Check the visibility options
  if (showNoNodes) PetscCall(PetscStrlcat(options, " -nn ", sizeof(options)));
  if (showNoNumbering) PetscCall(PetscStrlcat(options, " -nnl -nel ", sizeof(options)));

  // Get the value of $PETSC_DIR
  PetscCall(PetscStrreplace(comm, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
  PetscCall(PetscFixFilename(scriptFile, scriptFile));
  // Generate the system call for 'python3 $PETSC_DIR/share/petsc/bin/dmnetwork_view.py <options> <file>'
  PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
  PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, options, filename));
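  // The assembled command has the form
  //   <python3> <PETSC_DIR>/share/petsc/bin/dmnetwork_view.py <options> <temporary CSV file>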

#if defined(PETSC_HAVE_POPEN)
  // Perform the call to run the python script (Note: while this is called on all ranks, PetscPOpen will only run the command on rank 0)
  PetscCall(PetscPOpen(comm, NULL, proccall, "r", &processFile));
  if (processFile != NULL) {
    while (fgets(buffer, sizeof(buffer), processFile) != NULL) PetscCall(PetscPrintf(comm, "%s", buffer));
  }
  PetscCall(PetscPClose(comm, processFile));
#else
  // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
  if (rank == 0) PetscCheck(system(proccall) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to call viewer script");
  // Barrier so that all ranks wait until the call completes
  PetscCallMPI(MPI_Barrier(comm));
#endif
  // Clean up the temporary file on rank 0
  if (rank == 0) PetscCheck(remove(filename) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to delete temporary file");
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    nsubnet = network->cloneshared->Nsubnet; /* num of subnetworks */
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "  NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices,
                            network->cloneshared->Nsvtx));
    }

    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices (including shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "                                           ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowRanks - Sets viewing the `DMNETWORK` on each rank individually.

  Logically Collective

  Input Parameters:
+ dm        - the `DMNETWORK` object
- showranks - `PETSC_TRUE` if viewing each rank's subnetwork individually

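  Example Usage:
.vb
  // sketch: show each rank's subnetwork individually in the draw viewer
  PetscCall(DMNetworkViewSetShowRanks(dm, PETSC_TRUE));
  PetscCall(DMView(dm, PETSC_VIEWER_DRAW_WORLD));
.ve
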
  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowRanks(DM dm, PetscBool showranks)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
  network->vieweroptions.showallranks = showranks;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowGlobal - Sets viewing the global network.

  Logically Collective

  Input Parameters:
+ dm         - the `DMNETWORK` object
- showglobal - `PETSC_TRUE` if viewing the global network

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowGlobal(DM dm, PetscBool showglobal)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
  network->vieweroptions.dontshowglobal = (PetscBool)(!showglobal);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowVertices - Sets whether to display the vertices in viewing routines.

  Logically Collective

  Input Parameters:
+ dm           - the `DMNETWORK` object
- showvertices - `PETSC_TRUE` if visualizing the vertices

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowVertices(DM dm, PetscBool showvertices)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
  network->vieweroptions.shownovertices = (PetscBool)(!showvertices);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowNumbering - Sets displaying the numbering of edges and vertices in viewing routines.

  Logically Collective

  Input Parameters:
+ dm            - the `DMNETWORK` object
- shownumbering - `PETSC_TRUE` if displaying the numbering of edges and vertices

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowNumbering(DM dm, PetscBool shownumbering)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
  network->vieweroptions.shownonumbering = (PetscBool)(!shownumbering);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetViewRanks - View the `DMNETWORK` on each of the specified ranks individually.

  Collective

  Input Parameters:
+ dm        - the `DMNETWORK` object
- viewranks - set of ranks on which to view the `DMNETWORK` individually

  Level: beginner

  Note:
  `DMNetwork` keeps its own reference to the input `viewranks` `IS`, so it should still be destroyed by the caller.

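  Example Usage:
.vb
  // sketch: view the network only on ranks 0 and 1
  IS viewranks;

  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)dm), 2, 0, 1, &viewranks));
  PetscCall(DMNetworkViewSetViewRanks(dm, viewranks));
  PetscCall(ISDestroy(&viewranks)); // the DM holds its own reference
.ve
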
.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`
@*/
PetscErrorCode DMNetworkViewSetViewRanks(DM dm, IS viewranks)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
  PetscValidHeaderSpecific(viewranks, IS_CLASSID, 2);
  PetscCheckSameComm(dm, 1, viewranks, 2);
  PetscCall(ISDestroy(&network->vieweroptions.viewranks));
  PetscCall(PetscObjectReference((PetscObject)viewranks));
  network->vieweroptions.viewranks = viewranks;
  PetscFunctionReturn(PETSC_SUCCESS);
}