xref: /petsc/src/dm/impls/network/networkview.c (revision 4bb2516a9f08f79a30c7544bad46962d66afb2db)
#include <petscconf.h>
// We need to define this ahead of any other includes to make sure mkstemp is actually defined
#if defined(PETSC_HAVE_MKSTEMP)
  #define _XOPEN_SOURCE 600
#endif
#include "petsc/private/petscimpl.h"
#include "petscerror.h"
#include "petscis.h"
#include "petscstring.h"
#include "petscsys.h"
#include "petscsystypes.h"
#include <petsc/private/dmnetworkimpl.h> /*I  "petscdmnetwork.h"  I*/
#include <petscdraw.h>
#if defined(PETSC_HAVE_MKSTEMP)
  #include <unistd.h> // close() for the descriptor returned by mkstemp()
#endif
14 
// Write the local portion of the network to the given ASCII viewer in CSV form:
// one "Node" row per vertex (with its coordinates) and one "Edge" row per edge
// (with the global indices of its endpoints). Synchronized printing is used so
// rows from all ranks end up in the single output stream.
static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;                           // coordinate DM attached to the network
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2];                    // (x, y) of the current vertex; assumes 2D coordinates -- TODO confirm
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;                    // local coordinate vector; indexed via vertexOffsets
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the network containing coordinate information
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));
  // Get the coordinate vector for the network
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));
  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  // NOTE(review): the header names 8 columns but the data rows below emit 7 fields
  // (no separate Color value) -- confirm against the consuming script
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));

  // Iterate each subnetwork (Note: We need to get the global number of subnets apparently)
  PetscCall(DMNetworkGetNumSubNetworks(dm, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dm, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];
      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      // x and y are stored consecutively, so the second index is offset + 1
      vertexOffsets[1] = vertexOffsets[0] + 1;
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, vertex, &vertex));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // TODO: Determine vertex color/name
      // The global vertex index doubles as the Name field for now
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT "\n", (PetscInt)rank, vertex, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), vertex));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      // Look up the two endpoints and translate everything to global numbering
      PetscCall(DMNetworkGetConnectedVertices(dm, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edge, &edge));

      // TODO: Determine edge color/name
      // Edge rows reuse the X/Y columns for the endpoint vertex indices
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing (flush must come before the pop)
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
78 
79 static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
80 {
81   PetscMPIInt rank, size;
82   MPI_Comm    comm;
83   char        filename[PETSC_MAX_PATH_LEN + 1], options[512], proccall[PETSC_MAX_PATH_LEN + 512], scriptFile[PETSC_MAX_PATH_LEN + 1], buffer[256];
84   PetscViewer csvViewer;
85   FILE       *processFile = NULL;
86   PetscBool   isnull, optionShowRanks = PETSC_FALSE, optionRankIsSet = PETSC_FALSE, showNoNodes = PETSC_FALSE, showNoNumbering = PETSC_FALSE;
87   PetscDraw   draw;
88   DM_Network *network = (DM_Network *)dm->data;
89   PetscReal   drawPause;
90   PetscInt    i;
91 #if defined(PETSC_HAVE_MKSTEMP)
92   PetscBool isSharedTmp;
93 #endif
94 
95   PetscFunctionBegin;
96   // Deal with the PetscDraw we are given
97   PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
98   PetscCall(PetscDrawIsNull(draw, &isnull));
99   PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));
100 
101   // Clear the file name buffer so all communicated bytes are well-defined
102   PetscCall(PetscMemzero(filename, sizeof(filename)));
103 
104   // Get the MPI communicator and this process' rank
105   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
106   PetscCallMPI(MPI_Comm_rank(comm, &rank));
107   PetscCallMPI(MPI_Comm_size(comm, &size));
108 
109 #if defined(PETSC_HAVE_MKSTEMP)
110   // Get if the temporary directory is shared
111   // Note: This must be done collectively on every rank, it cannot be done on a single rank
112   PetscCall(PetscSharedTmp(comm, &isSharedTmp));
113 #endif
114 
115   /* Process Options */
116   optionShowRanks = network->vieweroptions.showallranks;
117   showNoNodes     = network->vieweroptions.shownovertices;
118   showNoNumbering = network->vieweroptions.shownonumbering;
119 
120   /*
121     TODO:  if the option -dmnetwork_view_tmpdir can be moved up here that would be good as well.
122   */
123   PetscOptionsBegin(PetscObjectComm((PetscObject)dm), ((PetscObject)dm)->prefix, "MatPlotLib PetscViewer DMNetwork Options", "PetscViewer");
124   PetscCall(PetscOptionsBool("-dmnetwork_view_all_ranks", "View all ranks in the DMNetwork", NULL, optionShowRanks, &optionShowRanks, NULL));
125   PetscCall(PetscOptionsString("-dmnetwork_view_rank_range", "Set of ranks to view the DMNetwork on", NULL, buffer, buffer, sizeof(buffer), &optionRankIsSet));
126   PetscCall(PetscOptionsBool("-dmnetwork_view_no_vertices", "Do not view vertices", NULL, showNoNodes, &showNoNodes, NULL));
127   PetscCall(PetscOptionsBool("-dmnetwork_view_no_numbering", "Do not view edge and vertex numbering", NULL, showNoNumbering, &showNoNumbering, NULL));
128   PetscOptionsEnd();
129 
130   // Generate and broadcast the temporary file name from rank 0
131   if (rank == 0) {
132 #if defined(PETSC_HAVE_TMPNAM_S)
133     // Acquire a temporary file to write to and open an ASCII/CSV viewer
134     PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
135 #elif defined(PETSC_HAVE_MKSTEMP)
136     PetscBool isTmpOverridden;
137     size_t    numChars;
138     // Same thing, but for POSIX systems on which tmpnam is deprecated
139     // Note: Configure may detect mkstemp but it will not be defined if compiling for C99, so check additional defines to see if we can use it
140     // Mkstemp requires us to explicitly specify part of the path, but some systems may not like putting files in /tmp/ so have an option for it
141     PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), &isTmpOverridden));
142     // If not specified by option try using a shared tmp on the system
143     if (!isTmpOverridden) {
144       // Validate that if tmp is not overridden it is at least shared
145       PetscCheck(isSharedTmp, comm, PETSC_ERR_SUP_SYS, "Temporary file directory is not shared between ranks, try using -dmnetwork_view_tmpdir to specify a shared directory");
146       PetscCall(PetscGetTmp(PETSC_COMM_SELF, filename, sizeof(filename)));
147     }
148     // Make sure the filename ends with a '/'
149     PetscCall(PetscStrlen(filename, &numChars));
150     if (filename[numChars - 1] != '/') {
151       filename[numChars]     = '/';
152       filename[numChars + 1] = 0;
153     }
154     // Perform the actual temporary file creation
155     PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
156     PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
157 #else
158     // Same thing, but for older C versions which don't have the safe form
159     PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
160 #endif
161   }
162 
163   // Broadcast the filename to all other MPI ranks
164   PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));
165 
166   PetscCall(PetscViewerASCIIOpen(comm, filename, &csvViewer));
167   PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));
168 
169   // Use the CSV viewer to write out the local network
170   PetscCall(DMView_Network_CSV(dm, csvViewer));
171 
172   // Close the viewer
173   PetscCall(PetscViewerDestroy(&csvViewer));
174 
175   // Generate options string
176   PetscCall(PetscMemzero(options, sizeof(options)));
177   // If the draw is null run as a "test execute" ie. do nothing just test that the script was called correctly
178   PetscCall(PetscStrlcat(options, isnull ? " -tx " : " ", sizeof(options)));
179   PetscCall(PetscDrawGetPause(draw, &drawPause));
180   if (drawPause > 0) {
181     char pausebuffer[64];
182     PetscCall(PetscSNPrintf(pausebuffer, sizeof(pausebuffer), "%f", (double)drawPause));
183     PetscCall(PetscStrlcat(options, " -dt ", sizeof(options)));
184     PetscCall(PetscStrlcat(options, pausebuffer, sizeof(options)));
185   }
186   if (optionShowRanks || optionRankIsSet) {
187     // Show all ranks only if the option is set in code or by the user AND not showing specific ranks AND there is more than one process
188     if (optionShowRanks && !optionRankIsSet && size != 1) PetscCall(PetscStrlcat(options, " -dar ", sizeof(options)));
189     // Do not show the global plot if the user requests it OR if one specific rank is requested
190     if (network->vieweroptions.dontshowglobal || optionRankIsSet) PetscCall(PetscStrlcat(options, " -ncp ", sizeof(options)));
191 
192     if (optionRankIsSet) {
193       // If a range of ranks to draw is specified append it
194       PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
195       PetscCall(PetscStrlcat(options, buffer, sizeof(options)));
196     } else {
197       // Otherwise, use the options provided in code
198       if (network->vieweroptions.viewranks) {
199         const PetscInt *viewranks;
200         PetscInt        viewrankssize;
201         char            rankbuffer[64];
202         PetscCall(ISGetTotalIndices(network->vieweroptions.viewranks, &viewranks));
203         PetscCall(ISGetSize(network->vieweroptions.viewranks, &viewrankssize));
204         PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
205         for (i = 0; i < viewrankssize; i++) {
206           PetscCall(PetscSNPrintf(rankbuffer, sizeof(rankbuffer), "%" PetscInt_FMT, viewranks[i]));
207           PetscCall(PetscStrlcat(options, rankbuffer, sizeof(options)));
208         }
209         PetscCall(ISRestoreTotalIndices(network->vieweroptions.viewranks, &viewranks));
210       } // if not provided an IS of viewing ranks, skip viewing
211     }
212   }
213 
214   // Check for options for visibility...
215   if (showNoNodes) PetscCall(PetscStrlcat(options, " -nn ", sizeof(options)));
216   if (showNoNumbering) PetscCall(PetscStrlcat(options, " -nnl -nel ", sizeof(options)));
217 
218   // Get the value of $PETSC_DIR
219   PetscCall(PetscStrreplace(comm, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
220   PetscCall(PetscFixFilename(scriptFile, scriptFile));
221   // Generate the system call for 'python3 $PETSC_DIR/share/petsc/dmnetwork_view.py <options> <file>'
222   PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
223   PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, options, filename));
224 
225 #if defined(PETSC_HAVE_POPEN)
226   // Perform the call to run the python script (Note: while this is called on all ranks POpen will only run on rank 0)
227   PetscCall(PetscPOpen(comm, NULL, proccall, "r", &processFile));
228   if (processFile != NULL) {
229     while (fgets(buffer, sizeof(buffer), processFile) != NULL) PetscCall(PetscPrintf(comm, "%s", buffer));
230   }
231   PetscCall(PetscPClose(comm, processFile));
232 #else
233   // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
234   if (rank == 0) PetscCheck(system(proccall) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to call viewer script");
235   // Barrier so that all ranks wait until the call completes
236   PetscCallMPI(MPI_Barrier(comm));
237 #endif
238   // Clean up the temporary file we used using rank 0
239   if (rank == 0) PetscCheck(remove(filename) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to delete temporary file");
240   PetscFunctionReturn(PETSC_SUCCESS);
241 }
242 
// Viewer dispatch for DMNetwork: draw viewers are forwarded to the matplotlib
// pipeline, ASCII viewers produce either CSV (PETSC_VIEWER_ASCII_CSV format)
// or a rank-by-rank text summary of subnetworks, edges and shared vertices,
// and every other viewer type raises PETSC_ERR_SUP.
PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  // Draw viewers delegate everything to the python/matplotlib path
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    // CSV format is handled by the dedicated writer
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    nsubnet = network->cloneshared->Nsubnet; /* num of subnetworks */
    // Global counts are printed once, by rank 0 only
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "  NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices,
                            network->cloneshared->Nsvtx));
    }

    // Per-rank counts are printed with synchronized output so ranks appear in order
    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices(include shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          // Resolve the edge's endpoints and convert everything to global numbering
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        // Only the owning rank reports a shared vertex; ghosts are skipped
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        // sv holds (subnet, local index) pairs for every appearance of the vertex
        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "                                           ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    // iascii is false on this path, so the check below always raises the unsupported-viewer error
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}
319 
320 /*@
  DMNetworkViewSetShowRanks - Sets viewing the `DMNETWORK` on each rank individually.
322 
323   Logically Collective
324 
  Input Parameters:
+ dm        - the `DMNETWORK` object
- showranks - `PETSC_TRUE` if viewing each rank's sub network individually
330 
331   Level: beginner
332 
333 .seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
334 @*/
335 PetscErrorCode DMNetworkViewSetShowRanks(DM dm, PetscBool showranks)
336 {
337   DM_Network *network = (DM_Network *)dm->data;
338 
339   PetscFunctionBegin;
340   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
341   network->vieweroptions.showallranks = showranks;
342   PetscFunctionReturn(PETSC_SUCCESS);
343 }
344 
345 /*@
346   DMNetworkViewSetShowGlobal - Set viewing the global network.
347 
348   Logically Collective
349 
  Input Parameters:
+ dm         - the `DMNETWORK` object
- showglobal - `PETSC_TRUE` if viewing the global network
355 
356   Level: beginner
357 
358 .seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
359 @*/
360 PetscErrorCode DMNetworkViewSetShowGlobal(DM dm, PetscBool showglobal)
361 {
362   DM_Network *network = (DM_Network *)dm->data;
363 
364   PetscFunctionBegin;
365   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
366   network->vieweroptions.dontshowglobal = (PetscBool)(!showglobal);
367   PetscFunctionReturn(PETSC_SUCCESS);
368 }
369 
370 /*@
371   DMNetworkViewSetShowVertices - Sets whether to display the vertices in viewing routines.
372 
373   Logically Collective
374 
  Input Parameters:
+ dm           - the `DMNETWORK` object
- showvertices - `PETSC_TRUE` if visualizing the vertices
380 
381   Level: beginner
382 
383 .seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
384 @*/
385 PetscErrorCode DMNetworkViewSetShowVertices(DM dm, PetscBool showvertices)
386 {
387   DM_Network *network = (DM_Network *)dm->data;
388 
389   PetscFunctionBegin;
390   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
391   network->vieweroptions.shownovertices = (PetscBool)(!showvertices);
392   PetscFunctionReturn(PETSC_SUCCESS);
393 }
394 
395 /*@
396   DMNetworkViewSetShowNumbering - Set displaying the numbering of edges and vertices in viewing routines.
397 
398   Logically Collective
399 
  Input Parameters:
+ dm            - the `DMNETWORK` object
- shownumbering - `PETSC_TRUE` if displaying the numbering of edges and vertices
405 
406   Level: beginner
407 
408 .seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetViewRanks()`
409 @*/
410 PetscErrorCode DMNetworkViewSetShowNumbering(DM dm, PetscBool shownumbering)
411 {
412   DM_Network *network = (DM_Network *)dm->data;
413 
414   PetscFunctionBegin;
415   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
416   network->vieweroptions.shownonumbering = (PetscBool)(!shownumbering);
417   PetscFunctionReturn(PETSC_SUCCESS);
418 }
419 
420 /*@
421   DMNetworkViewSetViewRanks - View the `DMNETWORK` on each of the specified ranks individually.
422 
423   Collective
424 
  Input Parameters:
+ dm        - the `DMNETWORK` object
- viewranks - set of ranks to view the `DMNETWORK` on individually

  Level: beginner

  Note:
  `DMNetwork` takes a new reference to the input `viewranks` `IS`; the caller retains its own reference and is still responsible for destroying it.
435 
436 .seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`
437 @*/
438 PetscErrorCode DMNetworkViewSetViewRanks(DM dm, IS viewranks)
439 {
440   DM_Network *network = (DM_Network *)dm->data;
441 
442   PetscFunctionBegin;
443   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMNETWORK);
444   PetscValidHeaderSpecific(viewranks, IS_CLASSID, 2);
445   PetscCheckSameComm(dm, 1, viewranks, 2);
446   PetscCall(ISDestroy(&network->vieweroptions.viewranks));
447   PetscCall(PetscObjectReference((PetscObject)viewranks));
448   network->vieweroptions.viewranks = viewranks;
449   PetscFunctionReturn(PETSC_SUCCESS);
450 }
451