#include <petscconf.h>
// We need to define this ahead of any other includes to make sure mkstemp is actually defined
#if defined(PETSC_HAVE_MKSTEMP)
  #define _XOPEN_SOURCE 600
#endif
#include <petsc/private/dmnetworkimpl.h> /*I "petscdmnetwork.h" I*/
#include <petscdraw.h>

static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2];
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the network containing coordinate information
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));
  // Get the coordinate vector for the network
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));
  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));

  // Iterate over each subnetwork (Note: the *global* number of subnetworks is needed here)
  PetscCall(DMNetworkGetNumSubNetworks(dm, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dm, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];
      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      vertexOffsets[1] = vertexOffsets[0] + 1;
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, vertex, &vertex));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // TODO: Determine vertex color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT "\n", (PetscInt)rank, vertex, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), vertex));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      PetscCall(DMNetworkGetConnectedVertices(dm, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edge, &edge));

      // TODO: Determine edge color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
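
/*
  For reference, an illustrative (hypothetical, not produced from any particular
  network) sample of the CSV stream written above:

    Type,Rank,ID,X,Y,Z,Name,Color
    Node,0,0,0.000000,0.000000,0,0
    Node,0,1,1.000000,0.500000,0,1
    Edge,0,0,0,1,0,0

  Note that the Name field currently repeats the global index and no Color
  field is written yet (see the TODOs above), so data rows carry one fewer
  column than the header.
*/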

static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;
  char        filename[PETSC_MAX_PATH_LEN + 1], proccall[PETSC_MAX_PATH_LEN + 500], scriptFile[PETSC_MAX_PATH_LEN + 1], streamBuffer[256];
  PetscViewer csvViewer;
  FILE       *processFile = NULL;
  PetscBool   isnull;
  PetscDraw   draw;
#if defined(PETSC_HAVE_MKSTEMP)
  PetscBool isSharedTmp;
#endif

  PetscFunctionBegin;
  // Deal with the PetscDraw we are given
  PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));

  // Clear the file name buffer so all communicated bytes are well-defined
  PetscCall(PetscMemzero(filename, sizeof(filename)));

  // Get the MPI communicator, this process' rank, and the communicator size
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

#if defined(PETSC_HAVE_MKSTEMP)
  // Determine whether the temporary directory is shared
  // Note: This must be done collectively on every rank, it cannot be done on a single rank
  PetscCall(PetscSharedTmp(comm, &isSharedTmp));
#endif

  // Generate and broadcast the temporary file name from rank 0
  if (rank == 0) {
#if defined(PETSC_HAVE_TMPNAM_S)
    // Acquire a temporary file to write to and open an ASCII/CSV viewer
    PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#elif defined(PETSC_HAVE_MKSTEMP)
    PetscBool isTmpOverridden;
    size_t    numChars;
    // Same thing, but for POSIX systems on which tmpnam is deprecated
    // Note: Configure may detect mkstemp but it will not be defined if compiling for C99, so check additional defines to see if we can use it
    // mkstemp requires us to explicitly specify part of the path, but some systems may not like putting files in /tmp/ so have an option for it
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), &isTmpOverridden));
    // If not specified by option, try using a shared tmp on the system
    if (!isTmpOverridden) {
      // Validate that if tmp is not overridden it is at least shared
      PetscCheck(isSharedTmp, comm, PETSC_ERR_SUP_SYS, "Temporary file directory is not shared between ranks, try using -dmnetwork_view_tmpdir to specify a shared directory");
      PetscCall(PetscGetTmp(PETSC_COMM_SELF, filename, sizeof(filename)));
    }
    // Make sure the filename ends with a '/'
    PetscCall(PetscStrlen(filename, &numChars));
    if (filename[numChars - 1] != '/') {
      filename[numChars]     = '/';
      filename[numChars + 1] = 0;
    }
    // Perform the actual temporary file creation
    PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
    PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#else
    // Same thing, but for older C versions which don't have the safe form
    PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#endif
  }

  // Broadcast the filename to all other MPI ranks
  PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));

  PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, filename, &csvViewer));
  PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));

  // Use the CSV viewer to write out the local network
  PetscCall(DMView_Network_CSV(dm, csvViewer));

  // Close the viewer
  PetscCall(PetscViewerDestroy(&csvViewer));

  // Get the value of $PETSC_DIR
  PetscCall(PetscStrreplace(PETSC_COMM_WORLD, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
  PetscCall(PetscFixFilename(scriptFile, scriptFile));
  // Generate the system call for 'python3 $PETSC_DIR/share/petsc/bin/dmnetwork_view.py <file>'
  PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
  PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, (isnull ? "-tx" : ""), filename));

#if defined(PETSC_HAVE_POPEN)
  // Perform the call to run the python script (Note: while this is called on all ranks, POpen will only run on rank 0)
  PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, proccall, "r", &processFile));
  if (processFile != NULL) {
    while (fgets(streamBuffer, sizeof(streamBuffer), processFile) != NULL) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s", streamBuffer));
  }
  PetscCall(PetscPClose(PETSC_COMM_WORLD, processFile));
#else
  // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
  if (rank == 0) PetscCheck(system(proccall) == 0, comm, PETSC_ERR_SYS, "Failed to call viewer script");
  // Barrier so that all ranks wait until the call completes (must be called on every rank, not just rank 0)
  PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
#endif
  // Clean up the temporary file using rank 0
  if (rank == 0) PetscCheck(remove(filename) == 0, comm, PETSC_ERR_SYS, "Failed to delete temporary file");
  PetscFunctionReturn(PETSC_SUCCESS);
}
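
/*
  Usage sketch for the matplotlib path above (a minimal example, assuming "dm"
  is a set-up DMNetwork with vertex coordinates attached; the window title
  "network" is arbitrary):

    PetscViewer drawViewer;
    PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, NULL, "network", PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, &drawViewer));
    PetscCall(DMView(dm, drawViewer)); // draw viewers are dispatched to DMView_Network_Matplotlib below
    PetscCall(PetscViewerDestroy(&drawViewer));
*/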

PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    nsubnet = network->cloneshared->Nsubnet; /* num of subnetworks */
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "  NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices, network->cloneshared->Nsvtx));
    }

    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices (including shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "    svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}
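
/*
  Usage sketch for the CSV path (a minimal example, assuming "dm" is a set-up
  DMNetwork; the file name "network.csv" is arbitrary):

    PetscViewer csvOut;
    PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "network.csv", &csvOut));
    PetscCall(PetscViewerPushFormat(csvOut, PETSC_VIEWER_ASCII_CSV));
    PetscCall(DMView(dm, csvOut)); // ASCII viewer + CSV format routes to DMView_Network_CSV
    PetscCall(PetscViewerPopFormat(csvOut));
    PetscCall(PetscViewerDestroy(&csvOut));
*/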