#include <petsc/private/dmnetworkimpl.h> /*I "petscdmnetwork.h" I*/

static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2];
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the network containing coordinate information
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));
  // Get the coordinate vector for the network
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));
  // Get the MPI communicator and this process's rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));

  // Iterate over each subnetwork (Note: we need the global number of subnetworks, so the local count is ignored)
  PetscCall(DMNetworkGetNumSubNetworks(dm, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dm, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];
      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      vertexOffsets[1] = vertexOffsets[0] + 1; // the y coordinate immediately follows x in the coordinate vector
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, vertex, &vertex));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // TODO: Determine vertex color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT "\n", (PetscInt)rank, vertex, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), vertex));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      PetscCall(DMNetworkGetConnectedVertices(dm, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edge, &edge));

      // TODO: Determine edge color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

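/*
  Usage sketch (illustrative, not part of this file's API): the CSV writer above
  is reached by pushing the CSV format onto an ASCII viewer before viewing; the
  output file name below is hypothetical.

    PetscViewer v;
    PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "network.csv", &v));
    PetscCall(PetscViewerPushFormat(v, PETSC_VIEWER_ASCII_CSV));
    PetscCall(DMView(dm, v));
    PetscCall(PetscViewerPopFormat(v));
    PetscCall(PetscViewerDestroy(&v));
*/
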
#include <petscdraw.h>
static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;
  char        filename[PETSC_MAX_PATH_LEN + 1], proccall[PETSC_MAX_PATH_LEN + 500], scriptFile[PETSC_MAX_PATH_LEN + 1], streamBuffer[256];
  PetscViewer csvViewer;
  FILE       *processFile = NULL;
  PetscBool   isnull;
  PetscDraw   draw;
#if defined(PETSC_HAVE_MKSTEMP)
  PetscBool isSharedTmp;
#endif

  PetscFunctionBegin;
  // Deal with the PetscDraw we are given
  PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));

  // Clear the file name buffer so all communicated bytes are well-defined
  PetscCall(PetscMemzero(filename, sizeof(filename)));

  // Get the MPI communicator, this process's rank, and the communicator size
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

#if defined(PETSC_HAVE_MKSTEMP)
  // Determine whether the temporary directory is shared
  // Note: This must be done collectively on every rank; it cannot be done on a single rank
  PetscCall(PetscSharedTmp(comm, &isSharedTmp));
#endif

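  // If the system temporary directory is not shared between ranks (e.g. node-local /tmp
  // on a cluster), a shared directory can be supplied instead; for example
  // (hypothetical path): -dmnetwork_view_tmpdir /shared/scratch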
  // Generate and broadcast the temporary file name from rank 0
  if (rank == 0) {
#if defined(PETSC_HAVE_TMPNAM_S)
    // Acquire a temporary file to write to and open an ASCII/CSV viewer
    PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#elif defined(PETSC_HAVE_MKSTEMP) && __STDC_VERSION__ > 199901L
    PetscBool isTmpOverridden;
    size_t    numChars;
    // Same thing, but for POSIX systems on which tmpnam is deprecated
    // Note: Configure may detect mkstemp, but it is not declared when compiling for strict C99, so check additional defines to see if we can use it
    // mkstemp requires us to explicitly specify part of the path, but some systems may not allow files in /tmp/, so provide an option for it
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), &isTmpOverridden));
    // If not specified by option, try using a shared tmp on the system
    if (!isTmpOverridden) {
      // Validate that, if tmp is not overridden, it is at least shared
      PetscCheck(isSharedTmp, comm, PETSC_ERR_SUP_SYS, "Temporary file directory is not shared between ranks, try using -dmnetwork_view_tmpdir to specify a shared directory");
      PetscCall(PetscGetTmp(PETSC_COMM_SELF, filename, sizeof(filename)));
    }
    // Make sure the filename ends with a '/'
    PetscCall(PetscStrlen(filename, &numChars));
    if (filename[numChars - 1] != '/') {
      filename[numChars]     = '/';
      filename[numChars + 1] = 0;
    }
    // Perform the actual temporary file creation
    PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
    PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#else
    // Same thing, but for older C versions which don't have the safe form
    PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#endif
  }

  // Broadcast the filename to all other MPI ranks
  PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));

  PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, filename, &csvViewer));
  PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));

  // Use the CSV viewer to write out the local network
  PetscCall(DMView_Network_CSV(dm, csvViewer));

  // Close the viewer
  PetscCall(PetscViewerDestroy(&csvViewer));

  // Expand ${PETSC_DIR} to locate the viewer script
  PetscCall(PetscStrreplace(PETSC_COMM_WORLD, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
  PetscCall(PetscFixFilename(scriptFile, scriptFile));
  // Generate the system call for 'python3 $PETSC_DIR/share/petsc/bin/dmnetwork_view.py <file>'
  PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
  PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, (isnull ? "-tx" : ""), filename));

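  // For illustration, the assembled command resembles (paths hypothetical):
  //   /usr/bin/python3 $PETSC_DIR/share/petsc/bin/dmnetwork_view.py /tmp/fileXXXXXX
  // with the additional "-tx" flag inserted when the draw viewer is null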
#if defined(PETSC_HAVE_POPEN)
  // Perform the call to run the python script (Note: while this is called on all ranks, PetscPOpen only runs the command on rank 0)
  PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, proccall, "r", &processFile));
  if (processFile != NULL) {
    while (fgets(streamBuffer, sizeof(streamBuffer), processFile) != NULL) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s", streamBuffer));
  }
  PetscCall(PetscPClose(PETSC_COMM_WORLD, processFile));
#else
  // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
  if (rank == 0) PetscCheck(system(proccall) == 0, comm, PETSC_ERR_SYS, "Failed to call viewer script");
  // Barrier so that all ranks wait until the call completes (must be called by every rank, not just rank 0)
  PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
#endif
  // Clean up the temporary file on rank 0
  if (rank == 0) PetscCheck(remove(filename) == 0, comm, PETSC_ERR_SYS, "Failed to delete temporary file");
  PetscFunctionReturn(PETSC_SUCCESS);
}

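/*
  Usage sketch (typical PETSc options, shown for illustration): the Matplotlib path
  above is normally reached through a draw viewer, e.g.

    ./app -dm_view draw

  which writes the network to a temporary CSV file and runs
  $PETSC_DIR/share/petsc/bin/dmnetwork_view.py on it.
*/
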
PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    nsubnet = network->cloneshared->Nsubnet; /* number of subnetworks */
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "  NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices, network->cloneshared->Nsvtx));
    }

    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices (including shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}
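/*
  Viewer selection sketch (illustrative; the "ascii_csv" format-string spelling is an
  assumption): DMView_Network() dispatches on the viewer type and format, so typical
  command-line usage is

    ./app -dm_view                           # synchronized ASCII summary
    ./app -dm_view draw                      # matplotlib rendering of the network
    ./app -dm_view ascii:net.csv:ascii_csv   # CSV written to net.csv
*/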