xref: /petsc/src/sys/logging/handler/impls/nested/xmlviewer.c (revision b93211885051a68d5a4e2963b141f71fa7a24c4c)
1 /*************************************************************************************
2  *    M A R I T I M E  R E S E A R C H  I N S T I T U T E  N E T H E R L A N D S     *
3  *************************************************************************************
4  *    authors: Koos Huijssen, Christiaan M. Klaij                                    *
5  *************************************************************************************
6  *    content: Viewer for writing XML output                                         *
7  *************************************************************************************/
8 #include <petscviewer.h>
9 #include <petsc/private/logimpl.h>
10 #include <petsc/private/fortranimpl.h>
11 #include "xmlviewer.h"
12 #include "lognested.h"
13 
14 static PetscErrorCode PetscViewerXMLStartSection(PetscViewer viewer, const char *name, const char *desc)
15 {
16   PetscInt XMLSectionDepthPetsc;
17   int      XMLSectionDepth;
18 
19   PetscFunctionBegin;
20   PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
21   XMLSectionDepth = (int)XMLSectionDepthPetsc;
22   if (!desc) {
23     PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s>\n", 2 * XMLSectionDepth, "", name));
24   } else {
25     PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s desc=\"%s\">\n", 2 * XMLSectionDepth, "", name, desc));
26   }
27   PetscCall(PetscViewerASCIIPushTab(viewer));
28   PetscFunctionReturn(PETSC_SUCCESS);
29 }
30 
31 /* Initialize a viewer to XML, and initialize the XMLDepth static parameter */
32 static PetscErrorCode PetscViewerInitASCII_XML(PetscViewer viewer)
33 {
34   MPI_Comm comm;
35   char     PerfScript[PETSC_MAX_PATH_LEN + 40];
36 
37   PetscFunctionBegin;
38   PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
39   PetscCall(PetscViewerASCIIPrintf(viewer, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"));
40   PetscCall(PetscStrreplace(comm, "<?xml-stylesheet type=\"text/xsl\" href=\"performance_xml2html.xsl\"?>", PerfScript, sizeof(PerfScript)));
41   PetscCall(PetscViewerASCIIPrintf(viewer, "%s\n", PerfScript));
42   PetscCall(PetscViewerXMLStartSection(viewer, "root", NULL));
43   PetscFunctionReturn(PETSC_SUCCESS);
44 }
45 
46 static PetscErrorCode PetscViewerXMLEndSection(PetscViewer viewer, const char *name)
47 {
48   PetscInt XMLSectionDepthPetsc;
49   int      XMLSectionDepth;
50 
51   PetscFunctionBegin;
52   PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
53   XMLSectionDepth = (int)XMLSectionDepthPetsc;
54   if (XMLSectionDepth > 0) PetscCall(PetscViewerASCIIPopTab(viewer));
55   PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
56   XMLSectionDepth = (int)XMLSectionDepthPetsc;
57   PetscCall(PetscViewerASCIIPrintf(viewer, "%*s</%s>\n", 2 * XMLSectionDepth, "", name));
58   PetscFunctionReturn(PETSC_SUCCESS);
59 }
60 
/* Finalize the XML output: close the top-level "root" element opened by PetscViewerInitASCII_XML() */
static PetscErrorCode PetscViewerFinalASCII_XML(PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscCall(PetscViewerXMLEndSection(viewer, "root"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
68 
69 static PetscErrorCode PetscViewerXMLPutString(PetscViewer viewer, const char *name, const char *desc, const char *value)
70 {
71   PetscInt XMLSectionDepthPetsc;
72   int      XMLSectionDepth;
73 
74   PetscFunctionBegin;
75   PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
76   XMLSectionDepth = (int)XMLSectionDepthPetsc;
77   if (!desc) {
78     PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s>%s</%s>\n", 2 * XMLSectionDepth, "", name, value, name));
79   } else {
80     PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s desc=\"%s\">%s</%s>\n", 2 * XMLSectionDepth, "", name, desc, value, name));
81   }
82   PetscFunctionReturn(PETSC_SUCCESS);
83 }
84 
85 static PetscErrorCode PetscViewerXMLPutInt(PetscViewer viewer, const char *name, const char *desc, int value)
86 {
87   PetscInt XMLSectionDepthPetsc;
88   int      XMLSectionDepth;
89 
90   PetscFunctionBegin;
91   PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
92   XMLSectionDepth = (int)XMLSectionDepthPetsc;
93   if (!desc) {
94     PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s>%d</%s>\n", 2 * XMLSectionDepth, "", name, value, name));
95   } else {
96     PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s desc=\"%s\">%d</%s>\n", 2 * XMLSectionDepth, "", name, desc, value, name));
97   }
98   PetscFunctionReturn(PETSC_SUCCESS);
99 }
100 
/* Write a complete element <name>value</name>, rendering value with the caller-supplied
   printf conversion (e.g. "%e", "%f", "%g").
   NOTE(review): the composed buffer is itself passed to PetscViewerASCIIPrintf() as a
   format string, so `name` must not contain '%' and `format` must consume exactly the
   one double argument; all callers in this file pass literal names and formats. */
static PetscErrorCode PetscViewerXMLPutDouble(PetscViewer viewer, const char *name, PetscLogDouble value, const char *format)
{
  PetscInt XMLSectionDepthPetsc;
  int      XMLSectionDepth;
  char     buffer[1024];

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
  XMLSectionDepth = (int)XMLSectionDepthPetsc;
  /* Embed indentation and the surrounding tags around the caller's format, then print with value */
  PetscCall(PetscSNPrintf(buffer, sizeof(buffer), "%*s<%s>%s</%s>\n", 2 * XMLSectionDepth, "", name, format, name));
  PetscCall(PetscViewerASCIIPrintf(viewer, buffer, value));
  PetscFunctionReturn(PETSC_SUCCESS);
}
114 
/* Print a "runspecification" XML section with run metadata: executable name, architecture,
   host, number of MPI processes, user, start date, PETSc version, and notable build options */
static PetscErrorCode PetscPrintExeSpecs(PetscViewer viewer)
{
  char        arch[128], hostname[128], username[128], pname[PETSC_MAX_PATH_LEN], date[128];
  char        version[256], buildoptions[128] = "";
  PetscMPIInt size;
  size_t      len;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
  PetscCall(PetscGetArchType(arch, sizeof(arch)));
  PetscCall(PetscGetHostName(hostname, sizeof(hostname)));
  PetscCall(PetscGetUserName(username, sizeof(username)));
  PetscCall(PetscGetProgramName(pname, sizeof(pname)));
  PetscCall(PetscGetDate(date, sizeof(date)));
  PetscCall(PetscGetVersion(version, sizeof(version)));

  PetscCall(PetscViewerXMLStartSection(viewer, "runspecification", "Run Specification"));
  PetscCall(PetscViewerXMLPutString(viewer, "executable", "Executable", pname));
  PetscCall(PetscViewerXMLPutString(viewer, "architecture", "Architecture", arch));
  PetscCall(PetscViewerXMLPutString(viewer, "hostname", "Host", hostname));
  PetscCall(PetscViewerXMLPutInt(viewer, "nprocesses", "Number of processes", size));
  PetscCall(PetscViewerXMLPutString(viewer, "user", "Run by user", username));
  PetscCall(PetscViewerXMLPutString(viewer, "date", "Started at", date));
  PetscCall(PetscViewerXMLPutString(viewer, "petscrelease", "Petsc Release", version));

  /* Accumulate a space-separated list of non-default build options */
  if (PetscDefined(USE_DEBUG)) PetscCall(PetscStrlcat(buildoptions, "Debug ", sizeof(buildoptions)));
  if (PetscDefined(USE_COMPLEX)) PetscCall(PetscStrlcat(buildoptions, "Complex ", sizeof(buildoptions)));
  if (PetscDefined(USE_REAL_SINGLE)) {
    PetscCall(PetscStrlcat(buildoptions, "Single ", sizeof(buildoptions)));
  } else if (PetscDefined(USE_REAL___FLOAT128)) {
    PetscCall(PetscStrlcat(buildoptions, "Quadruple ", sizeof(buildoptions)));
  } else if (PetscDefined(USE_REAL___FP16)) {
    PetscCall(PetscStrlcat(buildoptions, "Half ", sizeof(buildoptions)));
  }
  if (PetscDefined(USE_64BIT_INDICES)) PetscCall(PetscStrlcat(buildoptions, "Int64 ", sizeof(buildoptions)));
#if defined(__cplusplus)
  PetscCall(PetscStrlcat(buildoptions, "C++ ", sizeof(buildoptions)));
#endif
  /* Emit the element only when at least one build option was recorded */
  PetscCall(PetscStrlen(buildoptions, &len));
  if (len) PetscCall(PetscViewerXMLPutString(viewer, "petscbuildoptions", "Petsc build options", buildoptions));
  PetscCall(PetscViewerXMLEndSection(viewer, "runspecification"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
158 
159 #if PetscDefined(USE_LOG)
160 static PetscErrorCode PetscPrintXMLGlobalPerformanceElement(PetscViewer viewer, const char *name, const char *desc, PetscLogDouble local_val, const PetscBool print_average, const PetscBool print_total)
161 {
162   PetscLogDouble min, tot, ratio, avg;
163   MPI_Comm       comm;
164   PetscMPIInt    rank, size;
165   PetscLogDouble valrank[2], max[2];
166 
167   PetscFunctionBegin;
168   PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
169   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
170   PetscCallMPI(MPI_Comm_rank(comm, &rank));
171 
172   valrank[0] = local_val;
173   valrank[1] = (PetscLogDouble)rank;
174   PetscCall(MPIU_Allreduce(&local_val, &min, 1, MPIU_PETSCLOGDOUBLE, MPI_MIN, comm));
175   PetscCall(MPIU_Allreduce(valrank, &max, 1, MPIU_2PETSCLOGDOUBLE, MPI_MAXLOC, comm));
176   PetscCall(MPIU_Allreduce(&local_val, &tot, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, comm));
177   avg = tot / ((PetscLogDouble)size);
178   if (min != 0.0) ratio = max[0] / min;
179   else ratio = 0.0;
180 
181   PetscCall(PetscViewerXMLStartSection(viewer, name, desc));
182   PetscCall(PetscViewerXMLPutDouble(viewer, "max", max[0], "%e"));
183   PetscCall(PetscViewerXMLPutInt(viewer, "maxrank", "rank at which max was found", (PetscMPIInt)max[1]));
184   PetscCall(PetscViewerXMLPutDouble(viewer, "ratio", ratio, "%f"));
185   if (print_average) PetscCall(PetscViewerXMLPutDouble(viewer, "average", avg, "%e"));
186   if (print_total) PetscCall(PetscViewerXMLPutDouble(viewer, "total", tot, "%e"));
187   PetscCall(PetscViewerXMLEndSection(viewer, name));
188   PetscFunctionReturn(PETSC_SUCCESS);
189 }
190 
191 /* Print the global performance: max, max/min, average and total of
192  *      time, objects, flops, flops/sec, memory, MPI messages, MPI message lengths, MPI reductions.
193  */
static PetscErrorCode PetscPrintGlobalPerformance(PetscViewer viewer, PetscLogDouble locTotalTime, PetscLogHandler default_handler)
{
  PetscLogDouble  flops, mem, red, mess;
  PetscInt        num_objects;
  const PetscBool print_total_yes = PETSC_TRUE, print_total_no = PETSC_FALSE, print_average_no = PETSC_FALSE, print_average_yes = PETSC_TRUE;

  PetscFunctionBegin;
  /* Must preserve reduction count before we go on (the reductions performed below would inflate it) */
  red = petsc_allreduce_ct + petsc_gather_ct + petsc_scatter_ct;

  /* Calculate summary information */
  PetscCall(PetscViewerXMLStartSection(viewer, "globalperformance", "Global performance"));

  /*   Time */
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "time", "Time (sec)", locTotalTime, print_average_yes, print_total_no));

  /*   Objects */
  PetscCall(PetscLogHandlerDefaultGetNumObjects(default_handler, &num_objects));
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "objects", "Objects", (PetscLogDouble)num_objects, print_average_yes, print_total_no));

  /*   Flop */
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "mflop", "MFlop", petsc_TotalFlops / 1.0E6, print_average_yes, print_total_yes));

  /*   Flop/sec -- Must talk to Barry here */
  /* Guard against a zero local run time before forming the rate */
  if (locTotalTime != 0.0) flops = petsc_TotalFlops / locTotalTime;
  else flops = 0.0;
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "mflops", "MFlop/sec", flops / 1.0E6, print_average_yes, print_total_yes));

  /*   Memory -- only printed when a positive high-water mark was recorded */
  PetscCall(PetscMallocGetMaximumUsage(&mem));
  if (mem > 0.0) PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "memory", "Memory (MiB)", mem / 1024.0 / 1024.0, print_average_yes, print_total_yes));
  /*   Messages -- each transfer is counted on both sender and receiver, hence the factor 0.5 */
  mess = 0.5 * (petsc_irecv_ct + petsc_isend_ct + petsc_recv_ct + petsc_send_ct);
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "messagetransfers", "MPI Message Transfers", mess, print_average_yes, print_total_yes));

  /*   Message Volume -- same double-counting correction as above */
  mess = 0.5 * (petsc_irecv_len + petsc_isend_len + petsc_recv_len + petsc_send_len);
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "messagevolume", "MPI Message Volume (MiB)", mess / 1024.0 / 1024.0, print_average_yes, print_total_yes));

  /*   Reductions */
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "reductions", "MPI Reductions", red, print_average_no, print_total_no));
  PetscCall(PetscViewerXMLEndSection(viewer, "globalperformance"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
238 #endif
239 
/* Print the cross-rank statistics of one nested-timer quantity: the average, or (when the
 *      spread across ranks is significant) the min/max values and the ranks attaining them.
 *      Values inside [minthreshold, maxthreshold) on every rank are suppressed entirely.
 */
243 static PetscErrorCode PetscPrintXMLNestedLinePerfResults(PetscViewer viewer, const char *name, PetscLogDouble value, PetscLogDouble minthreshold, PetscLogDouble maxthreshold, PetscLogDouble minmaxtreshold)
244 {
245   MPI_Comm       comm; /* MPI communicator in reduction */
246   PetscMPIInt    rank; /* rank of this process */
247   PetscLogDouble val_in[2], max[2], min[2];
248   PetscLogDouble minvalue, maxvalue, tot;
249   PetscMPIInt    size;
250   PetscMPIInt    minLoc, maxLoc;
251 
252   PetscFunctionBegin;
253   PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
254   PetscCallMPI(MPI_Comm_size(comm, &size));
255   PetscCallMPI(MPI_Comm_rank(comm, &rank));
256   val_in[0] = value;
257   val_in[1] = (PetscLogDouble)rank;
258   PetscCall(MPIU_Allreduce(val_in, max, 1, MPIU_2PETSCLOGDOUBLE, MPI_MAXLOC, comm));
259   PetscCall(MPIU_Allreduce(val_in, min, 1, MPIU_2PETSCLOGDOUBLE, MPI_MINLOC, comm));
260   maxvalue = max[0];
261   maxLoc   = (PetscMPIInt)max[1];
262   minvalue = min[0];
263   minLoc   = (PetscMPIInt)min[1];
264   PetscCall(MPIU_Allreduce(&value, &tot, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, comm));
265 
266   if (maxvalue < maxthreshold && minvalue >= minthreshold) {
267     /* One call per parent or NO value: don't print */
268   } else {
269     PetscCall(PetscViewerXMLStartSection(viewer, name, NULL));
270     if (maxvalue > minvalue * minmaxtreshold) {
271       PetscCall(PetscViewerXMLPutDouble(viewer, "avgvalue", tot / size, "%g"));
272       PetscCall(PetscViewerXMLPutDouble(viewer, "minvalue", minvalue, "%g"));
273       PetscCall(PetscViewerXMLPutDouble(viewer, "maxvalue", maxvalue, "%g"));
274       PetscCall(PetscViewerXMLPutInt(viewer, "minloc", NULL, minLoc));
275       PetscCall(PetscViewerXMLPutInt(viewer, "maxloc", NULL, maxLoc));
276     } else {
277       PetscCall(PetscViewerXMLPutDouble(viewer, "value", tot / size, "%g"));
278     }
279     PetscCall(PetscViewerXMLEndSection(viewer, name));
280   }
281   PetscFunctionReturn(PETSC_SUCCESS);
282 }
283 
/* Print one event line of the nested timer tree: its name plus the per-rank statistics of
   time percentage, call-count ratio relative to the parent, Mflop/s, MiB/s, and reductions/s */
static PetscErrorCode PetscLogNestedTreePrintLine(PetscViewer viewer, const PetscEventPerfInfo *perfInfo, int childCount, int parentCount, const char *name, PetscLogDouble totalTime)
{
  PetscLogDouble time = perfInfo->time;
  PetscLogDouble timeMx;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
  PetscCall(MPIU_Allreduce(&time, &timeMx, 1, MPIU_PETSCLOGDOUBLE, MPI_MAX, comm));
  PetscCall(PetscViewerXMLPutString(viewer, "name", NULL, name));
  /* time expressed as a percentage of the total run time */
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "time", time / totalTime * 100.0, 0, 0, 1.02));
  /* calls relative to the parent's count; ratios in [0.99, 1.01) are suppressed */
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "ncalls", parentCount > 0 ? ((PetscLogDouble)childCount) / ((PetscLogDouble)parentCount) : 1.0, 0.99, 1.01, 1.02));
  /* rates are reported as 0 when this rank's time is negligible (< 0.1% of the max over ranks),
     which also avoids dividing by a (near-)zero time */
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "mflops", time >= timeMx * 0.001 ? 1e-6 * perfInfo->flops / time : 0, 0, 0.01, 1.05));
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "mbps", time >= timeMx * 0.001 ? perfInfo->messageLength / (1024 * 1024 * time) : 0, 0, 0.01, 1.05));
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "nreductsps", time >= timeMx * 0.001 ? perfInfo->numReductions / time : 0, 0, 0.01, 1.05));
  PetscFunctionReturn(PETSC_SUCCESS);
}
301 
302 static PetscErrorCode PetscNestedNameGetBase(const char name[], const char *base[])
303 {
304   size_t n;
305   PetscFunctionBegin;
306   PetscCall(PetscStrlen(name, &n));
307   while (n > 0 && name[n - 1] != ';') n--;
308   *base = &name[n];
309   PetscFunctionReturn(PETSC_SUCCESS);
310 }
311 
/* Recursively print the direct children of parent_node (whose descendants are laid out
   contiguously in tree[]/perf[]) in descending order of max-over-ranks time. Children
   below threshold_time are lumped into an "other" entry (XML) or folded back into the
   parent (flamegraph); the parent's remaining self-time is printed as "self".
   NOTE: parent_info is mutated in place — each child's counters are subtracted from it
   so that it ends up holding the parent's self contribution only. */
static PetscErrorCode PetscLogNestedTreePrint(PetscViewer viewer, double total_time, double threshold_time, const PetscNestedEventNode *parent_node, PetscEventPerfInfo *parent_info, const PetscNestedEventNode tree[], PetscEventPerfInfo perf[], PetscLogNestedType type, PetscBool print_events)
{
  PetscInt           num_children = 0, num_printed;
  PetscInt           num_nodes    = parent_node->num_descendants;
  PetscInt          *perm;
  PetscReal         *times;
  PetscEventPerfInfo other;

  PetscFunctionBegin;
  /* Direct children are found by skipping over each child's own descendants */
  for (PetscInt node = 0; node < num_nodes; node += 1 + tree[node].num_descendants) {
    PetscAssert(tree[node].parent == parent_node->id, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Failed tree invariant");
    num_children++;
  }
  num_printed = num_children;
  PetscCall(PetscMemzero(&other, sizeof(other)));
  /* Two extra slots: index num_children is the parent's self entry (perm -1),
     index num_children+1 is the "other" aggregate (perm -2) */
  PetscCall(PetscMalloc2(num_children + 2, &times, num_children + 2, &perm));
  for (PetscInt i = 0, node = 0; node < num_nodes; i++, node += 1 + tree[node].num_descendants) {
    PetscLogDouble child_time = perf[node].time;

    perm[i] = node;
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &child_time, 1, MPI_DOUBLE, MPI_MAX, PetscObjectComm((PetscObject)viewer)));
    /* Store negated so that an ascending sort orders children by descending time */
    times[i] = -child_time;

    /* Subtract the child's contribution, leaving parent_info with the parent's self counters */
    parent_info->time -= perf[node].time;
    parent_info->flops -= perf[node].flops;
    parent_info->numMessages -= perf[node].numMessages;
    parent_info->messageLength -= perf[node].messageLength;
    parent_info->numReductions -= perf[node].numReductions;
    if (child_time < threshold_time) {
      /* Too small to print on its own: fold into "other" (XML) or back into the parent (flamegraph) */
      PetscEventPerfInfo *add_to = (type == PETSC_LOG_NESTED_XML) ? &other : parent_info;

      add_to->time += perf[node].time;
      add_to->flops += perf[node].flops;
      add_to->numMessages += perf[node].numMessages;
      add_to->messageLength += perf[node].messageLength;
      add_to->numReductions += perf[node].numReductions;
      add_to->count += perf[node].count;
      num_printed--;
    }
  }
  perm[num_children]      = -1;
  times[num_children]     = -parent_info->time;
  perm[num_children + 1]  = -2;
  times[num_children + 1] = -other.time;
  /* MPI_MIN on the negated times == max over ranks of the self and "other" times */
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &times[num_children], 2, MPI_DOUBLE, MPI_MIN, PetscObjectComm((PetscObject)viewer)));
  if (type == PETSC_LOG_NESTED_FLAMEGRAPH) {
    /* The output is given as an integer in microseconds because otherwise the file cannot be read
     * by apps such as speedscope (https://speedscope.app/). */
    PetscCall(PetscViewerASCIIPrintf(viewer, "%s %" PetscInt64_FMT "\n", parent_node->name, (PetscInt64)(-times[num_children] * 1e6)));
  }
  if (other.time > 0.0) num_printed++;
  // number of items other than "self" that will be printed
  if (num_printed == 0) {
    PetscCall(PetscFree2(times, perm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  // sort descending by time
  PetscCall(PetscSortRealWithArrayInt(num_children + 2, times, perm));
  if (type == PETSC_LOG_NESTED_XML && print_events) PetscCall(PetscViewerXMLStartSection(viewer, "events", NULL));
  for (PetscInt i = 0; i < num_children + 2; i++) {
    PetscInt       node       = perm[i];
    PetscLogDouble child_time = -times[i];

    /* print real children above the threshold, and self/other whenever they have any time */
    if (child_time >= threshold_time || (node < 0 && child_time > 0.0)) {
      if (type == PETSC_LOG_NESTED_XML) {
        PetscCall(PetscViewerXMLStartSection(viewer, "event", NULL));
        if (node == -1) {
          /* the parent's own time after all children were subtracted */
          PetscCall(PetscLogNestedTreePrintLine(viewer, parent_info, 0, 0, "self", total_time));
        } else if (node == -2) {
          /* aggregate of all children that fell below the print threshold */
          PetscCall(PetscLogNestedTreePrintLine(viewer, &other, other.count, parent_info->count, "other", total_time));
        } else {
          const char *base_name = NULL;
          PetscCall(PetscNestedNameGetBase(tree[node].name, &base_name));
          PetscCall(PetscLogNestedTreePrintLine(viewer, &perf[node], perf[node].count, parent_info->count, base_name, total_time));
          /* recurse into this child's own descendants */
          PetscCall(PetscLogNestedTreePrint(viewer, total_time, threshold_time, &tree[node], &perf[node], &tree[node + 1], &perf[node + 1], type, PETSC_TRUE));
        }
        PetscCall(PetscViewerXMLEndSection(viewer, "event"));
      } else if (node >= 0) {
        PetscCall(PetscLogNestedTreePrint(viewer, total_time, threshold_time, &tree[node], &perf[node], &tree[node + 1], &perf[node + 1], type, PETSC_FALSE));
      }
    }
  }
  if (type == PETSC_LOG_NESTED_XML && print_events) PetscCall(PetscViewerXMLEndSection(viewer, "events"));
  PetscCall(PetscFree2(times, perm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
398 
399 static PetscErrorCode PetscLogNestedTreePrintTop(PetscViewer viewer, PetscNestedEventTree *tree, PetscLogDouble threshold, PetscLogNestedType type)
400 {
401   PetscNestedEventNode *main_stage;
402   PetscNestedEventNode *tree_rem;
403   PetscEventPerfInfo   *main_stage_perf;
404   PetscEventPerfInfo   *perf_rem;
405   PetscLogDouble        time;
406   PetscLogDouble        threshold_time;
407 
408   PetscFunctionBegin;
409   main_stage      = &tree->nodes[0];
410   tree_rem        = &tree->nodes[1];
411   main_stage_perf = &tree->perf[0];
412   perf_rem        = &tree->perf[1];
413   time            = main_stage_perf->time;
414   PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &time, 1, MPI_DOUBLE, MPI_MAX, tree->comm));
415   /* Print (or ignore) the children in ascending order of total time */
416   if (type == PETSC_LOG_NESTED_XML) {
417     PetscCall(PetscViewerXMLStartSection(viewer, "timertree", "Timings tree"));
418     PetscCall(PetscViewerXMLPutDouble(viewer, "totaltime", time, "%f"));
419     PetscCall(PetscViewerXMLPutDouble(viewer, "timethreshold", threshold, "%f"));
420   }
421   threshold_time = time * (threshold / 100.0 + 1.e-12);
422   PetscCall(PetscLogNestedTreePrint(viewer, time, threshold_time, main_stage, main_stage_perf, tree_rem, perf_rem, type, PETSC_FALSE));
423   if (type == PETSC_LOG_NESTED_XML) PetscCall(PetscViewerXMLEndSection(viewer, "timertree"));
424   PetscFunctionReturn(PETSC_SUCCESS);
425 }
426 
/* Entry point for the XML nested-log view: writes the XML prolog, the run specification,
   the global performance summary (when logging is configured), and the timer tree */
PETSC_INTERN PetscErrorCode PetscLogHandlerView_Nested_XML(PetscLogHandler_Nested nested, PetscNestedEventTree *tree, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscCall(PetscViewerInitASCII_XML(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "<!-- PETSc Performance Summary: -->\n"));
  PetscCall(PetscViewerXMLStartSection(viewer, "petscroot", NULL));

  // Print global information about this run
  PetscCall(PetscPrintExeSpecs(viewer));

#if PetscDefined(USE_LOG)
  {
    PetscEventPerfInfo *main_stage_info;
    PetscLogDouble      locTotalTime;

    /* this rank's main-stage time serves as the local total run time */
    PetscCall(PetscLogHandlerDefaultGetEventPerfInfo(nested->handler, 0, 0, &main_stage_info));
    locTotalTime = main_stage_info->time;
    PetscCall(PetscPrintGlobalPerformance(viewer, locTotalTime, nested->handler));
  }
#endif
  PetscCall(PetscLogNestedTreePrintTop(viewer, tree, nested->threshold, PETSC_LOG_NESTED_XML));
  PetscCall(PetscViewerXMLEndSection(viewer, "petscroot"));
  PetscCall(PetscViewerFinalASCII_XML(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
452 
/* Entry point for the flamegraph nested-log view: prints one "name time_us" line per
   event (names are ';'-separated paths, times in integer microseconds), with no XML wrapping */
PETSC_INTERN PetscErrorCode PetscLogHandlerView_Nested_Flamegraph(PetscLogHandler_Nested nested, PetscNestedEventTree *tree, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscCall(PetscLogNestedTreePrintTop(viewer, tree, nested->threshold, PETSC_LOG_NESTED_FLAMEGRAPH));
  PetscFunctionReturn(PETSC_SUCCESS);
}
459