xref: /petsc/lib/petsc/bin/maint/gcov.py (revision 6d8694c4fbab79f9439f1ad13c0386ba7ee1ca4b)
1#!/usr/bin/env python3
2"""
3# Created: Wed Oct  5 18:31:45 2022 (-0400)
4# @author: Jacob Faibussowitsch
5"""
6import sys
7import json
8import shutil
9import pathlib
10import tempfile
11import subprocess
12import lxml
13import lxml.etree
14import copy
15import functools
16import textwrap
17import os
18
19# version of gcovr JSON format that this script was tested against and knows how to write
20# see https://gcovr.com/en/stable/output/json.html#json-output
21known_gcovr_json_versions = {'0.1', '0.2', '0.3', '0.5'}
22
class Logger:
  """
  Minimal logging helper that writes informational messages to a stdout stream (only when
  verbose) and error messages to a stderr stream. The streams may be swapped out at
  runtime via setup(), e.g. to redirect output to log files in CI mode.
  """
  def __init__(self, *args, **kwargs):
    # remember the original process streams so flush() never closes them
    self._stdout = sys.stdout
    self._stderr = sys.stderr
    self.setup(*args, **kwargs)
    return

  def setup(self, stdout = None, stderr = None, verbosity = 0):
    """
    (Re)configure the logger. STDOUT and STDERR default to the original process streams.
    VERBOSITY enables informational logging via log() when nonzero.
    """
    if stdout is None:
      stdout = self._stdout

    if stderr is None:
      stderr = self._stderr

    # flush (and close, if we own them) any previously configured streams
    self.flush(close=True)
    self.stdout  = stdout
    self.stderr  = stderr
    self.verbose = bool(verbosity)
    return

  @staticmethod
  def __log(stream, *args, **kwargs):
    # forward to print(), defaulting the output file to STREAM
    kwargs.setdefault('file', stream)
    print(*args, **kwargs)
    return

  def log(self, *args, **kwargs):
    """Print to the configured stdout stream, but only in verbose mode"""
    if self.verbose:
      self.__log(self.stdout, *args, **kwargs)
    return

  def log_error(self, *args, **kwargs):
    """Print to the configured stderr stream, unconditionally"""
    self.__log(self.stderr, *args, **kwargs)
    return

  def flush(self, close=False):
    """
    Flush both streams (if set). If CLOSE is true also close them, unless they are the
    original process streams, which must stay open
    """
    for stream_name in ('stdout', 'stderr'):
      stream = getattr(self, stream_name, None)
      if stream:
        stream.flush()
        if close and stream not in {self._stdout, self._stderr}:
          stream.close()
    return

  def __del__(self):
    try:
      self.flush(close=True)
    except Exception:
      # the interpreter may be tearing down and the streams may already be gone; a
      # destructor must never raise. Note: narrowed from a bare 'except:' so that
      # SystemExit/KeyboardInterrupt are not swallowed
      pass
    return
73
# module-level singleton logger used throughout this script; reconfigured via setup()
# once command-line options (or CI log files) are known
gcov_logger = Logger()
75
@functools.total_ordering
class Version:
  """
  A comparable (major, minor, micro) version triple. Comparison operators accept either
  another Version or any iterable of up to 3 integer-convertible components (missing
  components are padded with 0, so Version(5, 1, 0) == (5, 1)).
  """
  def __init__(self, major, minor, micro):
    self.major     = int(major)
    self.minor     = int(minor)
    self.micro     = int(micro)
    self.__version = (self.major, self.minor, self.micro)
    return

  @classmethod
  def from_string(cls, ver):
    """Build a Version from a dotted string, e.g. '5.1' -> Version(5, 1, 0)"""
    return cls.from_iterable(ver.split('.'))

  @classmethod
  def from_iterable(cls, it):
    """Build a Version from an iterable of up to 3 integer-convertible values"""
    version = list(map(int, it))
    assert len(version) <= 3
    while len(version) < 3:
      # pad out remaining minor, subminor versions
      version.append(0)
    return cls(*version)

  def __str__(self):
    return str(self.__version)

  def __repr__(self):
    return 'Version(major={}, minor={}, micro={})'.format(self.major, self.minor, self.micro)

  def __getitem__(self, idx):
    return self.__version[idx]

  def __hash__(self):
    # defining __eq__ alone would set __hash__ to None and make instances unhashable
    # (unusable in sets/dict keys); hash like the underlying tuple so it is consistent
    # with __eq__
    return hash(self.__version)

  def __eq__(self, other):
    if not isinstance(other, type(self)):
      other = self.from_iterable(other)
    return self.__version == other.__version

  def __lt__(self, other):
    if not isinstance(other, type(self)):
      other = self.from_iterable(other)
    return self.__version < other.__version
116
class GcovrRunner:
  """
  Builds gcovr command lines tailored to the installed gcovr version (supported flags
  have changed significantly between gcovr releases)
  """
  def __init__(self, petsc_dir, verbosity):
    # petsc_dir: root directory handed to gcovr via --root
    # verbosity: integer; values > 1 add --verbose to every gcovr invocation
    self.petsc_dir = petsc_dir
    self.verbosity = verbosity
    return

  @classmethod
  def gcovr_version(cls):
    """
    Return the installed gcovr version as a Version, determined by running
    'gcovr --version'. The result is cached as a class attribute so gcovr is only
    invoked once per process.

    Raises RuntimeError if the version cannot be parsed from the output.
    """
    attr_name = '__gcovr_version'
    version   = getattr(cls, attr_name, None)
    if version:
      return version

    raw_output = subprocess_check_output(['gcovr', '--version'])
    # first output line looks like 'gcovr <version>'; take the second token
    output     = raw_output.splitlines()[0].split()
    try:
      version = output[1]
    except IndexError as ie:
      mess = 'Invalid gcovr version string, cannot determine gcovr version from:\n{}'.format(raw_output)
      raise RuntimeError(mess) from ie

    version = Version.from_string(version)
    setattr(cls, attr_name, version)
    return version

  def build_command(self, *args):
    """
    Return a complete gcovr command line (a list of strings) consisting of the common
    base options, ARGS, and version-specific fixups. Flags not supported by the detected
    gcovr version are silently stripped from ARGS, and '.' is appended as the search
    path.
    """
    base_args = [
      'gcovr', '-j', '4', '--root', self.petsc_dir, '--exclude-throw-branches',
      '--exclude-unreachable-branches'
    ]
    if self.verbosity > 1:
      base_args.append('--verbose')

    args    = base_args + list(args)
    version = self.gcovr_version()
    if version < (5,) and '--html-self-contained' in args:
      # --html-self-contained since gcovr 5.0
      args.remove('--html-self-contained')
    if version < (5,1) and '--decisions' in args:
      # --decisions since gcovr 5.1
      args.remove('--decisions')
    if (5,) < version <= (5,2):
      # gcovr 5.1 and 5.2 add an assertion that merging functions (who have the same
      # mangled name) must have the same line number. Sounds sane in theory but does not
      # play well in practice, especially with macros. For example the following would
      # trigger an assertion:
      #
      # #if defined(FOO)
      #   int my_func() { return 0; }
      # #else
      #   int my_func() { retunr 1; }
      # #endif
      #
      # So to work around it we monkey-patch the gcovr executable every time to disable
      # the check
      monkey_patched = 'import sys; import gcovr; import gcovr.merging; gcovr.merging.DEFAULT_MERGE_OPTIONS.ignore_function_lineno = True; import gcovr.__main__; sys.exit(gcovr.__main__.main())'
      # replace the leading 'gcovr' executable (args[0]) with an inline python driver
      args = ['python3', '-c', monkey_patched] + args[1:]
    if version >= (6,):
      args.extend([
        '--exclude-noncode-lines',
        '--merge-mode-functions', 'separate',
      ])
    args.append('.')
    return list(map(str, args))
181
def sanitize_path(path):
  """
  Return a fully resolved pathlib.Path built from PATH.

  Raises FileNotFoundError if the resolved path does not exist. A real exception is
  raised (rather than an assert) so the check is not silently stripped when running
  under 'python -O'.
  """
  path = pathlib.Path(path).resolve()
  if not path.exists():
    raise FileNotFoundError('path {} does not exist'.format(path))
  return path
189
def call_subprocess(func, *args, error_ok=False, **kwargs):
  """
  Call FUNC(*ARGS, **KWARGS) -- where FUNC is a subprocess routine and ARGS[0] is the
  command list -- logging the command first, and return FUNC's result.

  On subprocess.CalledProcessError a full diagnostic dump (exit code, command, stdout,
  stderr) is printed. ERROR_OK may be an int (or an iterable of ints) of tolerated
  nonzero exit codes; if the failure matches one of them, None is returned instead of
  raising. Any other failure re-raises the original CalledProcessError.
  """
  gcov_logger.log('running', ' '.join(map(str, args[0])).join(("'","'")))
  ret = None
  try:
    ret = func(*args, **kwargs)
  except subprocess.CalledProcessError as cpe:
    return_code = cpe.returncode
    print('subprocess error, command returned exit code:', return_code)
    print('command:')
    print(' '.join(map(str, cpe.cmd)))
    print('stdout:')
    print(cpe.output)
    if getattr(cpe, 'stderr', None):
      print('stderr:')
      print(cpe.stderr)

    if error_ok:
      if isinstance(error_ok, int):
        error_ok = {error_ok}
      else:
        error_ok = set(map(int, error_ok))

      if return_code in error_ok:
        # tolerated failure; note ret is still None at this point
        return ret
    # bare raise re-raises the active exception with its original traceback intact
    raise
  return ret
216
def subprocess_run(*args, **kwargs):
  """
  Thin wrapper around subprocess.run() (via call_subprocess) that emulates the
  capture_output keyword argument on python < 3.7
  """
  # on old interpreters translate capture_output=True into explicit PIPEs
  if sys.version_info < (3,7) and kwargs.pop('capture_output', None):
    kwargs.setdefault('stdout', subprocess.PIPE)
    kwargs.setdefault('stderr', subprocess.PIPE)
  return call_subprocess(subprocess.run, *args, **kwargs)
224
def subprocess_check_output(*args, **kwargs):
  """Wrapper around subprocess.check_output() defaulting to text (str) output"""
  if 'universal_newlines' not in kwargs:
    kwargs['universal_newlines'] = True
  return call_subprocess(subprocess.check_output, *args, **kwargs)
228
def load_report(json_file):
  """
  Read the gcovr JSON report at JSON_FILE (a pathlib.Path) and return the parsed
  dictionary.

  Raises RuntimeError if the report's 'gcovr/format_version' key is missing or is not
  one of the versions this script knows how to process (known_gcovr_json_versions).
  """
  with json_file.open() as fd:
    json_data = json.load(fd)

  # a missing version key is treated as an unknown (and hence incompatible) version
  json_format_version = json_data.get('gcovr/format_version', 'unknown')

  if str(json_format_version) not in known_gcovr_json_versions:
    mess = 'gcovr JSON version \'{}\' is incompatible, script is tested with version(s) {}'.format(
      json_format_version, known_gcovr_json_versions
    )
    raise RuntimeError(mess)

  return json_data
248
def store_report(json_data, dest_path):
  """
  Serialize JSON_DATA as JSON into DEST_PATH (a pathlib.Path) and return DEST_PATH
  """
  with dest_path.open('w') as stream:
    stream.write(json.dumps(json_data))
  return dest_path
256
def get_branch_diff(merge_branch):
  """
  Get the diff between MERGE_BRANCH and current branch and return a dictionary RET which has entries:

  ret = {
    file_name : [list, of, lines, changed, by, branch],
  }
  """
  ret = {}

  merge_branch_name       = str(merge_branch)
  # 'branch...' (triple-dot) diffs against the merge-base, i.e. only changes made on
  # the current branch
  files_changed_by_branch = subprocess_check_output(
    ['git', 'diff', '--name-only', merge_branch_name + '...']
  ).splitlines()
  # skip test data files and test 'output' directories, they hold no coverable code
  files_changed_by_branch = [f for f in files_changed_by_branch if not f.startswith('share/petsc/datafiles/') and os.path.basename(os.path.dirname(f)) != 'output']
  for file_name in files_changed_by_branch:
    # blame only the commits introduced by the branch ('merge_branch..'); -s suppresses
    # author/date info and --show-name keeps the file-name column in the output
    blame_output = subprocess_run(
      ['git', 'blame', '-s', '--show-name', merge_branch_name + '..', file_name],
      capture_output=True, universal_newlines=True
    )

    try:
      blame_output.check_returncode()
    except subprocess.CalledProcessError:
      stderr = blame_output.stderr.strip()
      if stderr.startswith('fatal: no such path') and stderr.endswith('in HEAD'):
        # The branch removed a file from the repository. Since it no longer exists there
        # will obviously not be any coverage of it. So we ignore it.
        gcov_logger.log('File', "'"+file_name+"'", 'was deleted by branch. Skipping it')
        continue
      raise

    # git blame marks lines NOT introduced by the requested range with a leading '^';
    # filter those out so only lines added/changed by the branch remain
    changed_ret = subprocess_run(
      ['grep', '-v', r'^\^'], input=blame_output.stdout, capture_output=True, universal_newlines=True
    )

    try:
      changed_ret.check_returncode()
    except subprocess.CalledProcessError:
      if changed_ret.returncode == 1:
        # grep returns exit code 1 if it matches nothing, i.e. the blame only had
        # '^'-prefixed (pre-existing) lines
        gcov_logger.log('File', "'"+file_name+"'", 'only contained deletions. Skipping it!')
        continue
      raise

    # with --show-name each line in the blame is in the format
    #
    # commit_hash file_name line_number) line_of_code
    #
    # we want a list of line_numbers (field index 2, sans the trailing ')')
    ret[file_name] = [
      int(line[2].replace(')','')) for line in map(str.split, changed_ret.stdout.splitlines())
    ]
  return ret
311
def extract_tarballs(base_paths, dest_dir):
  """
  Search every path in BASE_PATHS for gcovr report tarballs ('*.json.tar.*') and unpack
  them all into DEST_DIR (created if necessary). Returns DEST_DIR.

  Raises RuntimeError if no tarballs are found.
  """
  tar_files = []
  for path in base_paths:
    if path.is_dir():
      tar_files.extend(path.glob('*.json.tar.*'))
    else:
      assert path.is_file(), 'path {} is not a file'.format(path)
      tar_files.append(path)

  # de-duplicate while keeping the original discovery order
  tar_files = list(dict.fromkeys(tar_files))
  if not tar_files:
    mess = 'could not locate gcovr report tarballs in:\n{}'.format(
      '\n'.join('- ' + str(p) for p in base_paths)
    )
    raise RuntimeError(mess)

  gcov_logger.log('found', len(tar_files), 'tarball(s):')
  gcov_logger.log('- '+'\n- '.join(map(str, tar_files)))

  dest_dir.mkdir(exist_ok=True)
  for tar_path in tar_files:
    tarball = str(tar_path)
    gcov_logger.log('extracting', tarball, 'in directory', dest_dir)
    shutil.unpack_archive(tarball, extract_dir=str(dest_dir))
  return dest_dir
343
def merge_reports(runner, base_paths, dest_path):
  """
  Search BASE_PATH for a list of tarballs containing gcovr reports, unpack them and merge their
  contents. Write the merged result to DEST_PATH (a '.json' suffix is appended if
  missing) and return the final destination path.
  """
  # gcovr infers the output format from the file suffix, so force '.json'
  if dest_path.suffix != '.json':
    dest_path = pathlib.Path(str(dest_path) + '.json').resolve()

  # remove any stale merged report left over from a previous run
  try:
    dest_path.unlink()
  except FileNotFoundError:
    pass

  # unpack the tarballs in base_path and merge them if necessary
  with tempfile.TemporaryDirectory() as reports_path:
    reports_path = sanitize_path(reports_path)
    extract_tarballs(base_paths, reports_path)

    reports = [report for report in reports_path.iterdir() if report.name.endswith('.json')]
    assert len(reports) > 0, 'no gcovr reports in {}'.format(reports_path)

    gcov_logger.log('found', len(reports), 'report(s):')
    gcov_logger.log('- '+'\n- '.join(map(str, reports)))

    if len(reports) == 1:
      gcov_logger.log('copying', reports[0], 'to', dest_path)
      # only 1 report? no need to merge anything, just copy to new name
      return shutil.copy2(reports[0], dest_path)

    gcov_logger.log('merging reports to', dest_path)
    command = runner.build_command(
      '--json', '--output', dest_path, '--decisions', '--exclude-lines-by-pattern', r'^\s*SETERR.*'
    )
    # each input report is handed to gcovr via its own --add-tracefile flag
    for report in reports:
      command.extend(['--add-tracefile', report])
    gcov_logger.log(subprocess_check_output(command))

  return dest_path
382
def create_and_clear(dir_path, delete_pred = None):
  """
  Ensure directory at DIR_PATH exists (creating it if need be) and clear files in it according to
  DELETE_PRED. If DELETE_PRED is None, deletes all files in DIR_PATH. Not recursive.
  Returns DIR_PATH.
  """
  if delete_pred is None:
    # delete every regular file; the bound method replaces an equivalent lambda (the
    # original also had a mis-indented body here)
    delete_pred = pathlib.Path.is_file

  if dir_path.exists():
    assert dir_path.is_dir(), "Directory path {} must be a directory".format(dir_path)
    for path in filter(delete_pred, dir_path.iterdir()):
      path.unlink()
  else:
    dir_path.mkdir()

  return dir_path
399
def generate_html(runner, merged_report, dest_dir, symlink_dir=None, report_name=None, html_title=None):
  """
  Generate a HTML coverage report from MERGED_REPORT into DEST_DIR.

  REPORT_NAME defaults to 'report.html' (a '.html' suffix is appended if missing).
  HTML_TITLE defaults to 'PETSc Code Coverage Report'. If SYMLINK_DIR is given, a
  relative symlink to the main report file is (re)created there and its path returned;
  otherwise None is returned.
  """
  report_name = 'report.html' if report_name is None else report_name
  if not report_name.endswith('.html'):
    report_name += '.html'

  if html_title is None:
    html_title = 'PETSc Code Coverage Report'
  # gcovr wants the title single-quoted
  html_title = "'{}'".format(html_title)

  # wipe any previously generated html files, keeping everything else
  dest_dir    = create_and_clear(dest_dir, delete_pred = lambda p: p.suffix.endswith('html'))
  report_path = dest_dir/report_name

  gcovr_args = [
    '--output', report_path,
    '--add-tracefile', merged_report,
    '--html-details',
    '--html-title', html_title,
    '--html-self-contained',
    '--sort-percentage',
    '--decisions',
    '--exclude-lines-by-pattern', r'^\s*SETERR.*',
    '--exclude', r'arch-ci.*'
  ]
  # return-code of 7 means some files were not found, which is tolerated
  subprocess_check_output(runner.build_command(*gcovr_args), error_ok = 7)

  if symlink_dir is None:
    return None

  assert symlink_dir.exists()
  symlink_name = symlink_dir/report_path.name
  try:
    symlink_name.unlink()
  except FileNotFoundError:
    pass
  # link relative to the symlink directory so the whole tree stays relocatable
  symlink_name.symlink_to(report_path.relative_to(symlink_dir))
  return symlink_name
443
def generate_xml(runner, merged_report, dest_dir):
  """
  Generate a set of XML (cobertura) coverage files from MERGED_REPORT in DEST_DIR, one
  file per package.
  """
  dest_dir    = create_and_clear(dest_dir, delete_pred = lambda p: p.suffix.endswith('xml'))
  mega_report = dest_dir/'mega_report.xml'

  ret = subprocess_check_output(
    runner.build_command(
      '--output', mega_report,
      '--add-tracefile', merged_report,
      '--xml-pretty',
      '--print-summary',
      '--exclude', r'arch-ci.*'
    ),
    error_ok = 7 # return-code of 7 means some files were not found
  )
  # print the output for CI
  print(ret)
  ## Workaround for https://gitlab.com/gitlab-org/gitlab/-/issues/328772. Pipeline
  ## artifacts are limited to 10M. So split the single cobertura xml (which is often
  ## >40MB) into one file per package, since there seems to be no limit on the _number_ of
  ## files just their size.
  orig_mega_xml_file = lxml.etree.fromstring(mega_report.read_bytes())

  # create a deep copy of the data, we want to preserve the metadata and structure of it,
  # but clear it of any "stuff". Note even though it is called 'empty_template' it is not
  # empty yet
  empty_template = copy.deepcopy(orig_mega_xml_file)
  packages       = empty_template.find('packages')

  # clear out all the existing packages in our copy of the data. The list() is required:
  # removing children while iterating the live element skips every other child
  for p in list(packages):
    packages.remove(p)

  # 'empty_template' is now empty, i.e. contains only the header and description etc. Now
  # we go back through all the packages and use the template to create individual files
  # for each of the packages. The list() below is also required: lxml's append() *moves*
  # an element out of its current tree, so appending while iterating the live 'packages'
  # node directly would likewise skip every other package
  for package in list(orig_mega_xml_file.find('packages')):
    single_package_file = dest_dir/'report-{}.xml'.format(package.attrib['name'])
    gcov_logger.log("Creating package file {}".format(single_package_file))
    xml_to_write  = copy.deepcopy(empty_template)
    packages_node = xml_to_write.find('packages')

    # Add back the one package we want
    packages_node.append(package)

    single_package_file.write_bytes(lxml.etree.tostring(xml_to_write))

  # delete the mega report after we are done
  mega_report.unlink()
  return
496
def do_main(petsc_dir, petsc_arch, merge_branch, base_path, formats, verbosity, ci_mode):
  """
  Main driver.

  Merges all gcovr report tarballs found under BASE_PATH (plus PETSC_DIR/PETSC_ARCH),
  determines which lines the current branch changed relative to MERGE_BRANCH, and emits
  the requested report FORMATS ('html' and/or 'xml') into PETSC_DIR/PETSC_ARCH/gcovr.

  Returns a process exit code: 1 if the branch introduces untested lines, 0 otherwise.
  In CI_MODE the exit code is always 0 and excessive untested code is instead signaled
  by creating a '.CI_FAIL' marker file in the gcovr directory.
  """
  petsc_dir = sanitize_path(petsc_dir)
  assert petsc_dir.is_dir(), 'PETSC_DIR {} is not a directory'.format(petsc_dir)
  petsc_arch_dir = sanitize_path(petsc_dir/petsc_arch)
  base_path      = list(map(sanitize_path, base_path))
  # always also search the arch directory itself for report tarballs
  if base_path[-1] != petsc_arch_dir:
    base_path.append(petsc_arch_dir)

  gcovr_dir = petsc_arch_dir/'gcovr'
  gcovr_dir.mkdir(exist_ok=True)

  if ci_mode:
    # in CI all logger output is captured to files inside the gcovr directory
    stdout_file = gcovr_dir/'merge_gcov.log'
    stderr_file = gcovr_dir/'merge_gcov_errors.log'
    # clear the files
    stdout_file.open('w').close()
    stderr_file.open('w').close()
    # reopen
    stdout = stdout_file.open('w')
    stderr = stderr_file.open('w')
  else:
    stdout = sys.stdout
    stderr = sys.stderr
  gcov_logger.setup(stdout, stderr, verbosity)

  runner        = GcovrRunner(petsc_dir, verbosity)
  merged_report = merge_reports(runner, base_path, gcovr_dir/'merged-gcovr-report.json')

  # file_name -> [line numbers changed by the branch]
  files_changed_by_branch = get_branch_diff(merge_branch)
  merged_report_json      = load_report(merged_report)

  total_testable_lines_by_branch = 0
  gcovr_report_version_str       = merged_report_json['gcovr/format_version']
  gcovr_report_version           = Version.from_string(gcovr_report_version_str)
  # file_name -> [untested line numbers introduced by the branch]
  untested_code_by_branch        = {}
  # a synthetic gcovr JSON report that will contain only the new untested lines
  untested_code_report           = {
    'gcovr/format_version' : gcovr_report_version_str,
    'files'                : []
  }

  # the JSON key used to mark a line as excluded differs between report format versions
  if gcovr_report_version < (0, 5):
    line_exclusion = 'gcovr/noncode'
  elif gcovr_report_version == (0, 5):
    # Since JSON format version 0.5:
    # - The gcovr/noncode field was removed. Instead of generating noncode entries,
    #   the entire line is skipped.
    # - The gcovr/excluded field can be absent if false.
    line_exclusion = 'gcovr/excluded'
  else:
    # In addition to JSON format changes, also since gcovr 6.0:
    # - New --exclude-noncode-lines to exclude noncode lines. Noncode lines are not
    #   excluded by default anymore.
    #
    # should also check that empty lines are nicely handled.
    raise RuntimeError('Check that gcovr still handles report exclusions as above! See comment above')

  for data in merged_report_json['files']:
    file_name = data['file']
    if file_name not in files_changed_by_branch:
      continue

    # lines changed by the branch that have zero coverage hits and are not excluded
    changed_lines = set(files_changed_by_branch[file_name])
    cur_file_data = [
      line['line_number'] for line in data['lines']
      if line['line_number'] in changed_lines and line['count'] == 0 and not line.get(line_exclusion)
    ]

    if cur_file_data:
      # Make a copy of the line data, then iterate through and "invert" it, so that only
      # untested lines are left in. We achieve this by marking every line *except* new,
      # untested lines as "noncode". Gcovr ignores all noncode lines in the report.
      report_data = copy.deepcopy(data)
      for line in report_data['lines']:
        if line['line_number'] in changed_lines and line['count'] == 0:
          # only ignore untested lines added by the branch
          continue
        if gcovr_report_version < (0, 5):
          line['gcovr/noncode'] = True
        else:
          line['gcovr/excluded'] = True

      untested_code_report['files'].append(report_data)
      untested_code_by_branch[file_name] = cur_file_data

    total_testable_lines_by_branch += len(changed_lines)
    # a minor performance optimization, we remove the processed file from the list of
    # files to check for, and if we don't have any more to check for we can just bail
    files_changed_by_branch.pop(file_name)
    if len(files_changed_by_branch.keys()) == 0:
      break

  # generate the html report
  if 'html' in formats:
    # CI mode unconditionally creates the untested line report even if there are no
    # untested lines since the environment must have a valid file to load...
    if ci_mode or untested_code_report['files']:
      untested_report = store_report(untested_code_report, gcovr_dir/'untested-gcovr-report.json')
      generate_html(
        runner, untested_report, gcovr_dir/'html_untested',
        symlink_dir=gcovr_dir, report_name='report_untested.html',
        html_title='PETSc Untested Code Report'
      )
    generate_html(runner, merged_report, gcovr_dir/'html', symlink_dir=gcovr_dir)

  if 'xml' in formats:
    generate_xml(runner, merged_report, gcovr_dir/'xml')

  ret_code     = 0
  ci_fail_file = gcovr_dir/'.CI_FAIL'
  # remove any stale CI failure marker from a previous run
  try:
    ci_fail_file.unlink()
  except FileNotFoundError:
    pass
  if untested_code_by_branch:
    def num_uncovered_lines_allowed(num_lines_changed):
      # the allowance for uncovered lines grows sub-linearly with the size of the change
      import math

      if num_lines_changed < 10:
        # small MRs must cover all changed code
        return 0
      return math.floor(num_lines_changed / (7.0 * math.log(num_lines_changed)))

    ret_code    = 1
    warn_banner = ' WARNING '.join(('*'*40, '*'*40))
    mini_bar    = '-'*5
    gcov_logger.log_error(warn_banner)
    gcov_logger.log_error('This branch introduces untested new code!')
    gcov_logger.log_error('')
    gcov_logger.log_error(mini_bar, 'summary:')
    # print a summary first
    for file_name, lines in untested_code_by_branch.items():
      gcov_logger.log_error('-', len(lines), 'line(s) in', file_name)
    gcov_logger.log_error('')
    gcov_logger.log_error(mini_bar, 'detailed breakdown:')
    # then echo each untested source line verbatim
    for file_name, lines in untested_code_by_branch.items():
      gcov_logger.log_error('\n-', '{}:'.format(file_name))
      with open(file_name) as fd:
        src_lines = fd.readlines()
      for line in lines:
        gcov_logger.log_error('{}:'.format(line), src_lines[line - 1], end='')
    gcov_logger.log_error(warn_banner)
    gcov_logger.log_error('NOTE:')
    gcov_logger.log_error('\n'.join((
      '- If you believe this is a false positive (covered code accused of being uncovered), check again! The vast majority of packages *are* run through coverage.',
      '',
      '- If code is of the form:',
      '',
      '    if (condition) {',
      '      SETERRQ(...); <--- line marked as untested',
      '    }',
      '',
      '  Use PetscCheck()/PetscAssert() instead, they will properly count the error line as tested',
      '',
      '- If the code is part of an extended (multi-line) error path, it is better to explicitly test such code as described at https://petsc.org/main/developers/testing/#testing-errors-and-exceptional-code'
    )))

    # flush stdout, pythons print is line buffered and since we don't end with newline in
    # the prints above it may not have flushed
    gcov_logger.flush()

    # gobble the error code if we are in CI mode. The CI job must not fail, otherwise the
    # environments hosting the reports are not deployed. Instead we signal the error by
    # creating a special .CI_FAIL file in the arches gcovr directory
    if ci_mode:
      ret_code           = 0
      num_untested_lines = sum(map(len, untested_code_by_branch.values()))
      if num_untested_lines > num_uncovered_lines_allowed(total_testable_lines_by_branch):
        # have more uncovered code than was allowed, the CI pipeline must ultimately fail
        ci_fail_file.touch()

  return ret_code
668
def make_error_exc():
  """
  Build a banner Exception pointing the user at the debug log locations, i.e. the
  streams currently configured on gcov_logger
  """
  def add_logfile_path(mess, stream_name):
    # append '  <stream_name>: <location>' to MESS. The logger streams are file
    # objects, so prefer their .name attribute (the actual file path, when the stream
    # is an open file) over the raw object repr
    try:
      stream = getattr(gcov_logger, stream_name)
      path   = getattr(stream, 'name', stream)
    except AttributeError:
      path = 'unknown location'
    mess.append('  {}: {}'.format(stream_name, path))
    return mess

  width = 90
  bars  = '=' * width
  mess  = textwrap.wrap('An error occurred while processing GCOVR results. NOTE THAT DEBUG LOGS ARE LOCATED:', width=width-2, initial_indent='  ', subsequent_indent='  ')
  add_logfile_path(mess, 'stdout')
  add_logfile_path(mess, 'stderr')
  mess.insert(0, bars)
  mess.append(bars)
  return Exception('\n' + '\n'.join(mess))
686
def main(*args, **kwargs):
  """
  Top-level entry point; forwards everything to do_main(). On failure, a banner
  exception pointing at the log file locations is chained onto the original error.
  """
  try:
    return do_main(*args, **kwargs)
  except Exception as e:
    try:
      banner_exc = make_error_exc()
    except Exception as banner_err:
      # building the banner itself failed; chain that error instead
      banner_exc = banner_err
    raise banner_exc from e
696
if __name__ == '__main__':
  import argparse

  petsc_dir  = os.environ.get('PETSC_DIR')
  petsc_arch = os.environ.get('PETSC_ARCH')

  # pass the text as description=; the original passed it positionally, which argparse
  # interprets as prog (the displayed program name), leaving the parser description-less
  parser = argparse.ArgumentParser(description='PETSc gcovr utility')
  parser.add_argument('--petsc_dir', default=petsc_dir, required=petsc_dir is None, type=pathlib.Path, help='PETSc directory')
  parser.add_argument('--petsc_arch', default=petsc_arch, required=petsc_arch is None, help='PETSc build directory name')
  parser.add_argument('-b', '--merge-branch', help='destination branch corresponding to the merge request')
  parser.add_argument('-c', '--ci-mode', action='store_true', help='enable CI mode, which adds all arch-ci-* folders in PETSC_DIR to the base search path, and overrides the log output files')
  parser.add_argument('-p','--base-path', type=pathlib.Path, nargs='*', help='base path containing tarball of gcovr report files for analysis, may be repeated to add multiple base paths')
  parser.add_argument('--html', action='store_true', help='generate HTML output')
  parser.add_argument('--xml', action='store_true', help='generate XML output')
  parser.add_argument('-v', '--verbose', action='count', default=0, help='verbose output, multiple flags increases verbosity')
  parser.add_argument('-l', '--log-output-stdout', default='stdout', const='stdout', nargs='?', help='Output file (or file stream) to log informational output to')
  parser.add_argument('-e', '--log-output-stderr', default='stderr', const='stderr', nargs='?', help='Output file (or file stream) to log errors to')
  args = parser.parse_args()

  formats = [attr for attr in ('html', 'xml') if getattr(args, attr)]

  if len(formats) == 0:
    parser.error('Must supply one of --html or --xml or both')

  # map the literal values 'stdout'/'stderr' to the actual process streams
  # NOTE(review): any other value is passed to Logger.setup() as a plain string, which
  # Logger.flush() cannot handle -- confirm whether file-path arguments are supported
  for stream_name in ('stdout', 'stderr'):
    attr = 'log_output_' + stream_name
    if getattr(args, attr) == stream_name:
      setattr(args, attr, getattr(sys, stream_name))

  gcov_logger.setup(args.log_output_stdout, args.log_output_stderr, args.verbose)

  args.petsc_dir = sanitize_path(args.petsc_dir)

  if args.base_path is None:
    args.base_path = [args.petsc_dir]

  if args.ci_mode:
    # NOTE(review): the --ci-mode help text says 'arch-ci-*' but this globs *all*
    # 'arch-*' directories -- confirm which is intended
    args.base_path.extend(list(args.petsc_dir.glob('arch-*')))

  if not args.merge_branch:
    # determine the merge destination branch using PETSc's helper script
    args.merge_branch = subprocess_check_output(
      [args.petsc_dir/'lib'/'petsc'/'bin'/'maint'/'check-merge-branch.sh']
    ).strip()

  sys.exit(
    main(
      args.petsc_dir, args.petsc_arch, args.merge_branch, args.base_path, formats,
      args.verbose, args.ci_mode
    )
  )
747