xref: /libCEED/tests/junit.py (revision 8a4ce0d78b95fffc485d7f6e76eabd31204930a1)
1#!/usr/bin/env python3
2
3import os
4import sys
5sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'junit-xml')))
6from junit_xml import TestCase, TestSuite
7
def parse_testargs(file):
    """Extract TESTARGS argument lists from a test source file.

    C/C++ sources use ``//TESTARGS`` marker lines; Nek ``.usr`` sources use
    ``C_TESTARGS``.  Each marker line yields one list of arguments (the
    marker token itself is dropped).

    Returns a list of argument lists, one per marker line.
    Raises RuntimeError for an unrecognized file extension.
    """
    ext = os.path.splitext(file)[1]
    if ext in ('.c', '.cpp'):
        marker = '//TESTARGS'
    elif ext == '.usr':
        marker = 'C_TESTARGS'
    else:
        raise RuntimeError('Unrecognized extension for file: {}'.format(file))
    # Context manager closes the handle promptly (the original leaked it via
    # open(file).readlines()); the comprehension replaces the quadratic
    # sum(list-of-lists, []) flatten.
    with open(file) as fd:
        return [line.split()[1:] for line in fd if line.startswith(marker)]
16
def get_source(test):
    """Map a test name to the path of its source file.

    Known example prefixes select the directory and extension; bare ``ex*``
    names map to the CEED examples.  Returns None for unrecognized names
    (e.g. unit tests built from tests/).
    """
    prefix_table = (
        ('petsc-', ('examples', 'petsc'), '.c'),
        ('mfem-', ('examples', 'mfem'), '.cpp'),
        ('nek-', ('examples', 'nek', 'bps'), '.usr'),
        ('fluids-', ('examples', 'fluids'), '.c'),
        ('solids-', ('examples', 'solids'), '.c'),
    )
    for prefix, parts, ext in prefix_table:
        if test.startswith(prefix):
            return os.path.join(*parts, test[len(prefix):] + ext)
    if test.startswith('ex'):
        return os.path.join('examples', 'ceed', test + '.c')
    return None
def get_testargs(test):
    """Return all argument lists for *test*.

    Tests without a known source file default to a single bare
    '{ceed_resource}' placeholder argument.
    """
    source = get_source(test)
    return [['{ceed_resource}']] if source is None else parse_testargs(source)
36
def check_required_failure(case, stderr, required):
    """Handle a test that is expected to fail with a specific message.

    If *required* appears in *stderr* the expected failure occurred and the
    case's status records it; otherwise the case is marked as failed.
    """
    message = 'required: {}'.format(required)
    if required in stderr:
        case.status = 'fails with ' + message
    else:
        case.add_failure_info(message)
42
def contains_any(resource, substrings):
    """Return True if any element of *substrings* occurs within *resource*."""
    for candidate in substrings:
        if candidate in resource:
            return True
    return False
45
def skip_rule(test, resource):
    """Return True when this (test, backend) combination is known to be
    unsupported and must be skipped without running."""
    # fluids examples only run on CPU backends and /gpu/cuda/gen
    if test.startswith('fluids-') and contains_any(resource, ['occa', 'gpu']) \
            and not contains_any(resource, ['/gpu/cuda/gen']):
        return True
    # OCCA exclusions
    if test.startswith('solids-') and contains_any(resource, ['occa']):
        return True
    if test.startswith('petsc-multigrid') and contains_any(resource, ['occa']):
        return True
    if test.startswith('nek') and contains_any(resource, ['occa']):
        return True
    if test.startswith('t507') and contains_any(resource, ['occa']):
        return True
    # MAGMA exclusions
    if test.startswith('t318') and contains_any(resource, ['magma']):
        return True
    if test.startswith('t506') and contains_any(resource, ['magma']):
        return True
    return False
56
57def run(test, backends):
58    import subprocess
59    import time
60    import difflib
61    allargs = get_testargs(test)
62
63    testcases = []
64    for args in allargs:
65        for ceed_resource in backends:
66            rargs = [os.path.join('build', test)] + args.copy()
67            rargs[rargs.index('{ceed_resource}')] = ceed_resource
68
69            if skip_rule(test, ceed_resource):
70                case = TestCase('{} {}'.format(test, ceed_resource),
71                                elapsed_sec=0,
72                                timestamp=time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(start)),
73                                stdout='',
74                                stderr='')
75                case.add_skipped_info('Pre-run skip rule')
76            else:
77                start = time.time()
78                proc = subprocess.run(rargs,
79                                      stdout=subprocess.PIPE,
80                                      stderr=subprocess.PIPE)
81                proc.stdout = proc.stdout.decode('utf-8')
82                proc.stderr = proc.stderr.decode('utf-8')
83
84                case = TestCase('{} {}'.format(test, ceed_resource),
85                                elapsed_sec=time.time()-start,
86                                timestamp=time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(start)),
87                                stdout=proc.stdout,
88                                stderr=proc.stderr)
89                ref_stdout = os.path.join('tests/output', test + '.out')
90
91            if not case.is_skipped() and proc.stderr:
92                if 'OCCA backend failed to use' in proc.stderr:
93                    case.add_skipped_info('occa mode not supported {} {}'.format(test, ceed_resource))
94                elif 'Backend does not implement' in proc.stderr:
95                    case.add_skipped_info('not implemented {} {}'.format(test, ceed_resource))
96                elif 'Can only provide to HOST memory' in proc.stderr:
97                    case.add_skipped_info('device memory not supported {} {}'.format(test, ceed_resource))
98
99            if not case.is_skipped():
100                if test[:4] in 't110 t111 t112 t113 t114'.split():
101                    check_required_failure(case, proc.stderr, 'Cannot grant CeedVector array access')
102                if test[:4] in 't115'.split():
103                    check_required_failure(case, proc.stderr, 'Cannot grant CeedVector read-only array access, the access lock is already in use')
104                if test[:4] in 't116'.split():
105                    check_required_failure(case, proc.stderr, 'Cannot destroy CeedVector, the writable access lock is in use')
106                if test[:4] in 't117'.split():
107                    check_required_failure(case, proc.stderr, 'Cannot restore CeedVector array access, access was not granted')
108                if test[:4] in 't118'.split():
109                    check_required_failure(case, proc.stderr, 'Cannot sync CeedVector, the access lock is already in use')
110                if test[:4] in 't215'.split():
111                    check_required_failure(case, proc.stderr, 'Cannot destroy CeedElemRestriction, a process has read access to the offset data')
112                if test[:4] in 't303'.split():
113                    check_required_failure(case, proc.stderr, 'Length of input/output vectors incompatible with basis dimensions')
114
115            if not case.is_skipped() and not case.status:
116                if proc.stderr:
117                    case.add_failure_info('stderr', proc.stderr)
118                elif proc.returncode != 0:
119                    case.add_error_info('returncode = {}'.format(proc.returncode))
120                elif os.path.isfile(ref_stdout):
121                    with open(ref_stdout) as ref:
122                        diff = list(difflib.unified_diff(ref.readlines(),
123                                                         proc.stdout.splitlines(keepends=True),
124                                                         fromfile=ref_stdout,
125                                                         tofile='New'))
126                    if diff:
127                        case.add_failure_info('stdout', output=''.join(diff))
128                elif proc.stdout and test[:4] not in 't003':
129                    case.add_failure_info('stdout', output=proc.stdout)
130            testcases.append(case)
131        return TestSuite(test, testcases)
132
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser('Test runner with JUnit output')
    parser.add_argument('--output', help='Output file to write test', default=None)
    parser.add_argument('--gather', help='Gather all *.junit files into XML', action='store_true')
    parser.add_argument('test', help='Test executable', nargs='?')
    args = parser.parse_args()

    if args.gather:
        # NOTE(review): gather() is not defined in this file — confirm it is
        # provided elsewhere, otherwise this path raises NameError.
        gather()
    else:
        # BACKENDS is required (space-separated list of CEED resources).
        backends = os.environ['BACKENDS'].split()

        result = run(args.test, backends)
        output = (os.path.join('build', args.test + '.junit')
                  if args.output is None
                  else args.output)
        with open(output, 'w') as fd:
            TestSuite.to_file(fd, [result])

        # Exit non-zero when anything failed or errored.  The original
        # recounted all cases once per case (the loop variable was unused),
        # an accidental O(n^2); one pass suffices.
        failures = sum(1 for c in result.test_cases if c.is_failure())
        errors = sum(1 for c in result.test_cases if c.is_error())
        if failures + errors > 0:
            sys.exit(1)
157