author     Daniel Dunbar <daniel@zuster.org>   2013-09-14 01:19:17 +0000
committer  Daniel Dunbar <daniel@zuster.org>   2013-09-14 01:19:17 +0000
commit     2849503ab240a2dab6f2e3c5a029e3416165554f (patch)
tree       9c635687635c6bf68c0125d0a1b6e1d62db721a8 /utils
parent     e94e0984dfd22ede6211080d1f3e47138f1ea857 (diff)
[lit] Add an --output option, for writing results in a machine readable form.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@190738 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'utils')
-rw-r--r--   utils/lit/lit/Test.py                          21
-rwxr-xr-x   utils/lit/lit/main.py                          49
-rw-r--r--   utils/lit/tests/Inputs/test-data/metrics.ini    2
-rw-r--r--   utils/lit/tests/lit.cfg                         6
-rw-r--r--   utils/lit/tests/test-output.py                 21
5 files changed, 96 insertions(+), 3 deletions(-)
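
For orientation, the option is exercised as in the new test's RUN line (lit -j 1 -v <tests> --output <path>), and the file it writes is plain JSON. A sketch of its shape, reconstructed from write_test_results() and the FileCheck expectations below; the __version__ and elapsed values are illustrative placeholders, not taken from the commit:

    {
      "__version__": [0, 0, 0],
      "elapsed": 0.42,
      "tests": [
        {
          "code": "PASS",
          "elapsed": 0.01,
          "metrics": {
            "value0": 1,
            "value1": 2.3456
          },
          "name": "test-data :: metrics.ini",
          "output": "Test passed."
        }
      ]
    }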
diff --git a/utils/lit/lit/Test.py b/utils/lit/lit/Test.py
index d84eb4798f..b4988f530d 100644
--- a/utils/lit/lit/Test.py
+++ b/utils/lit/lit/Test.py
@@ -35,6 +35,21 @@ UNSUPPORTED = ResultCode('UNSUPPORTED', False)
class MetricValue(object):
def format(self):
+ """
+ format() -> str
+
+ Convert this metric to a string suitable for displaying as part of the
+ console output.
+ """
+ raise RuntimeError("abstract method")
+
+ def todata(self):
+ """
+ todata() -> json-serializable data
+
+ Convert this metric to content suitable for serializing in the JSON test
+ output.
+ """
raise RuntimeError("abstract method")
class IntMetricValue(MetricValue):
@@ -44,6 +59,9 @@ class IntMetricValue(MetricValue):
def format(self):
return str(self.value)
+ def todata(self):
+ return self.value
+
class RealMetricValue(MetricValue):
def __init__(self, value):
self.value = value
@@ -51,6 +69,9 @@ class RealMetricValue(MetricValue):
def format(self):
return '%.4f' % self.value
+ def todata(self):
+ return self.value
+
# Test results.
class Result(object):
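
The Test.py change splits the MetricValue interface in two: format() renders a metric for console display, while todata() returns something the json module can serialize. A hypothetical further subclass (not part of this commit) would follow the same pattern:

    class TextMetricValue(MetricValue):
        """Illustrative metric holding a short string, e.g. a tool version."""
        def __init__(self, value):
            self.value = value

        def format(self):
            # Rendered verbatim in the console summary.
            return self.value

        def todata(self):
            # Plain strings are already JSON-serializable.
            return self.value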
diff --git a/utils/lit/lit/main.py b/utils/lit/lit/main.py
index b93aa6fd0b..ba2490effa 100755
--- a/utils/lit/lit/main.py
+++ b/utils/lit/lit/main.py
@@ -69,6 +69,45 @@ class TestingProgressDisplay(object):
# Ensure the output is flushed.
sys.stdout.flush()
+def write_test_results(run, lit_config, testing_time, output_path):
+ try:
+ import json
+ except ImportError:
+ lit_config.fatal('test output unsupported with Python 2.5')
+
+ # Construct the data we will write.
+ data = {}
+ # Encode the current lit version as a schema version.
+ data['__version__'] = lit.__versioninfo__
+ data['elapsed'] = testing_time
+ # FIXME: Record some information on the lit configuration used?
+ # FIXME: Record information from the individual test suites?
+
+ # Encode the tests.
+ data['tests'] = tests_data = []
+ for test in run.tests:
+ test_data = {
+ 'name' : test.getFullName(),
+ 'code' : test.result.code.name,
+ 'output' : test.result.output,
+ 'elapsed' : test.result.elapsed }
+
+ # Add test metrics, if present.
+ if test.result.metrics:
+ test_data['metrics'] = metrics_data = {}
+ for key, value in test.result.metrics.items():
+ metrics_data[key] = value.todata()
+
+ tests_data.append(test_data)
+
+ # Write the output.
+ f = open(output_path, 'w')
+ try:
+ json.dump(data, f, indent=2, sort_keys=True)
+ f.write('\n')
+ finally:
+ f.close()
+
def main(builtinParameters = {}):
# Bump the GIL check interval, its more important to get any one thread to a
# blocking operation (hopefully exec) than to try and unblock other threads.
@@ -103,6 +142,9 @@ def main(builtinParameters = {}):
group.add_option("-v", "--verbose", dest="showOutput",
help="Show all test output",
action="store_true", default=False)
+ group.add_option("-o", "--output", dest="output_path",
+ help="Write test results to the provided path",
+ action="store", type=str, metavar="PATH")
group.add_option("", "--no-progress-bar", dest="useProgressBar",
help="Do not use curses based progress bar",
action="store_false", default=True)
@@ -289,8 +331,13 @@ def main(builtinParameters = {}):
sys.exit(2)
display.finish()
+ testing_time = time.time() - startTime
if not opts.quiet:
- print('Testing Time: %.2fs'%(time.time() - startTime))
+ print('Testing Time: %.2fs' % (testing_time,))
+
+ # Write out the test data, if requested.
+ if opts.output_path is not None:
+ write_test_results(run, litConfig, testing_time, opts.output_path)
# List test results organized by kind.
hasFailures = False
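
Because the results file is plain JSON, it can be post-processed with nothing beyond the standard library; a minimal sketch, assuming the results were written to results.json (both the path and the PASS filter are illustrative):

    import json

    with open('results.json') as f:
        data = json.load(f)

    # Each entry carries the keys emitted by write_test_results() above.
    for test in data['tests']:
        if test['code'] != 'PASS':
            print('%s: %s (elapsed: %s)' % (test['name'], test['code'], test['elapsed']))
        for key, value in test.get('metrics', {}).items():
            print('  metric %s = %r' % (key, value))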
diff --git a/utils/lit/tests/Inputs/test-data/metrics.ini b/utils/lit/tests/Inputs/test-data/metrics.ini
index 267e516803..01b09c5c77 100644
--- a/utils/lit/tests/Inputs/test-data/metrics.ini
+++ b/utils/lit/tests/Inputs/test-data/metrics.ini
@@ -1,6 +1,6 @@
[global]
result_code = PASS
-result_output = 'Test passed.'
+result_output = Test passed.
[results]
value0 = 1
diff --git a/utils/lit/tests/lit.cfg b/utils/lit/tests/lit.cfg
index a8d259136e..2111b72748 100644
--- a/utils/lit/tests/lit.cfg
+++ b/utils/lit/tests/lit.cfg
@@ -23,7 +23,7 @@ config.excludes = ['Inputs']
config.test_source_root = os.path.dirname(__file__)
config.test_exec_root = config.test_source_root
-config.target_triple = None
+config.target_triple = '(unused)'
src_root = os.path.join(config.test_source_root, '..')
config.environment['PYTHONPATH'] = src_root
@@ -39,3 +39,7 @@ config.substitutions.append(('%{python}', sys.executable))
if lit_config.params.get('check-coverage', None):
config.environment['COVERAGE_PROCESS_START'] = os.path.join(
os.path.dirname(__file__), ".coveragerc")
+
+# Add a feature to detect the Python version.
+config.available_features.add("python%d.%d" % (sys.version_info[0],
+ sys.version_info[1]))
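
The new lit.cfg feature exposes the running interpreter's major.minor version to the test suite, which is what lets the test below be XFAILed where the json module is missing (it was only added in Python 2.6). A small sketch of what the feature string evaluates to:

    import sys

    # On CPython 2.7.x this yields "python2.7"; test-output.py's
    # "XFAIL: python2.5" line matches only a 2.5 interpreter.
    feature = "python%d.%d" % (sys.version_info[0], sys.version_info[1])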
diff --git a/utils/lit/tests/test-output.py b/utils/lit/tests/test-output.py
new file mode 100644
index 0000000000..adfbcd88f2
--- /dev/null
+++ b/utils/lit/tests/test-output.py
@@ -0,0 +1,21 @@
+# XFAIL: python2.5
+
+# RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
+# RUN: FileCheck < %t.results.out %s
+
+# CHECK: {
+# CHECK: "__version__"
+# CHECK: "elapsed"
+# CHECK-NEXT: "tests": [
+# CHECK-NEXT: {
+# CHECK-NEXT: "code": "PASS",
+# CHECK-NEXT: "elapsed": {{[0-9.]+}},
+# CHECK-NEXT: "metrics": {
+# CHECK-NEXT: "value0": 1,
+# CHECK-NEXT: "value1": 2.3456
+# CHECK-NEXT: }
+# CHECK-NEXT: "name": "test-data :: metrics.ini",
+# CHECK-NEXT: "output": "Test passed."
+# CHECK-NEXT: }
+# CHECK-NEXT: ]
+# CHECK-NEXT: }
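
The alphabetical key order these CHECK lines rely on ("code" before "elapsed" before "metrics", and so on) comes from the sort_keys=True passed to json.dump() in write_test_results(). A quick way to sanity-check a results file by hand (the path is illustrative):

    import json

    # Loading raises ValueError if the file is not well-formed JSON.
    with open('results.out') as f:
        data = json.load(f)
    print(sorted(data.keys()))  # ['__version__', 'elapsed', 'tests']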