diff --git a/testsuite/driver/perf_notes.py b/testsuite/driver/perf_notes.py
index 931af03751ec569eb88ab0094be42f481291da42..5c36bc2251ec13667ef669029022f1ccd91276f0 100644
--- a/testsuite/driver/perf_notes.py
+++ b/testsuite/driver/perf_notes.py
@@ -9,6 +9,7 @@
 # (which defaults to 'local' if not given by --test-env).
 #
 
+from enum import Enum
 import colorsys
 import tempfile
 import json
@@ -62,7 +63,7 @@ PerfStat = namedtuple('PerfStat', ['test_env','test','way','metric','value'])
 # A baseline recovered form stored metrics.
 Baseline = namedtuple('Baseline', ['perfStat','commit','commitDepth'])
 
-class MetricChange:
+class MetricChange(Enum):
     NewMetric = 'NewMetric'
     NoChange = 'NoChange'
     Increase = 'Increase'
diff --git a/testsuite/driver/testglobals.py b/testsuite/driver/testglobals.py
index 3d273cb6472843d05f1939f64452efbc2d380d42..c89e225c721ed2235b4701b6aead5e17b230ac4e 100644
--- a/testsuite/driver/testglobals.py
+++ b/testsuite/driver/testglobals.py
@@ -225,20 +225,22 @@ class TestRun:
         self.n_expected_failures = 0
 
         self.missing_libs = [] # type: List[TestResult]
-        self.framework_failures = []
-        self.framework_warnings = []
+        self.framework_failures = [] # type: List[TestResult]
+        self.framework_warnings = [] # type: List[TestResult]
 
-        self.expected_passes = []
-        self.unexpected_passes = []
-        self.unexpected_failures = []
-        self.unexpected_stat_failures = []
-        self.fragile_results = []
+        self.expected_passes = [] # type: List[TestResult]
+        self.unexpected_passes = [] # type: List[TestResult]
+        self.unexpected_failures = [] # type: List[TestResult]
+        self.unexpected_stat_failures = [] # type: List[TestResult]
+
+        # Results from tests that have been marked as fragile
+        self.fragile_results = [] # type: List[TestResult]
 
         # List of all metrics measured in this test run.
         # [(change, PerfStat)] where change is one of the MetricChange
         # constants: NewMetric, NoChange, Increase, Decrease.
         # NewMetric happens when the previous git commit has no metric recorded.
-        self.metrics = []
+        self.metrics = [] # type: List[Tuple[MetricChange, PerfStat]]
 
 global t
 t = TestRun()
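
A minimal sketch (not part of the patch) of how the Enum-based `MetricChange` and the typed `TestRun.metrics` list might be consumed. The `summarize_metrics` helper and the sample data below are hypothetical; only `MetricChange`, `PerfStat`, and the `List[Tuple[MetricChange, PerfStat]]` shape come from the diff.

```python
# Sketch only: assumes the definitions shown in the diff above.
from enum import Enum
from typing import List, Tuple
from collections import namedtuple

PerfStat = namedtuple('PerfStat', ['test_env', 'test', 'way', 'metric', 'value'])

class MetricChange(Enum):
    NewMetric = 'NewMetric'
    NoChange = 'NoChange'
    Increase = 'Increase'
    Decrease = 'Decrease'

def summarize_metrics(metrics: List[Tuple[MetricChange, PerfStat]]) -> None:
    # Hypothetical consumer of TestRun.metrics.
    for change, stat in metrics:
        # As an Enum member, `change` no longer compares equal to the bare
        # string 'Increase'; compare against members, or use change.value
        # when the string form is needed (e.g. for output or JSON).
        if change == MetricChange.NewMetric:
            continue
        print('%s (%s): %s %s = %d' % (stat.test, stat.way,
                                       change.value, stat.metric, stat.value))

summarize_metrics([
    (MetricChange.Increase,
     PerfStat('local', 'T1234', 'normal', 'bytes allocated', 120000)),
])
```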