summaryrefslogtreecommitdiffstats
path: root/WebKitTools/TestResultServer
diff options
context:
space:
mode:
Diffstat (limited to 'WebKitTools/TestResultServer')
-rw-r--r--WebKitTools/TestResultServer/handlers/testfilehandler.py31
-rwxr-xr-xWebKitTools/TestResultServer/model/jsonresults.py127
-rwxr-xr-xWebKitTools/TestResultServer/model/jsonresults_unittest.py127
3 files changed, 230 insertions, 55 deletions
diff --git a/WebKitTools/TestResultServer/handlers/testfilehandler.py b/WebKitTools/TestResultServer/handlers/testfilehandler.py
index 97953e7..4d1320f 100644
--- a/WebKitTools/TestResultServer/handlers/testfilehandler.py
+++ b/WebKitTools/TestResultServer/handlers/testfilehandler.py
@@ -43,6 +43,7 @@ PARAM_NAME = "name"
PARAM_KEY = "key"
PARAM_TEST_TYPE = "testtype"
PARAM_INCREMENTAL = "incremental"
+PARAM_TEST_LIST_JSON = "testlistjson"
class DeleteFile(webapp.RequestHandler):
@@ -109,16 +110,31 @@ class GetFile(webapp.RequestHandler):
if not files:
logging.info("File not found, builder: %s, test_type: %s, name: %s.",
builder, test_type, name)
- return
+ return None
+
+ return files[0].data
+
+ def _get_test_list_json(self, builder, test_type):
+ """Return json file with test name list only, do not include test
+ results and other non-test-data .
- self.response.headers["Content-Type"] = "text/plain; charset=utf-8"
- self.response.out.write(files[0].data)
+ Args:
+ builder: builder name.
+ test_type: type of test results.
+ """
+
+ json = self._get_file_content(builder, test_type, "results.json")
+ if not json:
+ return None
+
+ return JsonResults.get_test_list(builder, json)
def get(self):
builder = self.request.get(PARAM_BUILDER)
test_type = self.request.get(PARAM_TEST_TYPE)
name = self.request.get(PARAM_NAME)
dir = self.request.get(PARAM_DIR)
+ test_list_json = self.request.get(PARAM_TEST_LIST_JSON)
logging.debug(
"Getting files, builder: %s, test_type: %s, name: %s.",
@@ -129,8 +145,15 @@ class GetFile(webapp.RequestHandler):
# file content.
if dir or not builder or not name:
return self._get_file_list(builder, test_type, name)
+
+ if name == "results.json" and test_list_json:
+ json = self._get_test_list_json(builder, test_type)
else:
- return self._get_file_content(builder, test_type, name)
+ json = self._get_file_content(builder, test_type, name)
+
+ if json:
+ self.response.headers["Content-Type"] = "text/plain; charset=utf-8"
+ self.response.out.write(json)
class Upload(webapp.RequestHandler):
diff --git a/WebKitTools/TestResultServer/model/jsonresults.py b/WebKitTools/TestResultServer/model/jsonresults.py
index d86fbcd..e5eb7f7 100755
--- a/WebKitTools/TestResultServer/model/jsonresults.py
+++ b/WebKitTools/TestResultServer/model/jsonresults.py
@@ -40,8 +40,11 @@ JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
JSON_RESULTS_TESTS = "tests"
JSON_RESULTS_RESULTS = "results"
JSON_RESULTS_TIMES = "times"
+JSON_RESULTS_PASS = "P"
+JSON_RESULTS_NO_DATA = "N"
+JSON_RESULTS_MIN_TIME = 1
JSON_RESULTS_VERSION = 3
-JSON_RESULTS_MAX_BUILDS = 750
+JSON_RESULTS_MAX_BUILDS = 1500
class JsonResults(object):
@@ -171,8 +174,6 @@ class JsonResults(object):
# Merge this build into aggreagated results.
cls._merge_one_build(aggregated_json, incremental_json, index)
- logging.debug("Merged build %s, merged json: %s.",
- build_number, aggregated_json)
return True
@@ -210,18 +211,26 @@ class JsonResults(object):
incremental_json: incremental json object.
"""
- for test_name in incremental_json:
- incremental_test = incremental_json[test_name]
+ all_tests = (set(aggregated_json.iterkeys()) |
+ set(incremental_json.iterkeys()))
+ for test_name in all_tests:
if test_name in aggregated_json:
aggregated_test = aggregated_json[test_name]
+ if test_name in incremental_json:
+ incremental_test = incremental_json[test_name]
+ results = incremental_test[JSON_RESULTS_RESULTS]
+ times = incremental_test[JSON_RESULTS_TIMES]
+ else:
+ results = [[1, JSON_RESULTS_NO_DATA]]
+ times = [[1, 0]]
+
cls._insert_item_run_length_encoded(
- incremental_test[JSON_RESULTS_RESULTS],
- aggregated_test[JSON_RESULTS_RESULTS])
+ results, aggregated_test[JSON_RESULTS_RESULTS])
cls._insert_item_run_length_encoded(
- incremental_test[JSON_RESULTS_TIMES],
- aggregated_test[JSON_RESULTS_TIMES])
+ times, aggregated_test[JSON_RESULTS_TIMES])
+ cls._normalize_results_json(test_name, aggregated_json)
else:
- aggregated_json[test_name] = incremental_test
+ aggregated_json[test_name] = incremental_json[test_name]
@classmethod
def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item):
@@ -238,17 +247,69 @@ class JsonResults(object):
aggregated_item[0][0] = min(
aggregated_item[0][0] + item[0], JSON_RESULTS_MAX_BUILDS)
else:
- # The test item values need to be summed from continuous runs.
- # If there is an older item (not most recent one) whose value is
- # same as the one to insert, then we should remove the old item
- # from aggregated list.
- for i in reversed(range(1, len(aggregated_item))):
- if item[1] == aggregated_item[i][1]:
- aggregated_item.pop(i)
-
aggregated_item.insert(0, item)
@classmethod
+ def _normalize_results_json(cls, test_name, aggregated_json):
+ """ Prune tests where all runs pass or tests that no longer exist and
+ truncate all results to JSON_RESULTS_MAX_BUILDS.
+
+ Args:
+ test_name: Name of the test.
+ aggregated_json: The JSON object with all the test results for
+ this builder.
+ """
+
+ aggregated_test = aggregated_json[test_name]
+ aggregated_test[JSON_RESULTS_RESULTS] = \
+ cls._remove_items_over_max_number_of_builds(
+ aggregated_test[JSON_RESULTS_RESULTS])
+ aggregated_test[JSON_RESULTS_TIMES] = \
+ cls._remove_items_over_max_number_of_builds(
+ aggregated_test[JSON_RESULTS_TIMES])
+
+ is_all_pass = cls._is_results_all_of_type(
+ aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_PASS)
+ is_all_no_data = cls._is_results_all_of_type(
+ aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_NO_DATA)
+
+ max_time = max(
+ [time[1] for time in aggregated_test[JSON_RESULTS_TIMES]])
+ # Remove all passes/no-data from the results to reduce noise and
+ # filesize. If a test passes every run, but
+ # takes >= JSON_RESULTS_MIN_TIME to run, don't throw away the data.
+ if (is_all_no_data or
+ (is_all_pass and max_time < JSON_RESULTS_MIN_TIME)):
+ del aggregated_json[test_name]
+
+ @classmethod
+ def _remove_items_over_max_number_of_builds(cls, encoded_list):
+ """Removes items from the run-length encoded list after the final
+ item that exceeds the max number of builds to track.
+
+ Args:
+ encoded_list: run-length encoded results. An array of arrays, e.g.
+ [[3,'A'],[1,'Q']] encodes AAAQ.
+ """
+ num_builds = 0
+ index = 0
+ for result in encoded_list:
+ num_builds = num_builds + result[0]
+ index = index + 1
+ if num_builds > JSON_RESULTS_MAX_BUILDS:
+ return encoded_list[:index]
+
+ return encoded_list
+
+ @classmethod
+ def _is_results_all_of_type(cls, results, type):
+ """Returns whether all the results are of the given type
+ (e.g. all passes).
+ """
+
+ return len(results) == 1 and results[0][1] == type
+
+ @classmethod
def _check_json(cls, builder, json):
"""Check whether the given json is valid.
@@ -363,3 +424,33 @@ class JsonResults(object):
return None
return file
+
+ @classmethod
+ def get_test_list(cls, builder, json_file_data):
+ """Get list of test names from aggregated json file data.
+
+ Args:
+ json_file_data: json file data that has all test-data and
+ non-test-data.
+
+ Returns:
+ json file with test name list only. The json format is the same
+ as the one saved in datastore, but all non-test-data and test detail
+ results are removed.
+ """
+
+ logging.debug("Loading test results json...")
+ json = cls._load_json(json_file_data)
+ if not json:
+ return None
+
+ logging.debug("Checking test results json...")
+ if not cls._check_json(builder, json):
+ return None
+
+ test_list_json = {}
+ tests = json[builder][JSON_RESULTS_TESTS]
+ test_list_json[builder] = {
+ "tests": dict.fromkeys(tests, {})}
+
+ return cls._generate_file_data(test_list_json)
diff --git a/WebKitTools/TestResultServer/model/jsonresults_unittest.py b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
index fd646c8..15b659b 100755
--- a/WebKitTools/TestResultServer/model/jsonresults_unittest.py
+++ b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
@@ -26,6 +26,7 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import jsonresults
import unittest
from jsonresults import JsonResults
@@ -65,6 +66,9 @@ JSON_RESULTS_TESTS_TEMPLATE = (
JSON_RESULTS_PREFIX = "ADD_RESULTS("
JSON_RESULTS_SUFFIX = ");"
+JSON_RESULTS_TEST_LIST_TEMPLATE = (
+ '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}')
+
class JsonResultsTest(unittest.TestCase):
def setUp(self):
@@ -122,12 +126,27 @@ class JsonResultsTest(unittest.TestCase):
else:
self.assertFalse(merged_results)
+ def _test_get_test_list(self, input_data, expected_data):
+ input_results = self._make_test_json(input_data)
+
+ json_tests = []
+ for test in expected_data:
+ json_tests.append("\"" + test + "\":{}")
+
+ expected_results = JSON_RESULTS_PREFIX + \
+ JSON_RESULTS_TEST_LIST_TEMPLATE.replace(
+ "[TESTDATA_TESTS]", ",".join(json_tests)) + \
+ JSON_RESULTS_SUFFIX
+
+ actual_results = JsonResults.get_test_list(self._builder, input_results)
+ self.assertEquals(actual_results, expected_results)
+
def test(self):
# Empty incremental results json.
# Nothing to merge.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Incremental results
None,
# Expect no merge happens.
@@ -137,7 +156,7 @@ class JsonResultsTest(unittest.TestCase):
# Nothing to merge.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Incremental results
([], []),
# Expected no merge happens.
@@ -149,9 +168,9 @@ class JsonResultsTest(unittest.TestCase):
# Aggregated results
None,
# Incremental results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Expected results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]))
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]))
# Single test for single run.
# Incremental results has the latest build and same test results for
@@ -160,11 +179,11 @@ class JsonResultsTest(unittest.TestCase):
# of runs for "P" (200 + 1) to get merged results.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Incremental results
- (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"]]),
+ (["3"], [["001.html", "[1,\"F\"]", "[1,0]"]]),
# Expected results
- (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"]]))
+ (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"]]))
# Single test for single run.
# Incremental results has the latest build but different test results
@@ -172,72 +191,68 @@ class JsonResultsTest(unittest.TestCase):
# Insert the incremental results at the first place.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Incremental results
- (["3"], [["001.html", "[1, \"I\"]", "[1,\"1\"]"]]),
+ (["3"], [["001.html", "[1, \"I\"]", "[1,1]"]]),
# Expected results
- (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+ (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"]", "[1,1],[200,0]"]]))
# Single test for single run.
# Incremental results has the latest build but different test results
# for that run.
- # The test "results" and "times" need to be continuous, so the old
- # [10,"I"] result should be dropped because a new result of same type [1,"I"]
- # is inserted in front of [200,"P"].
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"],[10,\"I\"]", "[200,\"0\"],[10,\"1\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"],[10,\"I\"]", "[200,0],[10,1]"]]),
# Incremental results
- (["3"], [["001.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+ (["3"], [["001.html", "[1,\"I\"]", "[1,1]"]]),
# Expected results
- (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+ (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"],[10,\"I\"]", "[1,1],[200,0],[10,1]"]]))
# Multiple tests for single run.
# All tests have incremental updates.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
# Incremental results
- (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"], ["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+ (["3"], [["001.html", "[1,\"F\"]", "[1,0]"], ["002.html", "[1,\"I\"]", "[1,1]"]]),
# Expected results
- (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"]]))
+ (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))
# Multiple tests for single run.
- # Not all tests have update.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
# Incremental results
- (["3"], [["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+ (["3"], [["002.html", "[1,\"I\"]", "[1,1]"]]),
# Expected results
- (["3", "2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"]]))
+ (["3", "2", "1"], [["001.html", "[1,\"N\"],[200,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))
# Single test for multiple runs.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Incremental results
- (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"]]),
+ (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"]]),
# Expected results
- (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"]]))
+ (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"]]))
# Multiple tests for multiple runs.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[10,\"Z\"]", "[10,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[10,\"Z\"]", "[10,0]"]]),
# Incremental results
- (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"], ["002.html", "[1,\"C\"]", "[1,\"1\"]"]]),
+ (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"], ["002.html", "[1,\"C\"]", "[1,1]"]]),
# Expected results
- (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,\"1\"],[10,\"0\"]"]]))
+ (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,1],[10,0]"]]))
# Test the build in incremental results is older than the most recent
# build in aggregated results.
# The incremental results should be dropped and no merge happens.
self._test_merge(
# Aggregated results
- (["3", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["3", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Incremental results
- (["2"], [["001.html", "[1, \"P\"]", "[1,\"0\"]"]]),
+ (["2"], [["001.html", "[1, \"F\"]", "[1,0]"]]),
# Expected no merge happens.
None)
@@ -246,11 +261,57 @@ class JsonResultsTest(unittest.TestCase):
# The incremental results should be dropped and no merge happens.
self._test_merge(
# Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
# Incremental results
- (["3", "2"], [["001.html", "[2, \"P\"]", "[2,\"0\"]"]]),
+ (["3", "2"], [["001.html", "[2, \"F\"]", "[2,0]"]]),
# Expected no merge happens.
None)
+ # Remove tests that have no data in any of the runs.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"N\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1,\"N\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
+ # Expected results
+ (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
+
+ # Remove tests where all runs pass and the max running time is < 1 second.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1,\"P\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
+ # Expected results
+ (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
+
+ # Do not remove tests where all runs pass but the max running time is >= 1 second.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1,\"P\"]", "[1,1]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
+ # Expected results
+ (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[1,1],[200,0]"], ["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
+
+ # Remove items from test results and times that exceeds the max number
+ # of builds to track.
+ max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS)
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[" + max_builds + ",\"F\"],[1,\"I\"]", "[" + max_builds + ",0],[1,1]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1,\"T\"]", "[1,1]"]]),
+ # Expected results
+ (["3", "2", "1"], [["001.html", "[1,\"T\"],[" + max_builds + ",\"F\"]", "[1,1],[" + max_builds + ",0]"]]))
+
+ # Get test name list only. Don't include non-test-list data or
+ # test result details.
+ self._test_get_test_list(
+ # Input results
+ (["3", "2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
+ # Expected results
+ ["001.html", "002.html"])
+
if __name__ == '__main__':
unittest.main()