Diffstat (limited to 'WebKitTools/TestResultServer/model')
-rw-r--r--  WebKitTools/TestResultServer/model/dashboardfile.py        |   7
-rwxr-xr-x  WebKitTools/TestResultServer/model/datastorefile.py        | 129
-rwxr-xr-x  WebKitTools/TestResultServer/model/jsonresults.py          | 365
-rwxr-xr-x  WebKitTools/TestResultServer/model/jsonresults_unittest.py | 256
-rw-r--r--  WebKitTools/TestResultServer/model/testfile.py             |  71
5 files changed, 790 insertions, 38 deletions
diff --git a/WebKitTools/TestResultServer/model/dashboardfile.py b/WebKitTools/TestResultServer/model/dashboardfile.py
index c74f071..57d3f6f 100644
--- a/WebKitTools/TestResultServer/model/dashboardfile.py
+++ b/WebKitTools/TestResultServer/model/dashboardfile.py
@@ -3,7 +3,7 @@
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
-#
+#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
-#
+#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -36,6 +36,7 @@ from google.appengine.ext import db
 SVN_PATH_DASHBOARD = ("http://src.chromium.org/viewvc/chrome/trunk/tools/"
                       "dashboards/")
 
+
 class DashboardFile(db.Model):
     name = db.StringProperty()
     data = db.BlobProperty()
@@ -92,7 +93,7 @@ class DashboardFile(db.Model):
         if not files:
             logging.info("No existing file, added as new file.")
             return cls.add_file(name, data)
-
+
         logging.debug("Updating existing file.")
         file = files[0]
         file.data = data
diff --git a/WebKitTools/TestResultServer/model/datastorefile.py b/WebKitTools/TestResultServer/model/datastorefile.py
new file mode 100755
index 0000000..dd4c366
--- /dev/null
+++ b/WebKitTools/TestResultServer/model/datastorefile.py
@@ -0,0 +1,129 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from datetime import datetime
+import logging
+
+from google.appengine.ext import db
+
+MAX_DATA_ENTRY_PER_FILE = 10
+MAX_ENTRY_LEN = 1000 * 1000
+
+
+class DataEntry(db.Model):
+ """Datastore entry that stores one segmant of file data
+ (<1000*1000 bytes).
+ """
+
+ data = db.BlobProperty()
+
+ @classmethod
+ def get(cls, key):
+ return db.get(key)
+
+ def get_data(self, key):
+ return db.get(key)
+
+
+class DataStoreFile(db.Model):
+ """This class stores file in datastore.
+ If a file is oversize (>1000*1000 bytes), the file is split into
+ multiple segments and stored in multiple datastore entries.
+ """
+
+ name = db.StringProperty()
+ data_keys = db.ListProperty(db.Key)
+ date = db.DateTimeProperty(auto_now_add=True)
+
+ data = None
+
+ def delete_data(self, keys=None):
+ if not keys:
+ keys = self.data_keys
+
+ for key in keys:
+ data_entry = DataEntry.get(key)
+ if data_entry:
+ data_entry.delete()
+
+ def save_data(self, data):
+ if not data:
+ logging.warning("No data to save.")
+ return False
+
+ if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
+ logging.error("File too big, can't save to datastore: %dK",
+ len(data) / 1024)
+ return False
+
+ start = 0
+ keys = self.data_keys
+ self.data_keys = []
+ while start < len(data):
+ if keys:
+ key = keys.pop(0)
+ data_entry = DataEntry.get(key)
+ if not data_entry:
+ logging.warning("Found key, but no data entry: %s", key)
+ data_entry = DataEntry()
+ else:
+ data_entry = DataEntry()
+
+ data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
+ data_entry.put()
+
+ logging.info("Data saved: %s.", data_entry.key())
+ self.data_keys.append(data_entry.key())
+
+ start = start + MAX_ENTRY_LEN
+
+ if keys:
+ self.delete_data(keys)
+
+ self.data = data
+
+ return True
+
+ def load_data(self):
+ if not self.data_keys:
+ logging.warning("No data to load.")
+ return None
+
+ data = []
+ for key in self.data_keys:
+ logging.info("Loading data for key: %s.", key)
+ data_entry = DataEntry.get(key)
+ if not data_entry:
+ logging.error("No data found for key: %s.", key)
+ return None
+
+ data.append(data_entry.data)
+
+ self.data = "".join(data)
+
+ return self.data
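The split/join scheme that save_data and load_data implement can be exercised without App Engine. Below is a minimal sketch, assuming plain strings in place of DataEntry entities and a shrunken MAX_ENTRY_LEN to keep the example short; the helper names are illustrative, not part of the patch:

    MAX_ENTRY_LEN = 4  # the real code uses 1000 * 1000 bytes per entry

    def split_into_segments(data):
        # Mirrors the save_data loop: walk the data in MAX_ENTRY_LEN strides.
        return [data[start:start + MAX_ENTRY_LEN]
                for start in range(0, len(data), MAX_ENTRY_LEN)]

    def join_segments(segments):
        # Mirrors load_data: concatenate the segments in key order.
        return "".join(segments)

    segments = split_into_segments("0123456789")
    assert segments == ["0123", "4567", "89"]
    assert join_segments(segments) == "0123456789"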
diff --git a/WebKitTools/TestResultServer/model/jsonresults.py b/WebKitTools/TestResultServer/model/jsonresults.py
new file mode 100755
index 0000000..d86fbcd
--- /dev/null
+++ b/WebKitTools/TestResultServer/model/jsonresults.py
@@ -0,0 +1,365 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from datetime import datetime
+from django.utils import simplejson
+import logging
+
+from model.testfile import TestFile
+
+JSON_RESULTS_FILE = "results.json"
+JSON_RESULTS_PREFIX = "ADD_RESULTS("
+JSON_RESULTS_SUFFIX = ");"
+JSON_RESULTS_VERSION_KEY = "version"
+JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
+JSON_RESULTS_TESTS = "tests"
+JSON_RESULTS_RESULTS = "results"
+JSON_RESULTS_TIMES = "times"
+JSON_RESULTS_VERSION = 3
+JSON_RESULTS_MAX_BUILDS = 750
+
+
+class JsonResults(object):
+ @classmethod
+ def _strip_prefix_suffix(cls, data):
+ """Strip out prefix and suffix of json results string.
+
+ Args:
+ data: json file content.
+
+ Returns:
+ json string without prefix and suffix.
+ """
+
+ assert(data.startswith(JSON_RESULTS_PREFIX))
+ assert(data.endswith(JSON_RESULTS_SUFFIX))
+
+ return data[len(JSON_RESULTS_PREFIX):
+ len(data) - len(JSON_RESULTS_SUFFIX)]
+
+ @classmethod
+ def _generate_file_data(cls, json, sort_keys=False):
+ """Given json string, generate file content data by adding
+ prefix and suffix.
+
+ Args:
+ json: json string without prefix and suffix.
+
+ Returns:
+ json file data.
+ """
+
+ data = simplejson.dumps(json, separators=(',', ':'),
+ sort_keys=sort_keys)
+ return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX
+
+ @classmethod
+ def _load_json(cls, file_data):
+ """Load json file to a python object.
+
+ Args:
+ file_data: json file content.
+
+ Returns:
+ json object or
+ None on failure.
+ """
+
+ json_results_str = cls._strip_prefix_suffix(file_data)
+ if not json_results_str:
+ logging.warning("No json results data.")
+ return None
+
+ try:
+ return simplejson.loads(json_results_str)
+ except Exception, err:
+ logging.debug(json_results_str)
+ logging.error("Failed to load json results: %s", str(err))
+ return None
+
+ @classmethod
+ def _merge_json(cls, aggregated_json, incremental_json):
+ """Merge incremental json into aggregated json results.
+
+ Args:
+ aggregated_json: aggregated json object.
+ incremental_json: incremental json object.
+
+ Returns:
+ True if merge succeeds or
+ False on failure.
+ """
+
+        # Merge non-test property data.
+        # Test properties are merged in _merge_tests.
+ if not cls._merge_non_test_data(aggregated_json, incremental_json):
+ return False
+
+        # Merge test results and times.
+ incremental_tests = incremental_json[JSON_RESULTS_TESTS]
+ if incremental_tests:
+ aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
+ cls._merge_tests(aggregated_tests, incremental_tests)
+
+ return True
+
+ @classmethod
+ def _merge_non_test_data(cls, aggregated_json, incremental_json):
+ """Merge incremental non tests property data into aggregated json results.
+
+ Args:
+ aggregated_json: aggregated json object.
+ incremental_json: incremental json object.
+
+ Returns:
+ True if merge succeeds or
+ False on failure.
+ """
+
+ incremental_builds = incremental_json[JSON_RESULTS_BUILD_NUMBERS]
+ aggregated_builds = aggregated_json[JSON_RESULTS_BUILD_NUMBERS]
+ aggregated_build_number = int(aggregated_builds[0])
+ # Loop through all incremental builds, start from the oldest run.
+ for index in reversed(range(len(incremental_builds))):
+ build_number = int(incremental_builds[index])
+ logging.debug("Merging build %s, incremental json index: %d.",
+ build_number, index)
+
+ # Return if not all build numbers in the incremental json results
+ # are newer than the most recent build in the aggregated results.
+ # FIXME: make this case work.
+ if build_number < aggregated_build_number:
+ logging.warning(("Build %d in incremental json is older than "
+ "the most recent build in aggregated results: %d"),
+ build_number, aggregated_build_number)
+ return False
+
+ # Return if the build number is duplicated.
+            # FIXME: skip the duplicated build and merge the rest of the
+            # results. Need to be careful when skipping the corresponding
+            # value in _merge_tests because the property data for each test
+            # could be accumulated.
+ if build_number == aggregated_build_number:
+ logging.warning("Duplicate build %d in incremental json",
+ build_number)
+ return False
+
+            # Merge this build into aggregated results.
+ cls._merge_one_build(aggregated_json, incremental_json, index)
+ logging.debug("Merged build %s, merged json: %s.",
+ build_number, aggregated_json)
+
+ return True
+
+ @classmethod
+ def _merge_one_build(cls, aggregated_json, incremental_json,
+ incremental_index):
+ """Merge one build of incremental json into aggregated json results.
+
+ Args:
+ aggregated_json: aggregated json object.
+ incremental_json: incremental json object.
+ incremental_index: index of the incremental json results to merge.
+ """
+
+ for key in incremental_json.keys():
+            # Merge json results except "tests" properties (results, times, etc.).
+ # "tests" properties will be handled separately.
+ if key == JSON_RESULTS_TESTS:
+ continue
+
+ if key in aggregated_json:
+ aggregated_json[key].insert(
+ 0, incremental_json[key][incremental_index])
+ aggregated_json[key] = \
+ aggregated_json[key][:JSON_RESULTS_MAX_BUILDS]
+ else:
+ aggregated_json[key] = incremental_json[key]
+
+ @classmethod
+ def _merge_tests(cls, aggregated_json, incremental_json):
+ """Merge "tests" properties:results, times.
+
+ Args:
+ aggregated_json: aggregated json object.
+ incremental_json: incremental json object.
+ """
+
+ for test_name in incremental_json:
+ incremental_test = incremental_json[test_name]
+ if test_name in aggregated_json:
+ aggregated_test = aggregated_json[test_name]
+ cls._insert_item_run_length_encoded(
+ incremental_test[JSON_RESULTS_RESULTS],
+ aggregated_test[JSON_RESULTS_RESULTS])
+ cls._insert_item_run_length_encoded(
+ incremental_test[JSON_RESULTS_TIMES],
+ aggregated_test[JSON_RESULTS_TIMES])
+ else:
+ aggregated_json[test_name] = incremental_test
+
+ @classmethod
+ def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item):
+ """Inserts the incremental run-length encoded results into the aggregated
+ run-length encoded results.
+
+ Args:
+ incremental_item: incremental run-length encoded results.
+ aggregated_item: aggregated run-length encoded results.
+ """
+
+ for item in incremental_item:
+ if len(aggregated_item) and item[1] == aggregated_item[0][1]:
+ aggregated_item[0][0] = min(
+ aggregated_item[0][0] + item[0], JSON_RESULTS_MAX_BUILDS)
+ else:
+                # The test item values need to be summed from continuous runs.
+                # If there is an older item (not the most recent one) whose
+                # value is the same as the one to insert, then we should
+                # remove the old item from the aggregated list.
+ for i in reversed(range(1, len(aggregated_item))):
+ if item[1] == aggregated_item[i][1]:
+ aggregated_item.pop(i)
+
+ aggregated_item.insert(0, item)
+
+ @classmethod
+ def _check_json(cls, builder, json):
+ """Check whether the given json is valid.
+
+ Args:
+ builder: builder name this json is for.
+ json: json object to check.
+
+ Returns:
+ True if the json is valid or
+ False otherwise.
+ """
+
+ version = json[JSON_RESULTS_VERSION_KEY]
+ if version > JSON_RESULTS_VERSION:
+ logging.error("Results JSON version '%s' is not supported.",
+ version)
+ return False
+
+        if builder not in json:
+ logging.error("Builder '%s' is not in json results.", builder)
+ return False
+
+ results_for_builder = json[builder]
+        if JSON_RESULTS_BUILD_NUMBERS not in results_for_builder:
+ logging.error("Missing build number in json results.")
+ return False
+
+ return True
+
+ @classmethod
+ def merge(cls, builder, aggregated, incremental, sort_keys=False):
+ """Merge incremental json file data with aggregated json file data.
+
+ Args:
+ builder: builder name.
+ aggregated: aggregated json file data.
+ incremental: incremental json file data.
+            sort_keys: whether or not to sort keys when dumping json results.
+
+ Returns:
+ Merged json file data if merge succeeds or
+ None on failure.
+ """
+
+ if not incremental:
+ logging.warning("Nothing to merge.")
+ return None
+
+ logging.info("Loading incremental json...")
+ incremental_json = cls._load_json(incremental)
+ if not incremental_json:
+ return None
+
+ logging.info("Checking incremental json...")
+ if not cls._check_json(builder, incremental_json):
+ return None
+
+ logging.info("Loading existing aggregated json...")
+ aggregated_json = cls._load_json(aggregated)
+ if not aggregated_json:
+ return incremental
+
+ logging.info("Checking existing aggregated json...")
+ if not cls._check_json(builder, aggregated_json):
+ return incremental
+
+ logging.info("Merging json results...")
+ try:
+ if not cls._merge_json(
+ aggregated_json[builder],
+ incremental_json[builder]):
+ return None
+ except Exception, err:
+ logging.error("Failed to merge json results: %s", str(err))
+ return None
+
+ aggregated_json[JSON_RESULTS_VERSION_KEY] = JSON_RESULTS_VERSION
+
+ return cls._generate_file_data(aggregated_json, sort_keys)
+
+ @classmethod
+ def update(cls, builder, test_type, incremental):
+ """Update datastore json file data by merging it with incremental json
+ file.
+
+ Args:
+ builder: builder name.
+ test_type: type of test results.
+ incremental: incremental json file data to merge.
+
+ Returns:
+ TestFile object if update succeeds or
+ None on failure.
+ """
+
+ files = TestFile.get_files(builder, test_type, JSON_RESULTS_FILE)
+ if files:
+ file = files[0]
+ new_results = cls.merge(builder, file.data, incremental)
+ else:
+ # Use the incremental data if there is no aggregated file to merge.
+ file = TestFile()
+ file.builder = builder
+ file.name = JSON_RESULTS_FILE
+ new_results = incremental
+ logging.info("No existing json results, incremental json is saved.")
+
+ if not new_results:
+ return None
+
+ if not file.save(new_results):
+ return None
+
+ return file
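The subtle part of the merge is the run-length encoding: each test's "results" and "times" are lists of [count, value] pairs, newest first, and _insert_item_run_length_encoded either extends the most recent run or inserts a new run at the front, dropping any older run of the same value so that runs stay continuous. A standalone sketch of the same rule on plain lists, matching the expected behavior in the unit tests below (the free-function form is illustrative):

    JSON_RESULTS_MAX_BUILDS = 750

    def insert_run_length_encoded(incremental_item, aggregated_item):
        # Same logic as JsonResults._insert_item_run_length_encoded.
        for item in incremental_item:
            if len(aggregated_item) and item[1] == aggregated_item[0][1]:
                # Same value as the most recent run: extend it, capped at
                # JSON_RESULTS_MAX_BUILDS.
                aggregated_item[0][0] = min(
                    aggregated_item[0][0] + item[0], JSON_RESULTS_MAX_BUILDS)
            else:
                # Runs must stay continuous, so drop any older run of this
                # value before inserting the new run at the front.
                for i in reversed(range(1, len(aggregated_item))):
                    if item[1] == aggregated_item[i][1]:
                        aggregated_item.pop(i)
                aggregated_item.insert(0, item)

    results = [[200, "P"], [10, "I"]]
    insert_run_length_encoded([[1, "I"]], results)
    assert results == [[1, "I"], [200, "P"]]  # the old [10, "I"] run is gone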
diff --git a/WebKitTools/TestResultServer/model/jsonresults_unittest.py b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
new file mode 100755
index 0000000..fd646c8
--- /dev/null
+++ b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
@@ -0,0 +1,256 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from jsonresults import JsonResults
+
+JSON_RESULTS_TEMPLATE = (
+ '{"Webkit":{'
+ '"allFixableCount":[[TESTDATA_COUNT]],'
+ '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
+ '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
+ '"deferredCounts":[[TESTDATA_COUNTS]],'
+ '"fixableCount":[[TESTDATA_COUNT]],'
+ '"fixableCounts":[[TESTDATA_COUNTS]],'
+ '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
+ '"tests":{[TESTDATA_TESTS]},'
+ '"webkitRevision":[[TESTDATA_WEBKITREVISION]],'
+ '"wontfixCounts":[[TESTDATA_COUNTS]]'
+ '},'
+ '"version":3'
+ '}')
+
+JSON_RESULTS_COUNTS_TEMPLATE = (
+ '{'
+ '"C":[TESTDATA],'
+ '"F":[TESTDATA],'
+ '"I":[TESTDATA],'
+ '"O":[TESTDATA],'
+ '"P":[TESTDATA],'
+ '"T":[TESTDATA],'
+ '"X":[TESTDATA],'
+ '"Z":[TESTDATA]}')
+
+JSON_RESULTS_TESTS_TEMPLATE = (
+ '"[TESTDATA_TEST_NAME]":{'
+ '"results":[[TESTDATA_TEST_RESULTS]],'
+ '"times":[[TESTDATA_TEST_TIMES]]}')
+
+JSON_RESULTS_PREFIX = "ADD_RESULTS("
+JSON_RESULTS_SUFFIX = ");"
+
+
+class JsonResultsTest(unittest.TestCase):
+ def setUp(self):
+ self._builder = "Webkit"
+
+ def _make_test_json(self, test_data):
+ if not test_data:
+ return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
+
+ (builds, tests) = test_data
+ if not builds or not tests:
+ return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
+
+ json = JSON_RESULTS_TEMPLATE
+
+ counts = []
+ build_numbers = []
+ webkit_revision = []
+ chrome_revision = []
+ times = []
+ for build in builds:
+ counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
+ build_numbers.append("1000%s" % build)
+ webkit_revision.append("2000%s" % build)
+ chrome_revision.append("3000%s" % build)
+ times.append("100000%s000" % build)
+
+ json = json.replace("[TESTDATA_COUNTS]", ",".join(counts))
+ json = json.replace("[TESTDATA_COUNT]", ",".join(builds))
+ json = json.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
+ json = json.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
+ json = json.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
+ json = json.replace("[TESTDATA_TIMES]", ",".join(times))
+
+ json_tests = []
+ for test in tests:
+ t = JSON_RESULTS_TESTS_TEMPLATE.replace("[TESTDATA_TEST_NAME]", test[0])
+ t = t.replace("[TESTDATA_TEST_RESULTS]", test[1])
+ t = t.replace("[TESTDATA_TEST_TIMES]", test[2])
+ json_tests.append(t)
+
+ json = json.replace("[TESTDATA_TESTS]", ",".join(json_tests))
+
+ return JSON_RESULTS_PREFIX + json + JSON_RESULTS_SUFFIX
+
+ def _test_merge(self, aggregated_data, incremental_data, expected_data):
+ aggregated_results = self._make_test_json(aggregated_data)
+ incremental_results = self._make_test_json(incremental_data)
+ merged_results = JsonResults.merge(self._builder,
+ aggregated_results, incremental_results, sort_keys=True)
+
+ if expected_data:
+ expected_results = self._make_test_json(expected_data)
+ self.assertEquals(merged_results, expected_results)
+ else:
+ self.assertFalse(merged_results)
+
+ def test(self):
+ # Empty incremental results json.
+ # Nothing to merge.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Incremental results
+ None,
+            # Expect no merge to happen.
+ None)
+
+ # No actual incremental test results (only prefix and suffix) to merge.
+ # Nothing to merge.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Incremental results
+ ([], []),
+            # Expect no merge to happen.
+ None)
+
+ # No existing aggregated results.
+ # Merged results == new incremental results.
+ self._test_merge(
+ # Aggregated results
+ None,
+ # Incremental results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Expected results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]))
+
+ # Single test for single run.
+        # Incremental results have the latest build and the same test results
+        # for that run.
+        # Insert the incremental results at the first place and sum the number
+        # of runs for "P" (200 + 1) to get the merged results.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"]]),
+ # Expected results
+ (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"]]))
+
+ # Single test for single run.
+        # Incremental results have the latest build but different test results
+ # for that run.
+ # Insert the incremental results at the first place.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1, \"I\"]", "[1,\"1\"]"]]),
+ # Expected results
+ (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+
+ # Single test for single run.
+        # Incremental results have the latest build but different test results
+        # for that run.
+        # The test "results" and "times" need to be continuous, so the old
+        # [10,"I"] result should be dropped because a new result of the same
+        # type [1,"I"] is inserted in front of [200,"P"].
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"],[10,\"I\"]", "[200,\"0\"],[10,\"1\"]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+ # Expected results
+ (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+
+ # Multiple tests for single run.
+ # All tests have incremental updates.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"]]),
+ # Incremental results
+ (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"], ["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+ # Expected results
+ (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"]]))
+
+ # Multiple tests for single run.
+        # Not all tests have updates.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"]]),
+ # Incremental results
+ (["3"], [["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+ # Expected results
+ (["3", "2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"]]))
+
+ # Single test for multiple runs.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Incremental results
+ (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"]]),
+ # Expected results
+ (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"]]))
+
+ # Multiple tests for multiple runs.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[10,\"Z\"]", "[10,\"0\"]"]]),
+ # Incremental results
+ (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"], ["002.html", "[1,\"C\"]", "[1,\"1\"]"]]),
+ # Expected results
+ (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,\"1\"],[10,\"0\"]"]]))
+
+        # Test that the build in the incremental results is older than the
+        # most recent build in the aggregated results.
+ # The incremental results should be dropped and no merge happens.
+ self._test_merge(
+ # Aggregated results
+ (["3", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Incremental results
+ (["2"], [["001.html", "[1, \"P\"]", "[1,\"0\"]"]]),
+            # Expect no merge to happen.
+ None)
+
+        # Test that a build in the incremental results is the same as the
+        # most recent build in the aggregated results.
+ # The incremental results should be dropped and no merge happens.
+ self._test_merge(
+ # Aggregated results
+ (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+ # Incremental results
+ (["3", "2"], [["001.html", "[2, \"P\"]", "[2,\"0\"]"]]),
+ # Expected no merge happens.
+ None)
+
+if __name__ == '__main__':
+ unittest.main()
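The ADD_RESULTS(...) wrapper the tests assemble by hand is the same JSONP wrapper that JsonResults._strip_prefix_suffix removes. A standalone sketch of that round trip, copying the stripping logic:

    JSON_RESULTS_PREFIX = "ADD_RESULTS("
    JSON_RESULTS_SUFFIX = ");"

    def strip_prefix_suffix(data):
        # Same slicing as JsonResults._strip_prefix_suffix.
        assert data.startswith(JSON_RESULTS_PREFIX)
        assert data.endswith(JSON_RESULTS_SUFFIX)
        return data[len(JSON_RESULTS_PREFIX):
                    len(data) - len(JSON_RESULTS_SUFFIX)]

    assert strip_prefix_suffix('ADD_RESULTS({"version":3});') == '{"version":3}'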
diff --git a/WebKitTools/TestResultServer/model/testfile.py b/WebKitTools/TestResultServer/model/testfile.py
index 35ab967..ce92b65 100644
--- a/WebKitTools/TestResultServer/model/testfile.py
+++ b/WebKitTools/TestResultServer/model/testfile.py
@@ -3,7 +3,7 @@
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
-#
+#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
-#
+#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -29,16 +29,14 @@
 from datetime import datetime
 import logging
 
-from google.appengine.ext import blobstore
 from google.appengine.ext import db
 
+from model.datastorefile import DataStoreFile
 
-class TestFile(db.Model):
+
+class TestFile(DataStoreFile):
     builder = db.StringProperty()
-    name = db.StringProperty()
     test_type = db.StringProperty()
-    blob_key = db.StringProperty()
-    date = db.DateTimeProperty(auto_now_add=True)
 
     @classmethod
     def delete_file(cls, key, builder, test_type, name, limit):
@@ -63,7 +61,7 @@ class TestFile(db.Model):
         return True
 
     @classmethod
-    def get_files(cls, builder, test_type, name, limit):
+    def get_files(cls, builder, test_type, name, load_data=True, limit=1):
         query = TestFile.all()
         if builder:
             query = query.filter("builder =", builder)
@@ -72,51 +70,54 @@ class TestFile(db.Model):
         if name:
             query = query.filter("name =", name)
 
-        return query.order("-date").fetch(limit)
+        files = query.order("-date").fetch(limit)
+        if load_data:
+            for file in files:
+                file.load_data()
+
+        return files
 
     @classmethod
-    def add_file(cls, builder, test_type, blob_info):
+    def add_file(cls, builder, test_type, name, data):
         file = TestFile()
         file.builder = builder
         file.test_type = test_type
-        file.name = blob_info.filename
-        file.blob_key = str(blob_info.key())
-        file.put()
+        file.name = name
+
+        if not file.save(data):
+            return None
 
         logging.info(
-            "File saved, builder: %s, test_type: %s, name: %s, blob key: %s.",
-            builder, test_type, file.name, file.blob_key)
+            "File saved, builder: %s, test_type: %s, name: %s, key: %s.",
+            builder, test_type, file.name, str(file.data_keys))
 
         return file
 
     @classmethod
-    def update_file(cls, builder, test_type, blob_info):
-        files = cls.get_files(builder, test_type, blob_info.filename, 1)
+    def update(cls, builder, test_type, name, data):
+        files = cls.get_files(builder, test_type, name)
         if not files:
-            return cls.add_file(builder, test_type, blob_info)
+            return cls.add_file(builder, test_type, name, data)
 
         file = files[0]
-        old_blob_info = blobstore.BlobInfo.get(file.blob_key)
-        if old_blob_info:
-            old_blob_info.delete()
-
-        file.builder = builder
-        file.test_type = test_type
-        file.name = blob_info.filename
-        file.blob_key = str(blob_info.key())
-        file.date = datetime.now()
-        file.put()
+        if not file.save(data):
+            return None
 
         logging.info(
-            "File replaced, builder: %s, test_type: %s, name: %s, blob key: %s.",
-            builder, test_type, file.name, file.blob_key)
+            "File replaced, builder: %s, test_type: %s, name: %s, data key: %s.",
+            builder, test_type, file.name, str(file.data_keys))
 
         return file
 
-    def _delete_all(self):
-        if self.blob_key:
-            blob_info = blobstore.BlobInfo.get(self.blob_key)
-            if blob_info:
-                blob_info.delete()
+    def save(self, data):
+        if not self.save_data(data):
+            return False
+
+        self.date = datetime.now()
+        self.put()
+        return True
+
+    def _delete_all(self):
+        self.delete_data()
         self.delete()
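Taken together, the reworked TestFile stores raw upload bytes through DataStoreFile instead of referencing blobstore objects. A hypothetical caller, sketched only from the signatures in this patch (the actual request handlers are outside this diff, and receive_upload is an assumed name):

    from model.jsonresults import JsonResults
    from model.testfile import TestFile

    def receive_upload(builder, test_type, name, data):
        # Hypothetical handler glue, not part of this patch.
        if name == "results.json":
            # results.json is merged incrementally rather than replaced.
            return JsonResults.update(builder, test_type, data)
        # Any other file simply replaces the previous upload of that name.
        return TestFile.update(builder, test_type, name, data)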