path: root/WebKitTools/TestResultServer/model
Diffstat (limited to 'WebKitTools/TestResultServer/model')
-rw-r--r--  WebKitTools/TestResultServer/model/__init__.py                1
-rw-r--r--  WebKitTools/TestResultServer/model/dashboardfile.py         119
-rwxr-xr-x  WebKitTools/TestResultServer/model/datastorefile.py         150
-rwxr-xr-x  WebKitTools/TestResultServer/model/jsonresults.py           466
-rwxr-xr-x  WebKitTools/TestResultServer/model/jsonresults_unittest.py  322
-rw-r--r--  WebKitTools/TestResultServer/model/testfile.py              127
6 files changed, 0 insertions, 1185 deletions
diff --git a/WebKitTools/TestResultServer/model/__init__.py b/WebKitTools/TestResultServer/model/__init__.py
deleted file mode 100644
index ef65bee..0000000
--- a/WebKitTools/TestResultServer/model/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# Required for Python to search this directory for module files
diff --git a/WebKitTools/TestResultServer/model/dashboardfile.py b/WebKitTools/TestResultServer/model/dashboardfile.py
deleted file mode 100644
index aad6d50..0000000
--- a/WebKitTools/TestResultServer/model/dashboardfile.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright (C) 2010 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-from datetime import datetime
-import logging
-import urllib
-import urllib2
-
-from google.appengine.ext import db
-
-SVN_PATH_DASHBOARD = ("http://src.chromium.org/viewvc/chrome/trunk/tools/"
- "dashboards/")
-
-
-class DashboardFile(db.Model):
- name = db.StringProperty()
- data = db.BlobProperty()
- date = db.DateTimeProperty(auto_now_add=True)
-
- @classmethod
- def get_files(cls, name, limit=1):
- query = DashboardFile.all()
- if name:
- query = query.filter("name =", name)
- return query.order("-date").fetch(limit)
-
- @classmethod
- def add_file(cls, name, data):
- file = DashboardFile()
- file.name = name
- file.data = db.Blob(data)
- file.put()
-
- logging.debug("Dashboard file saved, name: %s.", name)
-
- return file
-
- @classmethod
- def grab_file_from_svn(cls, name):
- logging.debug("Grab file from SVN, name: %s.", name)
-
- url = SVN_PATH_DASHBOARD + urllib.quote_plus(name)
-
- logging.info("Grab file from SVN, url: %s.", url)
- try:
- file = urllib2.urlopen(url)
- if not file:
- logging.error("Failed to grab dashboard file: %s.", url)
- return None
-
- return file.read()
- except urllib2.HTTPError, e:
- logging.error("Failed to grab dashboard file: %s", str(e))
- except urllib2.URLError, e:
- logging.error("Failed to grab dashboard file: %s", str(e))
-
- return None
-
- @classmethod
- def update_file(cls, name):
- data = cls.grab_file_from_svn(name)
- if not data:
- return False
-
- logging.info("Got file from SVN.")
-
- files = cls.get_files(name)
- if not files:
- logging.info("No existing file, added as new file.")
- if cls.add_file(name, data):
- return True
- return False
-
- logging.debug("Updating existing file.")
- file = files[0]
- file.data = data
- file.date = datetime.now()
- file.put()
-
- logging.info("Dashboard file replaced, name: %s.", name)
-
- return True
-
- @classmethod
- def delete_file(cls, name):
- files = cls.get_files(name)
- if not files:
- logging.warning("File not found, name: %s.", name)
- return False
-
- for file in files:
- file.delete()
-
- return True
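
The deleted DashboardFile model above mirrored dashboard files from Chromium SVN
into the App Engine datastore, keeping the newest copy per name. Below is a
minimal usage sketch, assuming the Python 2 App Engine runtime; the
UpdateDashboardHandler class and its query parameter are hypothetical, and only
the DashboardFile call comes from the file above.

    from google.appengine.ext import webapp

    from model.dashboardfile import DashboardFile


    class UpdateDashboardHandler(webapp.RequestHandler):
        """Hypothetical handler; only the DashboardFile call is from above."""

        def get(self):
            name = self.request.get("file")  # e.g. "flakiness_dashboard.html"
            # update_file() re-fetches |name| from SVN and replaces the newest
            # stored copy, or adds a new entity if none exists yet.
            if DashboardFile.update_file(name):
                self.response.out.write("Dashboard file updated: %s." % name)
            else:
                self.error(500)
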
diff --git a/WebKitTools/TestResultServer/model/datastorefile.py b/WebKitTools/TestResultServer/model/datastorefile.py
deleted file mode 100755
index ac28d64..0000000
--- a/WebKitTools/TestResultServer/model/datastorefile.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright (C) 2010 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-from datetime import datetime
-import logging
-
-from google.appengine.ext import db
-
-MAX_DATA_ENTRY_PER_FILE = 10
-MAX_ENTRY_LEN = 1000 * 1000
-
-
-class DataEntry(db.Model):
-    """Datastore entry that stores one segment of file data
- (<1000*1000 bytes).
- """
-
- data = db.BlobProperty()
-
- @classmethod
- def get(cls, key):
- return db.get(key)
-
- def get_data(self, key):
- return db.get(key)
-
-
-class DataStoreFile(db.Model):
-    """This class stores a file in the datastore.
- If a file is oversize (>1000*1000 bytes), the file is split into
- multiple segments and stored in multiple datastore entries.
- """
-
- name = db.StringProperty()
- data_keys = db.ListProperty(db.Key)
-    # Keys to the datastore entries that can be reused for new data.
-    # If it is empty, create a new DataEntry.
- new_data_keys = db.ListProperty(db.Key)
- date = db.DateTimeProperty(auto_now_add=True)
-
- data = None
-
- def delete_data(self, keys=None):
- if not keys:
- keys = self.data_keys
-
- for key in keys:
- data_entry = DataEntry.get(key)
- if data_entry:
- data_entry.delete()
-
- def save_data(self, data):
- if not data:
- logging.warning("No data to save.")
- return False
-
- if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
- logging.error("File too big, can't save to datastore: %dK",
- len(data) / 1024)
- return False
-
- start = 0
-        # Use the new_data_keys to store the new data. If all new data are
-        # saved successfully, swap new_data_keys and data_keys so we can reuse
-        # the data_keys entries in the next run. If saving any new data fails,
-        # only the data pointed to by new_data_keys may be corrupted; the
-        # existing data_keys data remains untouched. The corrupted data in
-        # new_data_keys will be overwritten in the next update.
- keys = self.new_data_keys
- self.new_data_keys = []
-
- while start < len(data):
- if keys:
- key = keys[0]
- data_entry = DataEntry.get(key)
- if not data_entry:
- logging.warning("Found key, but no data entry: %s", key)
- data_entry = DataEntry()
- else:
- data_entry = DataEntry()
-
- data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
- try:
- data_entry.put()
- except Exception, err:
- logging.error("Failed to save data store entry: %s", err)
- if keys:
- self.delete_data(keys)
- return False
-
- logging.info("Data saved: %s.", data_entry.key())
- self.new_data_keys.append(data_entry.key())
- if keys:
- keys.pop(0)
-
- start = start + MAX_ENTRY_LEN
-
- if keys:
- self.delete_data(keys)
-
- temp_keys = self.data_keys
- self.data_keys = self.new_data_keys
- self.new_data_keys = temp_keys
- self.data = data
-
- return True
-
- def load_data(self):
- if not self.data_keys:
- logging.warning("No data to load.")
- return None
-
- data = []
- for key in self.data_keys:
- logging.info("Loading data for key: %s.", key)
- data_entry = DataEntry.get(key)
- if not data_entry:
- logging.error("No data found for key: %s.", key)
- return None
-
- data.append(data_entry.data)
-
- self.data = "".join(data)
-
- return self.data
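
DataStoreFile works around the datastore's per-entity size limit by splitting a
file across up to MAX_DATA_ENTRY_PER_FILE DataEntry blobs, and keeps two key
lists so a failed write never corrupts the last good copy. Below is a
self-contained sketch of just the chunking arithmetic, assuming the same
constants as above; split_into_segments is an illustrative helper, not part of
the deleted module.

    MAX_DATA_ENTRY_PER_FILE = 10
    MAX_ENTRY_LEN = 1000 * 1000


    def split_into_segments(data):
        """Cut |data| into chunks of at most MAX_ENTRY_LEN bytes, one
        DataEntry per chunk; None if the per-file budget is exceeded."""
        if len(data) > MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN:
            return None
        return [data[start:start + MAX_ENTRY_LEN]
                for start in range(0, len(data), MAX_ENTRY_LEN)]


    # 2.5 MB splits into two full segments plus a 500,000-byte remainder.
    assert [len(s) for s in split_into_segments("x" * 2500000)] == \
        [1000000, 1000000, 500000]
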
diff --git a/WebKitTools/TestResultServer/model/jsonresults.py b/WebKitTools/TestResultServer/model/jsonresults.py
deleted file mode 100755
index f5a0fde..0000000
--- a/WebKitTools/TestResultServer/model/jsonresults.py
+++ /dev/null
@@ -1,466 +0,0 @@
-# Copyright (C) 2010 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-from datetime import datetime
-from django.utils import simplejson
-import logging
-
-from model.testfile import TestFile
-
-JSON_RESULTS_FILE = "results.json"
-JSON_RESULTS_FILE_SMALL = "results-small.json"
-JSON_RESULTS_PREFIX = "ADD_RESULTS("
-JSON_RESULTS_SUFFIX = ");"
-JSON_RESULTS_VERSION_KEY = "version"
-JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
-JSON_RESULTS_TESTS = "tests"
-JSON_RESULTS_RESULTS = "results"
-JSON_RESULTS_TIMES = "times"
-JSON_RESULTS_PASS = "P"
-JSON_RESULTS_NO_DATA = "N"
-JSON_RESULTS_MIN_TIME = 1
-JSON_RESULTS_VERSION = 3
-JSON_RESULTS_MAX_BUILDS = 750
-JSON_RESULTS_MAX_BUILDS_SMALL = 200
-
-
-class JsonResults(object):
- @classmethod
- def _strip_prefix_suffix(cls, data):
- """Strip out prefix and suffix of json results string.
-
- Args:
- data: json file content.
-
- Returns:
- json string without prefix and suffix.
- """
-
- assert(data.startswith(JSON_RESULTS_PREFIX))
- assert(data.endswith(JSON_RESULTS_SUFFIX))
-
- return data[len(JSON_RESULTS_PREFIX):
- len(data) - len(JSON_RESULTS_SUFFIX)]
-
- @classmethod
- def _generate_file_data(cls, json, sort_keys=False):
- """Given json string, generate file content data by adding
- prefix and suffix.
-
- Args:
- json: json string without prefix and suffix.
-
- Returns:
- json file data.
- """
-
- data = simplejson.dumps(json, separators=(',', ':'),
- sort_keys=sort_keys)
- return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX
-
- @classmethod
- def _load_json(cls, file_data):
- """Load json file to a python object.
-
- Args:
- file_data: json file content.
-
- Returns:
- json object or
- None on failure.
- """
-
- json_results_str = cls._strip_prefix_suffix(file_data)
- if not json_results_str:
- logging.warning("No json results data.")
- return None
-
- try:
- return simplejson.loads(json_results_str)
- except Exception, err:
- logging.debug(json_results_str)
- logging.error("Failed to load json results: %s", str(err))
- return None
-
- @classmethod
- def _merge_json(cls, aggregated_json, incremental_json, num_runs):
- """Merge incremental json into aggregated json results.
-
- Args:
- aggregated_json: aggregated json object.
- incremental_json: incremental json object.
-
- Returns:
- True if merge succeeds or
- False on failure.
- """
-
-        # Merge non-test property data.
-        # Test properties are merged in _merge_tests.
- if not cls._merge_non_test_data(aggregated_json, incremental_json, num_runs):
- return False
-
- # Merge tests results and times
- incremental_tests = incremental_json[JSON_RESULTS_TESTS]
- if incremental_tests:
- aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
- cls._merge_tests(aggregated_tests, incremental_tests, num_runs)
-
- return True
-
- @classmethod
- def _merge_non_test_data(cls, aggregated_json, incremental_json, num_runs):
-        """Merge incremental non-test property data into aggregated json results.
-
- Args:
- aggregated_json: aggregated json object.
- incremental_json: incremental json object.
-
- Returns:
- True if merge succeeds or
- False on failure.
- """
-
- incremental_builds = incremental_json[JSON_RESULTS_BUILD_NUMBERS]
- aggregated_builds = aggregated_json[JSON_RESULTS_BUILD_NUMBERS]
- aggregated_build_number = int(aggregated_builds[0])
- # Loop through all incremental builds, start from the oldest run.
- for index in reversed(range(len(incremental_builds))):
- build_number = int(incremental_builds[index])
- logging.debug("Merging build %s, incremental json index: %d.",
- build_number, index)
-
- # Return if not all build numbers in the incremental json results
- # are newer than the most recent build in the aggregated results.
- # FIXME: make this case work.
- if build_number < aggregated_build_number:
- logging.warning(("Build %d in incremental json is older than "
- "the most recent build in aggregated results: %d"),
- build_number, aggregated_build_number)
- return False
-
- # Return if the build number is duplicated.
-            # FIXME: skip the duplicated build and merge the rest of the results.
-            # Need to be careful when skipping the corresponding value in
- # _merge_tests because the property data for each test could
- # be accumulated.
- if build_number == aggregated_build_number:
- logging.warning("Duplicate build %d in incremental json",
- build_number)
- return False
-
-            # Merge this build into aggregated results.
- cls._merge_one_build(aggregated_json, incremental_json, index, num_runs)
-
- return True
-
- @classmethod
- def _merge_one_build(cls, aggregated_json, incremental_json,
- incremental_index, num_runs):
- """Merge one build of incremental json into aggregated json results.
-
- Args:
- aggregated_json: aggregated json object.
- incremental_json: incremental json object.
- incremental_index: index of the incremental json results to merge.
- """
-
- for key in incremental_json.keys():
- # Merge json results except "tests" properties (results, times etc).
- # "tests" properties will be handled separately.
- if key == JSON_RESULTS_TESTS:
- continue
-
- if key in aggregated_json:
- aggregated_json[key].insert(
- 0, incremental_json[key][incremental_index])
- aggregated_json[key] = \
- aggregated_json[key][:num_runs]
- else:
- aggregated_json[key] = incremental_json[key]
-
- @classmethod
- def _merge_tests(cls, aggregated_json, incremental_json, num_runs):
-        """Merge "tests" properties: results, times.
-
- Args:
- aggregated_json: aggregated json object.
- incremental_json: incremental json object.
- """
-
- all_tests = (set(aggregated_json.iterkeys()) |
- set(incremental_json.iterkeys()))
- for test_name in all_tests:
- if test_name in aggregated_json:
- aggregated_test = aggregated_json[test_name]
- if test_name in incremental_json:
- incremental_test = incremental_json[test_name]
- results = incremental_test[JSON_RESULTS_RESULTS]
- times = incremental_test[JSON_RESULTS_TIMES]
- else:
- results = [[1, JSON_RESULTS_NO_DATA]]
- times = [[1, 0]]
-
- cls._insert_item_run_length_encoded(
- results, aggregated_test[JSON_RESULTS_RESULTS], num_runs)
- cls._insert_item_run_length_encoded(
- times, aggregated_test[JSON_RESULTS_TIMES], num_runs)
- cls._normalize_results_json(test_name, aggregated_json)
- else:
- aggregated_json[test_name] = incremental_json[test_name]
-
- @classmethod
- def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item, num_runs):
- """Inserts the incremental run-length encoded results into the aggregated
- run-length encoded results.
-
- Args:
- incremental_item: incremental run-length encoded results.
- aggregated_item: aggregated run-length encoded results.
- """
-
- for item in incremental_item:
- if len(aggregated_item) and item[1] == aggregated_item[0][1]:
- aggregated_item[0][0] = min(
- aggregated_item[0][0] + item[0], num_runs)
- else:
- aggregated_item.insert(0, item)
-
- @classmethod
- def _normalize_results_json(cls, test_name, aggregated_json):
- """ Prune tests where all runs pass or tests that no longer exist and
- truncate all results to JSON_RESULTS_MAX_BUILDS.
-
- Args:
- test_name: Name of the test.
- aggregated_json: The JSON object with all the test results for
- this builder.
- """
-
- aggregated_test = aggregated_json[test_name]
- aggregated_test[JSON_RESULTS_RESULTS] = \
- cls._remove_items_over_max_number_of_builds(
- aggregated_test[JSON_RESULTS_RESULTS])
- aggregated_test[JSON_RESULTS_TIMES] = \
- cls._remove_items_over_max_number_of_builds(
- aggregated_test[JSON_RESULTS_TIMES])
-
- is_all_pass = cls._is_results_all_of_type(
- aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_PASS)
- is_all_no_data = cls._is_results_all_of_type(
- aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_NO_DATA)
-
- max_time = max(
- [time[1] for time in aggregated_test[JSON_RESULTS_TIMES]])
- # Remove all passes/no-data from the results to reduce noise and
- # filesize. If a test passes every run, but
- # takes >= JSON_RESULTS_MIN_TIME to run, don't throw away the data.
- if (is_all_no_data or
- (is_all_pass and max_time < JSON_RESULTS_MIN_TIME)):
- del aggregated_json[test_name]
-
- @classmethod
- def _remove_items_over_max_number_of_builds(cls, encoded_list):
- """Removes items from the run-length encoded list after the final
- item that exceeds the max number of builds to track.
-
- Args:
-            encoded_list: run-length encoded results. An array of arrays, e.g.
- [[3,'A'],[1,'Q']] encodes AAAQ.
- """
- num_builds = 0
- index = 0
- for result in encoded_list:
- num_builds = num_builds + result[0]
- index = index + 1
- if num_builds > JSON_RESULTS_MAX_BUILDS:
- return encoded_list[:index]
-
- return encoded_list
-
- @classmethod
- def _is_results_all_of_type(cls, results, type):
- """Returns whether all the results are of the given type
- (e.g. all passes).
- """
-
- return len(results) == 1 and results[0][1] == type
-
- @classmethod
- def _check_json(cls, builder, json):
- """Check whether the given json is valid.
-
- Args:
- builder: builder name this json is for.
- json: json object to check.
-
- Returns:
- True if the json is valid or
- False otherwise.
- """
-
- version = json[JSON_RESULTS_VERSION_KEY]
- if version > JSON_RESULTS_VERSION:
- logging.error("Results JSON version '%s' is not supported.",
- version)
- return False
-
- if not builder in json:
- logging.error("Builder '%s' is not in json results.", builder)
- return False
-
- results_for_builder = json[builder]
- if not JSON_RESULTS_BUILD_NUMBERS in results_for_builder:
- logging.error("Missing build number in json results.")
- return False
-
- return True
-
- @classmethod
- def merge(cls, builder, aggregated, incremental, num_runs, sort_keys=False):
- """Merge incremental json file data with aggregated json file data.
-
- Args:
- builder: builder name.
- aggregated: aggregated json file data.
- incremental: incremental json file data.
-            sort_keys: whether or not to sort keys when dumping json results.
-
- Returns:
- Merged json file data if merge succeeds or
- None on failure.
- """
-
- if not incremental:
- logging.warning("Nothing to merge.")
- return None
-
- logging.info("Loading incremental json...")
- incremental_json = cls._load_json(incremental)
- if not incremental_json:
- return None
-
- logging.info("Checking incremental json...")
- if not cls._check_json(builder, incremental_json):
- return None
-
- logging.info("Loading existing aggregated json...")
- aggregated_json = cls._load_json(aggregated)
- if not aggregated_json:
- return incremental
-
- logging.info("Checking existing aggregated json...")
- if not cls._check_json(builder, aggregated_json):
- return incremental
-
- logging.info("Merging json results...")
- try:
- if not cls._merge_json(aggregated_json[builder], incremental_json[builder], num_runs):
- return None
- except Exception, err:
- logging.error("Failed to merge json results: %s", str(err))
- return None
-
- aggregated_json[JSON_RESULTS_VERSION_KEY] = JSON_RESULTS_VERSION
-
- return cls._generate_file_data(aggregated_json, sort_keys)
-
- @classmethod
- def update(cls, master, builder, test_type, incremental):
- """Update datastore json file data by merging it with incremental json
- file. Writes the large file and a small file. The small file just stores
- fewer runs.
-
- Args:
- master: master name.
- builder: builder name.
- test_type: type of test results.
- incremental: incremental json file data to merge.
-
- Returns:
- Large TestFile object if update succeeds or
- None on failure.
- """
- small_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE_SMALL, JSON_RESULTS_MAX_BUILDS_SMALL)
- large_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE, JSON_RESULTS_MAX_BUILDS)
-
- return small_file_updated and large_file_updated
-
- @classmethod
- def update_file(cls, master, builder, test_type, incremental, filename, num_runs):
- files = TestFile.get_files(master, builder, test_type, filename)
- if files:
- file = files[0]
- new_results = cls.merge(builder, file.data, incremental, num_runs)
- else:
- # Use the incremental data if there is no aggregated file to merge.
- file = TestFile()
- file.master = master
- file.builder = builder
- file.test_type = test_type
- file.name = filename
- new_results = incremental
- logging.info("No existing json results, incremental json is saved.")
-
- if not new_results or not file.save(new_results):
- logging.info(
- "Update failed, master: %s, builder: %s, test_type: %s, name: %s." %
- (master, builder, test_type, filename))
- return False
-
- return True
-
- @classmethod
- def get_test_list(cls, builder, json_file_data):
- """Get list of test names from aggregated json file data.
-
- Args:
- json_file_data: json file data that has all test-data and
- non-test-data.
-
- Returns:
- json file with test name list only. The json format is the same
- as the one saved in datastore, but all non-test-data and test detail
- results are removed.
- """
-
- logging.debug("Loading test results json...")
- json = cls._load_json(json_file_data)
- if not json:
- return None
-
- logging.debug("Checking test results json...")
- if not cls._check_json(builder, json):
- return None
-
- test_list_json = {}
- tests = json[builder][JSON_RESULTS_TESTS]
- test_list_json[builder] = {
- "tests": dict.fromkeys(tests, {})}
-
- return cls._generate_file_data(test_list_json)
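
The heart of jsonresults.py is its newest-first run-length encoding: as the
docstring above notes, [[3,'A'],[1,'Q']] encodes the result string AAAQ. Below
is a standalone sketch of the merge step that _insert_item_run_length_encoded
performs, with the same semantics as the method above; the function name is
illustrative.

    def insert_run_length_encoded(incremental, aggregated, num_runs):
        """Prepend incremental [count, value] runs onto the newest-first
        aggregated list, folding into the head run when the value matches."""
        for count, value in incremental:
            if aggregated and value == aggregated[0][1]:
                # Same outcome as the newest aggregated run: extend it,
                # capped at the number of runs this file tracks.
                aggregated[0][0] = min(aggregated[0][0] + count, num_runs)
            else:
                aggregated.insert(0, [count, value])
        return aggregated


    # One new failure folds into an existing run of 200 failures...
    assert insert_run_length_encoded([[1, "F"]], [[200, "F"]], 750) == [[201, "F"]]
    # ...while a different outcome starts a new head run.
    assert insert_run_length_encoded([[1, "I"]], [[200, "F"]], 750) == \
        [[1, "I"], [200, "F"]]
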
diff --git a/WebKitTools/TestResultServer/model/jsonresults_unittest.py b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
deleted file mode 100755
index c70b90c..0000000
--- a/WebKitTools/TestResultServer/model/jsonresults_unittest.py
+++ /dev/null
@@ -1,322 +0,0 @@
-# Copyright (C) 2010 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-try:
- import jsonresults
- from jsonresults import JsonResults
-except ImportError:
- print "ERROR: Add the TestResultServer, google_appengine and yaml/lib directories to your PYTHONPATH"
-
-import unittest
-
-
-JSON_RESULTS_TEMPLATE = (
- '{"Webkit":{'
- '"allFixableCount":[[TESTDATA_COUNT]],'
- '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
- '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
- '"deferredCounts":[[TESTDATA_COUNTS]],'
- '"fixableCount":[[TESTDATA_COUNT]],'
- '"fixableCounts":[[TESTDATA_COUNTS]],'
- '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
- '"tests":{[TESTDATA_TESTS]},'
- '"webkitRevision":[[TESTDATA_WEBKITREVISION]],'
- '"wontfixCounts":[[TESTDATA_COUNTS]]'
- '},'
- '"version":3'
- '}')
-
-JSON_RESULTS_COUNTS_TEMPLATE = (
- '{'
- '"C":[TESTDATA],'
- '"F":[TESTDATA],'
- '"I":[TESTDATA],'
- '"O":[TESTDATA],'
- '"P":[TESTDATA],'
- '"T":[TESTDATA],'
- '"X":[TESTDATA],'
- '"Z":[TESTDATA]}')
-
-JSON_RESULTS_TESTS_TEMPLATE = (
- '"[TESTDATA_TEST_NAME]":{'
- '"results":[[TESTDATA_TEST_RESULTS]],'
- '"times":[[TESTDATA_TEST_TIMES]]}')
-
-JSON_RESULTS_PREFIX = "ADD_RESULTS("
-JSON_RESULTS_SUFFIX = ");"
-
-JSON_RESULTS_TEST_LIST_TEMPLATE = (
- '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}')
-
-
-class JsonResultsTest(unittest.TestCase):
- def setUp(self):
- self._builder = "Webkit"
-
- def _make_test_json(self, test_data):
- if not test_data:
- return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
-
- (builds, tests) = test_data
- if not builds or not tests:
- return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
-
- json = JSON_RESULTS_TEMPLATE
-
- counts = []
- build_numbers = []
- webkit_revision = []
- chrome_revision = []
- times = []
- for build in builds:
- counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
- build_numbers.append("1000%s" % build)
- webkit_revision.append("2000%s" % build)
- chrome_revision.append("3000%s" % build)
- times.append("100000%s000" % build)
-
- json = json.replace("[TESTDATA_COUNTS]", ",".join(counts))
- json = json.replace("[TESTDATA_COUNT]", ",".join(builds))
- json = json.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
- json = json.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
- json = json.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
- json = json.replace("[TESTDATA_TIMES]", ",".join(times))
-
- json_tests = []
- for test in tests:
- t = JSON_RESULTS_TESTS_TEMPLATE.replace("[TESTDATA_TEST_NAME]", test[0])
- t = t.replace("[TESTDATA_TEST_RESULTS]", test[1])
- t = t.replace("[TESTDATA_TEST_TIMES]", test[2])
- json_tests.append(t)
-
- json = json.replace("[TESTDATA_TESTS]", ",".join(json_tests))
-
- return JSON_RESULTS_PREFIX + json + JSON_RESULTS_SUFFIX
-
- def _test_merge(self, aggregated_data, incremental_data, expected_data):
- aggregated_results = self._make_test_json(aggregated_data)
- incremental_results = self._make_test_json(incremental_data)
- merged_results = JsonResults.merge(self._builder,
- aggregated_results, incremental_results, jsonresults.JSON_RESULTS_MAX_BUILDS,
- sort_keys=True)
-
- if expected_data:
- expected_results = self._make_test_json(expected_data)
- self.assertEquals(merged_results, expected_results)
- else:
- self.assertFalse(merged_results)
-
- def _test_get_test_list(self, input_data, expected_data):
- input_results = self._make_test_json(input_data)
-
- json_tests = []
- for test in expected_data:
- json_tests.append("\"" + test + "\":{}")
-
- expected_results = JSON_RESULTS_PREFIX + \
- JSON_RESULTS_TEST_LIST_TEMPLATE.replace(
- "[TESTDATA_TESTS]", ",".join(json_tests)) + \
- JSON_RESULTS_SUFFIX
-
- actual_results = JsonResults.get_test_list(self._builder, input_results)
- self.assertEquals(actual_results, expected_results)
-
- def test(self):
- # Empty incremental results json.
- # Nothing to merge.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Incremental results
- None,
- # Expect no merge happens.
- None)
-
- # No actual incremental test results (only prefix and suffix) to merge.
- # Nothing to merge.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Incremental results
- ([], []),
- # Expected no merge happens.
- None)
-
- # No existing aggregated results.
- # Merged results == new incremental results.
- self._test_merge(
- # Aggregated results
- None,
- # Incremental results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Expected results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]))
-
- # Single test for single run.
- # Incremental results has the latest build and same test results for
- # that run.
-        # Insert the incremental results at the front and sum the number
-        # of runs for "F" (200 + 1) to get the merged results.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Incremental results
- (["3"], [["001.html", "[1,\"F\"]", "[1,0]"]]),
- # Expected results
- (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"]]))
-
- # Single test for single run.
- # Incremental results has the latest build but different test results
- # for that run.
-        # Insert the incremental results at the front.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Incremental results
- (["3"], [["001.html", "[1, \"I\"]", "[1,1]"]]),
- # Expected results
- (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"]", "[1,1],[200,0]"]]))
-
- # Single test for single run.
- # Incremental results has the latest build but different test results
- # for that run.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"],[10,\"I\"]", "[200,0],[10,1]"]]),
- # Incremental results
- (["3"], [["001.html", "[1,\"I\"]", "[1,1]"]]),
- # Expected results
- (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"],[10,\"I\"]", "[1,1],[200,0],[10,1]"]]))
-
- # Multiple tests for single run.
- # All tests have incremental updates.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
- # Incremental results
- (["3"], [["001.html", "[1,\"F\"]", "[1,0]"], ["002.html", "[1,\"I\"]", "[1,1]"]]),
- # Expected results
- (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))
-
- # Multiple tests for single run.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
- # Incremental results
- (["3"], [["002.html", "[1,\"I\"]", "[1,1]"]]),
- # Expected results
- (["3", "2", "1"], [["001.html", "[1,\"N\"],[200,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))
-
- # Single test for multiple runs.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Incremental results
- (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"]]),
- # Expected results
- (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"]]))
-
- # Multiple tests for multiple runs.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[10,\"Z\"]", "[10,0]"]]),
- # Incremental results
- (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"], ["002.html", "[1,\"C\"]", "[1,1]"]]),
- # Expected results
- (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,1],[10,0]"]]))
-
-        # Test when the build in incremental results is older than the most recent
- # build in aggregated results.
- # The incremental results should be dropped and no merge happens.
- self._test_merge(
- # Aggregated results
- (["3", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Incremental results
- (["2"], [["001.html", "[1, \"F\"]", "[1,0]"]]),
- # Expected no merge happens.
- None)
-
-        # Test when the build in incremental results is the same as the build in
- # aggregated results.
- # The incremental results should be dropped and no merge happens.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
- # Incremental results
- (["3", "2"], [["001.html", "[2, \"F\"]", "[2,0]"]]),
- # Expected no merge happens.
- None)
-
-        # Remove tests that have no data in any run.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"N\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
- # Incremental results
- (["3"], [["001.html", "[1,\"N\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
- # Expected results
- (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
-
-        # Remove tests where all runs pass and max running time < 1 second.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
- # Incremental results
- (["3"], [["001.html", "[1,\"P\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
- # Expected results
- (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
-
-        # Do not remove tests where all runs pass but max running time >= 1 second.
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
- # Incremental results
- (["3"], [["001.html", "[1,\"P\"]", "[1,1]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
- # Expected results
- (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[1,1],[200,0]"], ["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
-
- # Remove items from test results and times that exceeds the max number
- # of builds to track.
- max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS)
- self._test_merge(
- # Aggregated results
- (["2", "1"], [["001.html", "[" + max_builds + ",\"F\"],[1,\"I\"]", "[" + max_builds + ",0],[1,1]"]]),
- # Incremental results
- (["3"], [["001.html", "[1,\"T\"]", "[1,1]"]]),
- # Expected results
- (["3", "2", "1"], [["001.html", "[1,\"T\"],[" + max_builds + ",\"F\"]", "[1,1],[" + max_builds + ",0]"]]))
-
-        # Get the test name list only. Don't include non-test data or
-        # test result details.
- self._test_get_test_list(
- # Input results
- (["3", "2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
- # Expected results
- ["001.html", "002.html"])
-
-if __name__ == '__main__':
- unittest.main()
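
As the ImportError guard at the top of the test file advises, these tests ran
directly against jsonresults.py with the TestResultServer root and the App
Engine SDK on PYTHONPATH. Below is a minimal sketch of driving the merge the
same way _test_merge does; the literal JSON strings are hand-written for
illustration, not taken from the patch.

    import jsonresults
    from jsonresults import JsonResults

    aggregated = ('ADD_RESULTS({"Webkit":{"buildNumbers":["2","1"],'
                  '"tests":{}},"version":3});')
    incremental = ('ADD_RESULTS({"Webkit":{"buildNumbers":["3"],'
                   '"tests":{}},"version":3});')
    # Build 3 is newer than build 2, so it is prepended; merged should equal
    # ADD_RESULTS({"Webkit":{"buildNumbers":["3","2","1"],"tests":{}},"version":3});
    merged = JsonResults.merge("Webkit", aggregated, incremental,
                               jsonresults.JSON_RESULTS_MAX_BUILDS,
                               sort_keys=True)
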
diff --git a/WebKitTools/TestResultServer/model/testfile.py b/WebKitTools/TestResultServer/model/testfile.py
deleted file mode 100644
index e600c99..0000000
--- a/WebKitTools/TestResultServer/model/testfile.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Copyright (C) 2010 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-from datetime import datetime
-import logging
-
-from google.appengine.ext import db
-
-from model.datastorefile import DataStoreFile
-
-
-class TestFile(DataStoreFile):
- master = db.StringProperty()
- builder = db.StringProperty()
- test_type = db.StringProperty()
-
- @classmethod
- def delete_file(cls, key, master, builder, test_type, name, limit):
- if key:
- file = db.get(key)
- if not file:
- logging.warning("File not found, key: %s.", key)
- return False
-
- file._delete_all()
- else:
-            files = cls.get_files(master, builder, test_type, name, limit=limit)
- if not files:
-                logging.warning(
-                    "File not found, master: %s, builder: %s, test_type: %s, name: %s.",
-                    master, builder, test_type, name)
- return False
-
- for file in files:
- file._delete_all()
-
- return True
-
- @classmethod
- def get_files(cls, master, builder, test_type, name, load_data=True, limit=1):
- query = TestFile.all()
- if master:
- query = query.filter("master =", master)
- if builder:
- query = query.filter("builder =", builder)
- if test_type:
- query = query.filter("test_type =", test_type)
- if name:
- query = query.filter("name =", name)
-
- files = query.order("-date").fetch(limit)
- if load_data:
- for file in files:
- file.load_data()
-
- return files
-
- @classmethod
- def add_file(cls, master, builder, test_type, name, data):
- file = TestFile()
- file.master = master
- file.builder = builder
- file.test_type = test_type
- file.name = name
-
- if not file.save(data):
- return None
-
- logging.info(
- "File saved, master: %s, builder: %s, test_type: %s, name: %s, key: %s.",
- master, builder, test_type, file.name, str(file.data_keys))
-
- return file
-
- @classmethod
- def update(cls, master, builder, test_type, name, data):
- files = cls.get_files(master, builder, test_type, name)
- if not files:
- return cls.add_file(master, builder, test_type, name, data)
-
- file = files[0]
- if not file.save(data):
- return None
-
- logging.info(
- "File replaced, master: %s, builder: %s, test_type: %s, name: %s, data key: %s.",
- master, builder, test_type, file.name, str(file.data_keys))
-
- return file
-
- def save(self, data):
- if not self.save_data(data):
- return False
-
- self.date = datetime.now()
- self.put()
-
- return True
-
- def _delete_all(self):
- self.delete_data()
- self.delete()
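
TestFile ties the pieces together: one datastore-backed file per (master,
builder, test_type, name) tuple, fetched newest first. Below is a usage sketch,
assuming an App Engine runtime; the argument values are illustrative, not from
the patch.

    from model.testfile import TestFile

    # Replace (or create) the aggregated results file for one builder.
    # update() reuses the newest matching entity and its data segments.
    file = TestFile.update("ChromiumWebkit", "Webkit", "layout-tests",
                           "results.json", '{"version":3}')

    # Fetch it back; load_data=True stitches the DataEntry segments together.
    files = TestFile.get_files("ChromiumWebkit", "Webkit", "layout-tests",
                               "results.json", load_data=True, limit=1)
    if files:
        assert files[0].data == '{"version":3}'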