diff options
Diffstat (limited to 'Tools/TestResultServer')
21 files changed, 2014 insertions, 0 deletions
diff --git a/Tools/TestResultServer/app.yaml b/Tools/TestResultServer/app.yaml
new file mode 100644
index 0000000..e51af84
--- /dev/null
+++ b/Tools/TestResultServer/app.yaml
@@ -0,0 +1,19 @@
application: test-results
version: 1
runtime: python
api_version: 1

handlers:
# Stylesheets are served directly by App Engine as static files.
- url: /stylesheets
  static_dir: stylesheets

# Destructive endpoints require an app administrator login.
- url: /testfile/delete
  script: main.py
  login: admin

- url: /dashboards/delete
  script: main.py
  login: admin

# Everything else is routed through the main WSGI application.
- url: /.*
  script: main.py
diff --git a/Tools/TestResultServer/handlers/__init__.py b/Tools/TestResultServer/handlers/__init__.py
new file mode 100644
index 0000000..ef65bee
--- /dev/null
+++ b/Tools/TestResultServer/handlers/__init__.py
@@ -0,0 +1 @@
# Required for Python to search this directory for module files
diff --git a/Tools/TestResultServer/handlers/dashboardhandler.py b/Tools/TestResultServer/handlers/dashboardhandler.py
new file mode 100644
index 0000000..c8b5ace
--- /dev/null
+++ b/Tools/TestResultServer/handlers/dashboardhandler.py
@@ -0,0 +1,123 @@
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import logging
import mimetypes
import urllib2

from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template

from model.dashboardfile import DashboardFile

# Request parameter carrying one or more dashboard file names.
PARAM_FILE = "file"


def get_content_type(filename):
    """Guess the MIME type from the file extension, defaulting to a
    generic binary type when the extension is unknown."""
    return mimetypes.guess_type(filename)[0] or "application/octet-stream"


class GetDashboardFile(webapp.RequestHandler):
    """Serve a stored dashboard file, or list all stored files."""

    def get(self, resource):
        # An empty resource path means "show the file list" page.
        if not resource:
            logging.debug("Getting dashboard file list.")
            return self._get_file_list()

        filename = str(urllib2.unquote(resource))

        logging.debug("Getting dashboard file: %s", filename)

        files = DashboardFile.get_files(filename)
        if not files:
            logging.error("Failed to find dashboard file: %s, request: %s",
                          filename, self.request)
            self.response.set_status(404)
            return

        # Serve with the type implied by the extension so browsers render
        # the HTML/JS dashboards instead of downloading them.
        content_type = "%s; charset=utf-8" % get_content_type(filename)
        logging.info("content type: %s", content_type)
        self.response.headers["Content-Type"] = content_type
        # Only the newest stored copy (get_files default limit=1) is served.
        self.response.out.write(files[0].data)

    def _get_file_list(self):
        """Render an HTML list of up to 100 stored dashboard files."""
        logging.info("getting dashboard file list.")

        files = DashboardFile.get_files("", 100)
        if not files:
            logging.info("Failed to find dashboard files.")
            self.response.set_status(404)
            return

        template_values = {
            "admin": users.is_current_user_admin(),
            "files": files,
        }
        self.response.out.write(
            template.render("templates/dashboardfilelist.html",
                            template_values))


class UpdateDashboardFile(webapp.RequestHandler):
    """Re-fetch dashboard files from SVN and store them in the datastore."""

    def get(self):
        files = self.request.get_all(PARAM_FILE)
        if not files:
            # No explicit file list in the request: refresh the default set.
            files = ["flakiness_dashboard.html",
                     "dashboard_base.js",
                     "aggregate_results.html",
                     "dygraph-combined.js",
                     "timeline_explorer.html"]

        errors = []
        for file in files:
            if not DashboardFile.update_file(file):
                errors.append("Failed to update file: %s" % file)

        if errors:
            messages = "; ".join(errors)
            logging.warning(messages)
            self.response.set_status(500, messages)
            self.response.out.write("FAIL")
        else:
            self.response.set_status(200)
            self.response.out.write("OK")


class DeleteDashboardFile(webapp.RequestHandler):
    """Delete named dashboard files (admin-only, see app.yaml).

    NOTE(review): deletion happens on GET, which has side effects; POST
    would be more conventional -- confirm callers before changing.
    """

    def get(self):
        files = self.request.get_all(PARAM_FILE)
        if not files:
            logging.warning("No dashboard file to delete.")
            self.response.set_status(400)
            return

        for file in files:
            DashboardFile.delete_file(file)

        # Display dashboard file list after deleting the file.
        self.redirect("/dashboards/")
diff --git a/Tools/TestResultServer/handlers/menu.py b/Tools/TestResultServer/handlers/menu.py
new file mode 100644
index 0000000..f2f3855
--- /dev/null
+++ b/Tools/TestResultServer/handlers/menu.py
@@ -0,0 +1,63 @@
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +from google.appengine.api import users +from google.appengine.ext import webapp +from google.appengine.ext.webapp import template + +menu = [ + ["List of test files", "/testfile"], + ["List of results.json files", "/testfile?name=results.json"], + ["List of expectations.json files", "/testfile?name=expectations.json"], + ["Upload test file", "/testfile/uploadform"], + ["List of dashboard files", "/dashboards/"], + ["Update dashboard files", "/dashboards/update"], +] + + +class Menu(webapp.RequestHandler): + def get(self): + user = users.get_current_user() + if user: + user_email = user.email() + login_text = "Sign out" + login_url = users.create_logout_url(self.request.uri) + else: + user_email = "" + login_text = "Sign in" + login_url = users.create_login_url(self.request.uri) + + template_values = { + "user_email": user_email, + "login_text": login_text, + "login_url": login_url, + "menu": menu, + } + + self.response.out.write( + template.render("templates/menu.html", template_values)) diff --git a/Tools/TestResultServer/handlers/testfilehandler.py b/Tools/TestResultServer/handlers/testfilehandler.py new file mode 100644 index 0000000..6f0ca44 --- /dev/null +++ b/Tools/TestResultServer/handlers/testfilehandler.py @@ -0,0 +1,230 @@ +# Copyright (C) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
# nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import logging
import urllib

from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template

from model.jsonresults import JsonResults
from model.testfile import TestFile

# Names of the CGI request parameters understood by these handlers.
PARAM_MASTER = "master"
PARAM_BUILDER = "builder"
PARAM_DIR = "dir"
PARAM_FILE = "file"
PARAM_NAME = "name"
PARAM_KEY = "key"
PARAM_TEST_TYPE = "testtype"
PARAM_INCREMENTAL = "incremental"
PARAM_TEST_LIST_JSON = "testlistjson"


class DeleteFile(webapp.RequestHandler):
    """Delete test file for a given builder and name from datastore."""

    def get(self):
        key = self.request.get(PARAM_KEY)
        master = self.request.get(PARAM_MASTER)
        builder = self.request.get(PARAM_BUILDER)
        test_type = self.request.get(PARAM_TEST_TYPE)
        name = self.request.get(PARAM_NAME)

        logging.debug(
            "Deleting File, master: %s, builder: %s, test_type: %s, name: %s, key: %s.",
            master, builder, test_type, name, key)

        # The trailing 100 is presumably a deletion limit -- confirm
        # against TestFile.delete_file (not visible in this chunk).
        TestFile.delete_file(key, master, builder, test_type, name, 100)

        # Display file list after deleting the file.
        self.redirect("/testfile?master=%s&builder=%s&testtype=%s&name=%s"
                      % (master, builder, test_type, name))


class GetFile(webapp.RequestHandler):
    """Get file content or list of files for given builder and name."""

    def _get_file_list(self, master, builder, test_type, name):
        """Get and display a list of files that matches builder and file name.

        Args:
            master: master name
            builder: builder name
            test_type: type of the test
            name: file name
        """

        files = TestFile.get_files(
            master, builder, test_type, name, load_data=False, limit=100)
        if not files:
            logging.info("File not found, master: %s, builder: %s, test_type: %s, name: %s.",
                         master, builder, test_type, name)
            self.response.out.write("File not found")
            return

        template_values = {
            "admin": users.is_current_user_admin(),
            "master": master,
            "builder": builder,
            "test_type": test_type,
            "name": name,
            "files": files,
        }
        self.response.out.write(template.render("templates/showfilelist.html",
                                                template_values))

    def _get_file_content(self, master, builder, test_type, name):
        """Return content of the file that matches builder and file name.

        Args:
            master: master name
            builder: builder name
            test_type: type of the test
            name: file name

        Returns:
            The newest matching file's data, or None when not found.
        """

        files = TestFile.get_files(
            master, builder, test_type, name, load_data=True, limit=1)
        if not files:
            logging.info("File not found, master %s, builder: %s, test_type: %s, name: %s.",
                         master, builder, test_type, name)
            return None

        return files[0].data

    def _get_test_list_json(self, master, builder, test_type):
        """Return json file with test name list only, do not include test
        results and other non-test data.

        Args:
            master: master name.
            builder: builder name.
            test_type: type of test results.
        """

        json = self._get_file_content(master, builder, test_type, "results.json")
        if not json:
            return None

        return JsonResults.get_test_list(builder, json)

    def get(self):
        master = self.request.get(PARAM_MASTER)
        builder = self.request.get(PARAM_BUILDER)
        test_type = self.request.get(PARAM_TEST_TYPE)
        name = self.request.get(PARAM_NAME)
        dir = self.request.get(PARAM_DIR)
        test_list_json = self.request.get(PARAM_TEST_LIST_JSON)

        logging.debug(
            "Getting files, master %s, builder: %s, test_type: %s, name: %s.",
            master, builder, test_type, name)

        # If parameter "dir" is specified or there is no builder or filename
        # specified in the request, return list of files, otherwise, return
        # file content.
        if dir or not builder or not name:
            return self._get_file_list(master, builder, test_type, name)

        if name == "results.json" and test_list_json:
            json = self._get_test_list_json(master, builder, test_type)
        else:
            json = self._get_file_content(master, builder, test_type, name)

        if json:
            self.response.headers["Content-Type"] = "text/plain; charset=utf-8"
            self.response.out.write(json)
        else:
            self.error(404)


class Upload(webapp.RequestHandler):
    """Upload test results file to datastore."""

    def post(self):
        file_params = self.request.POST.getall(PARAM_FILE)
        if not file_params:
            self.response.out.write("FAIL: missing upload file field.")
            return

        builder = self.request.get(PARAM_BUILDER)
        if not builder:
            self.response.out.write("FAIL: missing builder parameter.")
            return

        master = self.request.get(PARAM_MASTER)
        test_type = self.request.get(PARAM_TEST_TYPE)
        incremental = self.request.get(PARAM_INCREMENTAL)

        logging.debug(
            "Processing upload request, master: %s, builder: %s, test_type: %s.",
            master, builder, test_type)

        # There are two possible types of each file_params in the request:
        # one file item or a list of file items.
        # Normalize file_params to a file item list.
        files = []
        logging.debug("test: %s, type:%s", file_params, type(file_params))
        for item in file_params:
            # Wrap a single file item so both cases extend uniformly.
            if not isinstance(item, list) and not isinstance(item, tuple):
                item = [item]
            files.extend(item)

        errors = []
        for file in files:
            filename = file.filename.lower()
            # Incremental results (either flagged via the "incremental"
            # parameter or named incremental_results.json) are merged into
            # the aggregated json; everything else is stored verbatim.
            if ((incremental and filename == "results.json") or
                (filename == "incremental_results.json")):
                # Merge incremental json results.
                update_succeeded = JsonResults.update(master, builder, test_type, file.value)
            else:
                update_succeeded = TestFile.update(
                    master, builder, test_type, file.filename, file.value)

            if not update_succeeded:
                errors.append(
                    "Upload failed, master: %s, builder: %s, test_type: %s, name: %s." %
                    (master, builder, test_type, file.filename))

        if errors:
            messages = "FAIL: " + "; ".join(errors)
            logging.warning(messages)
            self.response.set_status(500, messages)
            self.response.out.write("FAIL")
        else:
            self.response.set_status(200)
            self.response.out.write("OK")


class UploadForm(webapp.RequestHandler):
    """Show a form so user can upload a file."""

    def get(self):
        template_values = {
            "upload_url": "/testfile/upload",
        }
        self.response.out.write(template.render("templates/uploadform.html",
                                                template_values))
diff --git a/Tools/TestResultServer/index.yaml b/Tools/TestResultServer/index.yaml
new file mode 100644
index 0000000..a7d3e48
--- /dev/null
+++ b/Tools/TestResultServer/index.yaml
@@ -0,0 +1,65 @@
indexes:

# AUTOGENERATED

# This index.yaml is automatically updated whenever the dev_appserver
# detects that a new type of query is run. If you want to manage the
# index.yaml file manually, remove the above marker line (the line
# saying "# AUTOGENERATED"). If you want to manage some indexes
# manually, move them above the marker line. The index.yaml file is
# automatically uploaded to the admin console when you next deploy
# your application using appcfg.py.

# Composite indexes: every query filters on some property combination
# and orders by date descending (newest first).
- kind: DashboardFile
  properties:
  - name: name
  - name: date
    direction: desc

- kind: TestFile
  properties:
  - name: builder
  - name: date
    direction: desc

- kind: TestFile
  properties:
  - name: builder
  - name: master
  - name: name
  - name: test_type
  - name: date
    direction: desc

- kind: TestFile
  properties:
  - name: builder
  - name: name
  - name: date
    direction: desc

- kind: TestFile
  properties:
  - name: builder
  - name: name
  - name: test_type
  - name: date
    direction: desc

- kind: TestFile
  properties:
  - name: master
  - name: date
    direction: desc

- kind: TestFile
  properties:
  - name: name
  - name: date
    direction: desc

- kind: TestFile
  properties:
  - name: test_type
  - name: date
    direction: desc
diff --git a/Tools/TestResultServer/main.py b/Tools/TestResultServer/main.py
new file mode 100644
index 0000000..aa6e432
--- /dev/null
+++ b/Tools/TestResultServer/main.py
@@ -0,0 +1,58 @@
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Request a modern Django
from google.appengine.dist import use_library
use_library('django', '1.1')

from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app

from handlers import dashboardhandler
from handlers import menu
from handlers import testfilehandler

# URL routing table; more specific routes come first.
routes = [
    ('/dashboards/delete', dashboardhandler.DeleteDashboardFile),
    ('/dashboards/update', dashboardhandler.UpdateDashboardFile),
    ('/dashboards/([^?]+)?', dashboardhandler.GetDashboardFile),
    ('/testfile/delete', testfilehandler.DeleteFile),
    ('/testfile/upload', testfilehandler.Upload),
    ('/testfile/uploadform', testfilehandler.UploadForm),
    ('/testfile/?', testfilehandler.GetFile),
    # NOTE(review): '/*|/menu' is a regex whose first alternative '/*'
    # matches zero or more slashes (including the empty path); it looks
    # intended as the catch-all menu route -- confirm before changing.
    ('/*|/menu', menu.Menu),
]

application = webapp.WSGIApplication(routes, debug=True)


def main():
    run_wsgi_app(application)

if __name__ == "__main__":
    main()
diff --git a/Tools/TestResultServer/model/__init__.py b/Tools/TestResultServer/model/__init__.py
new file mode 100644
index 0000000..ef65bee
--- /dev/null
+++ b/Tools/TestResultServer/model/__init__.py
@@ -0,0 +1 @@
# Required for Python to search this directory for module files
diff --git a/Tools/TestResultServer/model/dashboardfile.py b/Tools/TestResultServer/model/dashboardfile.py
new file mode 100644
index 0000000..aad6d50
--- /dev/null
+++ b/Tools/TestResultServer/model/dashboardfile.py
@@ -0,0 +1,119 @@
# Copyright (C) 2010 Google Inc. All rights reserved.
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +from datetime import datetime +import logging +import urllib +import urllib2 + +from google.appengine.ext import db + +SVN_PATH_DASHBOARD = ("http://src.chromium.org/viewvc/chrome/trunk/tools/" + "dashboards/") + + +class DashboardFile(db.Model): + name = db.StringProperty() + data = db.BlobProperty() + date = db.DateTimeProperty(auto_now_add=True) + + @classmethod + def get_files(cls, name, limit=1): + query = DashboardFile.all() + if name: + query = query.filter("name =", name) + return query.order("-date").fetch(limit) + + @classmethod + def add_file(cls, name, data): + file = DashboardFile() + file.name = name + file.data = db.Blob(data) + file.put() + + logging.debug("Dashboard file saved, name: %s.", name) + + return file + + @classmethod + def grab_file_from_svn(cls, name): + logging.debug("Grab file from SVN, name: %s.", name) + + url = SVN_PATH_DASHBOARD + urllib.quote_plus(name) + + logging.info("Grab file from SVN, url: %s.", url) + try: + file = urllib2.urlopen(url) + if not file: + logging.error("Failed to grab dashboard file: %s.", url) + return None + + return file.read() + except urllib2.HTTPError, e: + logging.error("Failed to grab dashboard file: %s", str(e)) + except urllib2.URLError, e: + logging.error("Failed to grab dashboard file: %s", str(e)) + + return None + + @classmethod + def update_file(cls, name): + data = cls.grab_file_from_svn(name) + if not data: + return False + + logging.info("Got file from SVN.") + + files = cls.get_files(name) + if not files: + logging.info("No existing file, added as new file.") + if cls.add_file(name, data): + return True + return False + + logging.debug("Updating existing file.") + file = files[0] + file.data = data + file.date = datetime.now() + file.put() + + logging.info("Dashboard file replaced, name: %s.", name) + + return True + + @classmethod + def delete_file(cls, name): + files = cls.get_files(name) + if not files: + logging.warning("File not found, name: %s.", name) + return False + + for file 
in files: + file.delete() + + return True diff --git a/Tools/TestResultServer/model/datastorefile.py b/Tools/TestResultServer/model/datastorefile.py new file mode 100755 index 0000000..ac28d64 --- /dev/null +++ b/Tools/TestResultServer/model/datastorefile.py @@ -0,0 +1,150 @@ +# Copyright (C) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 

from datetime import datetime
import logging

from google.appengine.ext import db

# A file is stored as up to 10 datastore entries of <= 1,000,000 bytes each.
MAX_DATA_ENTRY_PER_FILE = 10
MAX_ENTRY_LEN = 1000 * 1000


class DataEntry(db.Model):
    """Datastore entry that stores one segment of file data
    (<1000*1000 bytes).
    """

    data = db.BlobProperty()

    @classmethod
    def get(cls, key):
        # NOTE(review): shadows db.Model.get with equivalent key lookup;
        # kept for call-site readability.
        return db.get(key)

    def get_data(self, key):
        # NOTE(review): ignores self and duplicates the classmethod above
        # -- looks like a leftover; confirm before removing.
        return db.get(key)


class DataStoreFile(db.Model):
    """This class stores file in datastore.
    If a file is oversize (>1000*1000 bytes), the file is split into
    multiple segments and stored in multiple datastore entries.
    """

    name = db.StringProperty()
    data_keys = db.ListProperty(db.Key)
    # keys to the data store entries that can be reused for new data.
    # If it is empty, create new DataEntry.
    new_data_keys = db.ListProperty(db.Key)
    date = db.DateTimeProperty(auto_now_add=True)

    # In-memory cache of the joined file content; class-level default,
    # set per instance by save_data()/load_data().
    data = None

    def delete_data(self, keys=None):
        """Delete the DataEntry entities behind `keys` (defaults to all
        current segments of this file)."""
        if not keys:
            keys = self.data_keys

        for key in keys:
            data_entry = DataEntry.get(key)
            if data_entry:
                data_entry.delete()

    def save_data(self, data):
        """Split `data` into <=1MB segments and store them.

        Returns True on success; False when data is empty, oversized, or
        a datastore write fails.
        """
        if not data:
            logging.warning("No data to save.")
            return False

        if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
            logging.error("File too big, can't save to datastore: %dK",
                          len(data) / 1024)
            return False

        start = 0
        # Use the new_data_keys to store new data. If all new data are saved
        # successfully, swap new_data_keys and data_keys so we can reuse the
        # data_keys entries in next run. If unable to save new data for any
        # reason, only the data pointed by new_data_keys may be corrupted,
        # the existing data_keys data remains untouched. The corrupted data
        # in new_data_keys will be overwritten in next update.
        keys = self.new_data_keys
        self.new_data_keys = []

        while start < len(data):
            if keys:
                # Try to reuse an existing entry from the reuse pool.
                key = keys[0]
                data_entry = DataEntry.get(key)
                if not data_entry:
                    logging.warning("Found key, but no data entry: %s", key)
                    data_entry = DataEntry()
            else:
                data_entry = DataEntry()

            data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
            try:
                data_entry.put()
            except Exception, err:
                logging.error("Failed to save data store entry: %s", err)
                # Drop the remaining reuse-pool entries; the data behind
                # data_keys is still intact.
                if keys:
                    self.delete_data(keys)
                return False

            logging.info("Data saved: %s.", data_entry.key())
            self.new_data_keys.append(data_entry.key())
            if keys:
                keys.pop(0)

            start = start + MAX_ENTRY_LEN

        # Delete leftover reusable entries the new data did not need.
        if keys:
            self.delete_data(keys)

        # Swap: freshly written keys become current; old keys become the
        # reuse pool for the next save.
        temp_keys = self.data_keys
        self.data_keys = self.new_data_keys
        self.new_data_keys = temp_keys
        self.data = data

        return True

    def load_data(self):
        """Fetch and join all segments; returns the content or None."""
        if not self.data_keys:
            logging.warning("No data to load.")
            return None

        data = []
        for key in self.data_keys:
            logging.info("Loading data for key: %s.", key)
            data_entry = DataEntry.get(key)
            if not data_entry:
                logging.error("No data found for key: %s.", key)
                return None

            data.append(data_entry.data)

        self.data = "".join(data)

        return self.data
diff --git a/Tools/TestResultServer/model/jsonresults.py b/Tools/TestResultServer/model/jsonresults.py
new file mode 100755
index 0000000..f5a0fde
--- /dev/null
+++ b/Tools/TestResultServer/model/jsonresults.py
@@ -0,0 +1,466 @@
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from datetime import datetime
from django.utils import simplejson
import logging

from model.testfile import TestFile

# File names and the JSONP wrapper used by the results files.
JSON_RESULTS_FILE = "results.json"
JSON_RESULTS_FILE_SMALL = "results-small.json"
JSON_RESULTS_PREFIX = "ADD_RESULTS("
JSON_RESULTS_SUFFIX = ");"
# Property keys inside the json results object.
JSON_RESULTS_VERSION_KEY = "version"
JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
JSON_RESULTS_TESTS = "tests"
JSON_RESULTS_RESULTS = "results"
JSON_RESULTS_TIMES = "times"
JSON_RESULTS_PASS = "P"
JSON_RESULTS_NO_DATA = "N"
JSON_RESULTS_MIN_TIME = 1
JSON_RESULTS_VERSION = 3
# Maximum number of builds retained in the aggregated files.
JSON_RESULTS_MAX_BUILDS = 750
JSON_RESULTS_MAX_BUILDS_SMALL = 200


class JsonResults(object):
    @classmethod
    def _strip_prefix_suffix(cls, data):
        """Strip out prefix and suffix of json results string.

        Args:
            data: json file content.

        Returns:
            json string without prefix and suffix.
        """

        # Results files are JSONP: ADD_RESULTS(<json>);
        assert(data.startswith(JSON_RESULTS_PREFIX))
        assert(data.endswith(JSON_RESULTS_SUFFIX))

        return data[len(JSON_RESULTS_PREFIX):
                    len(data) - len(JSON_RESULTS_SUFFIX)]

    @classmethod
    def _generate_file_data(cls, json, sort_keys=False):
        """Given json string, generate file content data by adding
        prefix and suffix.

        Args:
            json: json object to serialize (without prefix and suffix).
            sort_keys: whether dictionary keys are emitted in sorted order.

        Returns:
            json file data.
        """

        data = simplejson.dumps(json, separators=(',', ':'),
                                sort_keys=sort_keys)
        return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX

    @classmethod
    def _load_json(cls, file_data):
        """Load json file to a python object.

        Args:
            file_data: json file content.

        Returns:
            json object or
            None on failure.
        """

        json_results_str = cls._strip_prefix_suffix(file_data)
        if not json_results_str:
            logging.warning("No json results data.")
            return None

        try:
            return simplejson.loads(json_results_str)
        except Exception, err:
            logging.debug(json_results_str)
            logging.error("Failed to load json results: %s", str(err))
            return None

    @classmethod
    def _merge_json(cls, aggregated_json, incremental_json, num_runs):
        """Merge incremental json into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            num_runs: maximum number of runs to keep per build list.

        Returns:
            True if merge succeeds or
            False on failure.
        """

        # Merge non tests property data.
        # Tests properties are merged in _merge_tests.
        if not cls._merge_non_test_data(aggregated_json, incremental_json, num_runs):
            return False

        # Merge tests results and times
        incremental_tests = incremental_json[JSON_RESULTS_TESTS]
        if incremental_tests:
            aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
            cls._merge_tests(aggregated_tests, incremental_tests, num_runs)

        return True

    @classmethod
    def _merge_non_test_data(cls, aggregated_json, incremental_json, num_runs):
        """Merge incremental non tests property data into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            num_runs: maximum number of runs to keep per build list.

        Returns:
            True if merge succeeds or
            False on failure.
        """

        incremental_builds = incremental_json[JSON_RESULTS_BUILD_NUMBERS]
        aggregated_builds = aggregated_json[JSON_RESULTS_BUILD_NUMBERS]
        # Build lists are newest-first, so index 0 is the most recent build.
        aggregated_build_number = int(aggregated_builds[0])
        # Loop through all incremental builds, start from the oldest run.
        for index in reversed(range(len(incremental_builds))):
            build_number = int(incremental_builds[index])
            logging.debug("Merging build %s, incremental json index: %d.",
                          build_number, index)

            # Return if not all build numbers in the incremental json results
            # are newer than the most recent build in the aggregated results.
            # FIXME: make this case work.
            if build_number < aggregated_build_number:
                logging.warning(("Build %d in incremental json is older than "
                                 "the most recent build in aggregated results: %d"),
                                build_number, aggregated_build_number)
                return False

            # Return if the build number is duplicated.
            # FIXME: skip the duplicated build and merge rest of the results.
            # Need to be careful on skipping the corresponding value in
            # _merge_tests because the property data for each test could
            # be accumulated.
            if build_number == aggregated_build_number:
                logging.warning("Duplicate build %d in incremental json",
                                build_number)
                return False

            # Merge this build into aggregated results.
            cls._merge_one_build(aggregated_json, incremental_json, index, num_runs)

        return True

    @classmethod
    def _merge_one_build(cls, aggregated_json, incremental_json,
                         incremental_index, num_runs):
        """Merge one build of incremental json into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            incremental_index: index of the incremental json results to merge.
            num_runs: maximum number of runs to keep per property list.
        """

        for key in incremental_json.keys():
            # Merge json results except "tests" properties (results, times etc).
            # "tests" properties will be handled separately.
            if key == JSON_RESULTS_TESTS:
                continue

            if key in aggregated_json:
                # Newest build goes first; truncate the list to num_runs.
                aggregated_json[key].insert(
                    0, incremental_json[key][incremental_index])
                aggregated_json[key] = \
                    aggregated_json[key][:num_runs]
            else:
                aggregated_json[key] = incremental_json[key]

    @classmethod
    def _merge_tests(cls, aggregated_json, incremental_json, num_runs):
        """Merge "tests" properties:results, times.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
+ """ + + all_tests = (set(aggregated_json.iterkeys()) | + set(incremental_json.iterkeys())) + for test_name in all_tests: + if test_name in aggregated_json: + aggregated_test = aggregated_json[test_name] + if test_name in incremental_json: + incremental_test = incremental_json[test_name] + results = incremental_test[JSON_RESULTS_RESULTS] + times = incremental_test[JSON_RESULTS_TIMES] + else: + results = [[1, JSON_RESULTS_NO_DATA]] + times = [[1, 0]] + + cls._insert_item_run_length_encoded( + results, aggregated_test[JSON_RESULTS_RESULTS], num_runs) + cls._insert_item_run_length_encoded( + times, aggregated_test[JSON_RESULTS_TIMES], num_runs) + cls._normalize_results_json(test_name, aggregated_json) + else: + aggregated_json[test_name] = incremental_json[test_name] + + @classmethod + def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item, num_runs): + """Inserts the incremental run-length encoded results into the aggregated + run-length encoded results. + + Args: + incremental_item: incremental run-length encoded results. + aggregated_item: aggregated run-length encoded results. + """ + + for item in incremental_item: + if len(aggregated_item) and item[1] == aggregated_item[0][1]: + aggregated_item[0][0] = min( + aggregated_item[0][0] + item[0], num_runs) + else: + aggregated_item.insert(0, item) + + @classmethod + def _normalize_results_json(cls, test_name, aggregated_json): + """ Prune tests where all runs pass or tests that no longer exist and + truncate all results to JSON_RESULTS_MAX_BUILDS. + + Args: + test_name: Name of the test. + aggregated_json: The JSON object with all the test results for + this builder. 
+ """ + + aggregated_test = aggregated_json[test_name] + aggregated_test[JSON_RESULTS_RESULTS] = \ + cls._remove_items_over_max_number_of_builds( + aggregated_test[JSON_RESULTS_RESULTS]) + aggregated_test[JSON_RESULTS_TIMES] = \ + cls._remove_items_over_max_number_of_builds( + aggregated_test[JSON_RESULTS_TIMES]) + + is_all_pass = cls._is_results_all_of_type( + aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_PASS) + is_all_no_data = cls._is_results_all_of_type( + aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_NO_DATA) + + max_time = max( + [time[1] for time in aggregated_test[JSON_RESULTS_TIMES]]) + # Remove all passes/no-data from the results to reduce noise and + # filesize. If a test passes every run, but + # takes >= JSON_RESULTS_MIN_TIME to run, don't throw away the data. + if (is_all_no_data or + (is_all_pass and max_time < JSON_RESULTS_MIN_TIME)): + del aggregated_json[test_name] + + @classmethod + def _remove_items_over_max_number_of_builds(cls, encoded_list): + """Removes items from the run-length encoded list after the final + item that exceeds the max number of builds to track. + + Args: + encoded_results: run-length encoded results. An array of arrays, e.g. + [[3,'A'],[1,'Q']] encodes AAAQ. + """ + num_builds = 0 + index = 0 + for result in encoded_list: + num_builds = num_builds + result[0] + index = index + 1 + if num_builds > JSON_RESULTS_MAX_BUILDS: + return encoded_list[:index] + + return encoded_list + + @classmethod + def _is_results_all_of_type(cls, results, type): + """Returns whether all the results are of the given type + (e.g. all passes). + """ + + return len(results) == 1 and results[0][1] == type + + @classmethod + def _check_json(cls, builder, json): + """Check whether the given json is valid. + + Args: + builder: builder name this json is for. + json: json object to check. + + Returns: + True if the json is valid or + False otherwise. 
+ """ + + version = json[JSON_RESULTS_VERSION_KEY] + if version > JSON_RESULTS_VERSION: + logging.error("Results JSON version '%s' is not supported.", + version) + return False + + if not builder in json: + logging.error("Builder '%s' is not in json results.", builder) + return False + + results_for_builder = json[builder] + if not JSON_RESULTS_BUILD_NUMBERS in results_for_builder: + logging.error("Missing build number in json results.") + return False + + return True + + @classmethod + def merge(cls, builder, aggregated, incremental, num_runs, sort_keys=False): + """Merge incremental json file data with aggregated json file data. + + Args: + builder: builder name. + aggregated: aggregated json file data. + incremental: incremental json file data. + sort_key: whether or not to sort key when dumping json results. + + Returns: + Merged json file data if merge succeeds or + None on failure. + """ + + if not incremental: + logging.warning("Nothing to merge.") + return None + + logging.info("Loading incremental json...") + incremental_json = cls._load_json(incremental) + if not incremental_json: + return None + + logging.info("Checking incremental json...") + if not cls._check_json(builder, incremental_json): + return None + + logging.info("Loading existing aggregated json...") + aggregated_json = cls._load_json(aggregated) + if not aggregated_json: + return incremental + + logging.info("Checking existing aggregated json...") + if not cls._check_json(builder, aggregated_json): + return incremental + + logging.info("Merging json results...") + try: + if not cls._merge_json(aggregated_json[builder], incremental_json[builder], num_runs): + return None + except Exception, err: + logging.error("Failed to merge json results: %s", str(err)) + return None + + aggregated_json[JSON_RESULTS_VERSION_KEY] = JSON_RESULTS_VERSION + + return cls._generate_file_data(aggregated_json, sort_keys) + + @classmethod + def update(cls, master, builder, test_type, incremental): + """Update 
datastore json file data by merging it with incremental json + file. Writes the large file and a small file. The small file just stores + fewer runs. + + Args: + master: master name. + builder: builder name. + test_type: type of test results. + incremental: incremental json file data to merge. + + Returns: + Large TestFile object if update succeeds or + None on failure. + """ + small_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE_SMALL, JSON_RESULTS_MAX_BUILDS_SMALL) + large_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE, JSON_RESULTS_MAX_BUILDS) + + return small_file_updated and large_file_updated + + @classmethod + def update_file(cls, master, builder, test_type, incremental, filename, num_runs): + files = TestFile.get_files(master, builder, test_type, filename) + if files: + file = files[0] + new_results = cls.merge(builder, file.data, incremental, num_runs) + else: + # Use the incremental data if there is no aggregated file to merge. + file = TestFile() + file.master = master + file.builder = builder + file.test_type = test_type + file.name = filename + new_results = incremental + logging.info("No existing json results, incremental json is saved.") + + if not new_results or not file.save(new_results): + logging.info( + "Update failed, master: %s, builder: %s, test_type: %s, name: %s." % + (master, builder, test_type, filename)) + return False + + return True + + @classmethod + def get_test_list(cls, builder, json_file_data): + """Get list of test names from aggregated json file data. + + Args: + json_file_data: json file data that has all test-data and + non-test-data. + + Returns: + json file with test name list only. The json format is the same + as the one saved in datastore, but all non-test-data and test detail + results are removed. 
+ """ + + logging.debug("Loading test results json...") + json = cls._load_json(json_file_data) + if not json: + return None + + logging.debug("Checking test results json...") + if not cls._check_json(builder, json): + return None + + test_list_json = {} + tests = json[builder][JSON_RESULTS_TESTS] + test_list_json[builder] = { + "tests": dict.fromkeys(tests, {})} + + return cls._generate_file_data(test_list_json) diff --git a/Tools/TestResultServer/model/jsonresults_unittest.py b/Tools/TestResultServer/model/jsonresults_unittest.py new file mode 100755 index 0000000..c70b90c --- /dev/null +++ b/Tools/TestResultServer/model/jsonresults_unittest.py @@ -0,0 +1,322 @@ +# Copyright (C) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +try: + import jsonresults + from jsonresults import JsonResults +except ImportError: + print "ERROR: Add the TestResultServer, google_appengine and yaml/lib directories to your PYTHONPATH" + +import unittest + + +JSON_RESULTS_TEMPLATE = ( + '{"Webkit":{' + '"allFixableCount":[[TESTDATA_COUNT]],' + '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],' + '"chromeRevision":[[TESTDATA_CHROMEREVISION]],' + '"deferredCounts":[[TESTDATA_COUNTS]],' + '"fixableCount":[[TESTDATA_COUNT]],' + '"fixableCounts":[[TESTDATA_COUNTS]],' + '"secondsSinceEpoch":[[TESTDATA_TIMES]],' + '"tests":{[TESTDATA_TESTS]},' + '"webkitRevision":[[TESTDATA_WEBKITREVISION]],' + '"wontfixCounts":[[TESTDATA_COUNTS]]' + '},' + '"version":3' + '}') + +JSON_RESULTS_COUNTS_TEMPLATE = ( + '{' + '"C":[TESTDATA],' + '"F":[TESTDATA],' + '"I":[TESTDATA],' + '"O":[TESTDATA],' + '"P":[TESTDATA],' + '"T":[TESTDATA],' + '"X":[TESTDATA],' + '"Z":[TESTDATA]}') + +JSON_RESULTS_TESTS_TEMPLATE = ( + '"[TESTDATA_TEST_NAME]":{' + '"results":[[TESTDATA_TEST_RESULTS]],' + '"times":[[TESTDATA_TEST_TIMES]]}') + +JSON_RESULTS_PREFIX = "ADD_RESULTS(" +JSON_RESULTS_SUFFIX = ");" + +JSON_RESULTS_TEST_LIST_TEMPLATE = ( + '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}') + + +class JsonResultsTest(unittest.TestCase): + def setUp(self): + self._builder = "Webkit" + + def _make_test_json(self, test_data): + if not test_data: + return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX + + (builds, tests) = test_data + if not 
builds or not tests: + return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX + + json = JSON_RESULTS_TEMPLATE + + counts = [] + build_numbers = [] + webkit_revision = [] + chrome_revision = [] + times = [] + for build in builds: + counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build)) + build_numbers.append("1000%s" % build) + webkit_revision.append("2000%s" % build) + chrome_revision.append("3000%s" % build) + times.append("100000%s000" % build) + + json = json.replace("[TESTDATA_COUNTS]", ",".join(counts)) + json = json.replace("[TESTDATA_COUNT]", ",".join(builds)) + json = json.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers)) + json = json.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision)) + json = json.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision)) + json = json.replace("[TESTDATA_TIMES]", ",".join(times)) + + json_tests = [] + for test in tests: + t = JSON_RESULTS_TESTS_TEMPLATE.replace("[TESTDATA_TEST_NAME]", test[0]) + t = t.replace("[TESTDATA_TEST_RESULTS]", test[1]) + t = t.replace("[TESTDATA_TEST_TIMES]", test[2]) + json_tests.append(t) + + json = json.replace("[TESTDATA_TESTS]", ",".join(json_tests)) + + return JSON_RESULTS_PREFIX + json + JSON_RESULTS_SUFFIX + + def _test_merge(self, aggregated_data, incremental_data, expected_data): + aggregated_results = self._make_test_json(aggregated_data) + incremental_results = self._make_test_json(incremental_data) + merged_results = JsonResults.merge(self._builder, + aggregated_results, incremental_results, jsonresults.JSON_RESULTS_MAX_BUILDS, + sort_keys=True) + + if expected_data: + expected_results = self._make_test_json(expected_data) + self.assertEquals(merged_results, expected_results) + else: + self.assertFalse(merged_results) + + def _test_get_test_list(self, input_data, expected_data): + input_results = self._make_test_json(input_data) + + json_tests = [] + for test in expected_data: + json_tests.append("\"" + test + "\":{}") + + expected_results = 
JSON_RESULTS_PREFIX + \ + JSON_RESULTS_TEST_LIST_TEMPLATE.replace( + "[TESTDATA_TESTS]", ",".join(json_tests)) + \ + JSON_RESULTS_SUFFIX + + actual_results = JsonResults.get_test_list(self._builder, input_results) + self.assertEquals(actual_results, expected_results) + + def test(self): + # Empty incremental results json. + # Nothing to merge. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Incremental results + None, + # Expect no merge happens. + None) + + # No actual incremental test results (only prefix and suffix) to merge. + # Nothing to merge. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Incremental results + ([], []), + # Expected no merge happens. + None) + + # No existing aggregated results. + # Merged results == new incremental results. + self._test_merge( + # Aggregated results + None, + # Incremental results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Expected results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]])) + + # Single test for single run. + # Incremental results has the latest build and same test results for + # that run. + # Insert the incremental results at the first place and sum number + # of runs for "P" (200 + 1) to get merged results. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Incremental results + (["3"], [["001.html", "[1,\"F\"]", "[1,0]"]]), + # Expected results + (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"]])) + + # Single test for single run. + # Incremental results has the latest build but different test results + # for that run. + # Insert the incremental results at the first place. 
+ self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Incremental results + (["3"], [["001.html", "[1, \"I\"]", "[1,1]"]]), + # Expected results + (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"]", "[1,1],[200,0]"]])) + + # Single test for single run. + # Incremental results has the latest build but different test results + # for that run. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"],[10,\"I\"]", "[200,0],[10,1]"]]), + # Incremental results + (["3"], [["001.html", "[1,\"I\"]", "[1,1]"]]), + # Expected results + (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"],[10,\"I\"]", "[1,1],[200,0],[10,1]"]])) + + # Multiple tests for single run. + # All tests have incremental updates. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]), + # Incremental results + (["3"], [["001.html", "[1,\"F\"]", "[1,0]"], ["002.html", "[1,\"I\"]", "[1,1]"]]), + # Expected results + (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]])) + + # Multiple tests for single run. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]), + # Incremental results + (["3"], [["002.html", "[1,\"I\"]", "[1,1]"]]), + # Expected results + (["3", "2", "1"], [["001.html", "[1,\"N\"],[200,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]])) + + # Single test for multiple runs. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Incremental results + (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"]]), + # Expected results + (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"]])) + + # Multiple tests for multiple runs. 
+ self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[10,\"Z\"]", "[10,0]"]]), + # Incremental results + (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"], ["002.html", "[1,\"C\"]", "[1,1]"]]), + # Expected results + (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,1],[10,0]"]])) + + # Test the build in incremental results is older than the most recent + # build in aggregated results. + # The incremental results should be dropped and no merge happens. + self._test_merge( + # Aggregated results + (["3", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Incremental results + (["2"], [["001.html", "[1, \"F\"]", "[1,0]"]]), + # Expected no merge happens. + None) + + # Test the build in incremental results is same as the build in + # aggregated results. + # The incremental results should be dropped and no merge happens. + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]), + # Incremental results + (["3", "2"], [["001.html", "[2, \"F\"]", "[2,0]"]]), + # Expected no merge happens. + None) + + # Remove test where there is no data in all runs. 
+ self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"N\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]), + # Incremental results + (["3"], [["001.html", "[1,\"N\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]), + # Expected results + (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]])) + + # Remove test where all run pass and max running time < 1 seconds + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]), + # Incremental results + (["3"], [["001.html", "[1,\"P\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]), + # Expected results + (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]])) + + # Do not remove test where all run pass but max running time >= 1 seconds + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]), + # Incremental results + (["3"], [["001.html", "[1,\"P\"]", "[1,1]"], ["002.html", "[1,\"P\"]", "[1,0]"]]), + # Expected results + (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[1,1],[200,0]"], ["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]])) + + # Remove items from test results and times that exceeds the max number + # of builds to track. + max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS) + self._test_merge( + # Aggregated results + (["2", "1"], [["001.html", "[" + max_builds + ",\"F\"],[1,\"I\"]", "[" + max_builds + ",0],[1,1]"]]), + # Incremental results + (["3"], [["001.html", "[1,\"T\"]", "[1,1]"]]), + # Expected results + (["3", "2", "1"], [["001.html", "[1,\"T\"],[" + max_builds + ",\"F\"]", "[1,1],[" + max_builds + ",0]"]])) + + # Get test name list only. Don't include non-test-list data and + # of test result details. 
+ self._test_get_test_list( + # Input results + (["3", "2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]), + # Expected results + ["001.html", "002.html"]) + +if __name__ == '__main__': + unittest.main() diff --git a/Tools/TestResultServer/model/testfile.py b/Tools/TestResultServer/model/testfile.py new file mode 100644 index 0000000..e600c99 --- /dev/null +++ b/Tools/TestResultServer/model/testfile.py @@ -0,0 +1,127 @@ +# Copyright (C) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from datetime import datetime +import logging + +from google.appengine.ext import db + +from model.datastorefile import DataStoreFile + + +class TestFile(DataStoreFile): + master = db.StringProperty() + builder = db.StringProperty() + test_type = db.StringProperty() + + @classmethod + def delete_file(cls, key, master, builder, test_type, name, limit): + if key: + file = db.get(key) + if not file: + logging.warning("File not found, key: %s.", key) + return False + + file._delete_all() + else: + files = cls.get_files(master, builder, test_type, name, limit) + if not files: + logging.warning( + "File not found, master: %s, builder: %s, test_type:%s, name: %s.", + builder, test_type, name) + return False + + for file in files: + file._delete_all() + + return True + + @classmethod + def get_files(cls, master, builder, test_type, name, load_data=True, limit=1): + query = TestFile.all() + if master: + query = query.filter("master =", master) + if builder: + query = query.filter("builder =", builder) + if test_type: + query = query.filter("test_type =", test_type) + if name: + query = query.filter("name =", name) + + files = query.order("-date").fetch(limit) + if load_data: + for file in files: + file.load_data() + + return files + + @classmethod + def add_file(cls, master, builder, test_type, name, data): + file = TestFile() + file.master = master + file.builder = builder + file.test_type = test_type + file.name = name + + if not 
file.save(data): + return None + + logging.info( + "File saved, master: %s, builder: %s, test_type: %s, name: %s, key: %s.", + master, builder, test_type, file.name, str(file.data_keys)) + + return file + + @classmethod + def update(cls, master, builder, test_type, name, data): + files = cls.get_files(master, builder, test_type, name) + if not files: + return cls.add_file(master, builder, test_type, name, data) + + file = files[0] + if not file.save(data): + return None + + logging.info( + "File replaced, master: %s, builder: %s, test_type: %s, name: %s, data key: %s.", + master, builder, test_type, file.name, str(file.data_keys)) + + return file + + def save(self, data): + if not self.save_data(data): + return False + + self.date = datetime.now() + self.put() + + return True + + def _delete_all(self): + self.delete_data() + self.delete() diff --git a/Tools/TestResultServer/stylesheets/dashboardfile.css b/Tools/TestResultServer/stylesheets/dashboardfile.css new file mode 100644 index 0000000..1b0921c --- /dev/null +++ b/Tools/TestResultServer/stylesheets/dashboardfile.css @@ -0,0 +1,30 @@ +body { + font-family: Verdana, Helvetica, sans-serif; + padding: 0px; + color: #444; +} +h1 { + color: #444; + font-size: 14pt; + font-style: italic; + margin: 0px; + padding: 5px; +} +table { + border-spacing: 0px; +} +th { + background-color: #AAA; + color: white; + text-align: left; + padding: 5px; + font-size: 12pt; +} +td { + font-size: 11pt; + padding: 3px; + text-align: left; +} +tr:hover { + background-color: #EEE; +} diff --git a/Tools/TestResultServer/stylesheets/form.css b/Tools/TestResultServer/stylesheets/form.css new file mode 100644 index 0000000..b8f367d --- /dev/null +++ b/Tools/TestResultServer/stylesheets/form.css @@ -0,0 +1,26 @@ +body { + font-family: Verdana; + padding: 0px; + color: #444; +} +h1 { + color: #444; + font-size: 14pt; + font-style: italic; + margin: 0px; + padding: 5px; +} +.label { + margin: 1px; + padding: 5px; + font-size: 11pt; + width: 
90px; +} +.inputtext { + font-size: 11pt; +} +.button { + margin: 1px; + padding: 1px; + font-size: 11pt; +} diff --git a/Tools/TestResultServer/stylesheets/menu.css b/Tools/TestResultServer/stylesheets/menu.css new file mode 100644 index 0000000..9948605 --- /dev/null +++ b/Tools/TestResultServer/stylesheets/menu.css @@ -0,0 +1,28 @@ +body { + font-family: Verdana, Helvetica, sans-serif; +} +h1 { + background-color: #EEE; + color: #444; + font-size: 14pt; + font-style: italic; + margin: 0px; + padding: 5px; +} +ul { + margin: 0px; + padding: 20px; + list-style: none; +} +li { + padding: 5px; +} +li:hover { + background-color: #EEE; +} +.login { + font-size: 8pt; + text-align: right; + width: 100%; +} + diff --git a/Tools/TestResultServer/stylesheets/testfile.css b/Tools/TestResultServer/stylesheets/testfile.css new file mode 100644 index 0000000..1b0921c --- /dev/null +++ b/Tools/TestResultServer/stylesheets/testfile.css @@ -0,0 +1,30 @@ +body { + font-family: Verdana, Helvetica, sans-serif; + padding: 0px; + color: #444; +} +h1 { + color: #444; + font-size: 14pt; + font-style: italic; + margin: 0px; + padding: 5px; +} +table { + border-spacing: 0px; +} +th { + background-color: #AAA; + color: white; + text-align: left; + padding: 5px; + font-size: 12pt; +} +td { + font-size: 11pt; + padding: 3px; + text-align: left; +} +tr:hover { + background-color: #EEE; +} diff --git a/Tools/TestResultServer/templates/dashboardfilelist.html b/Tools/TestResultServer/templates/dashboardfilelist.html new file mode 100644 index 0000000..818cb91 --- /dev/null +++ b/Tools/TestResultServer/templates/dashboardfilelist.html @@ -0,0 +1,38 @@ +<!DOCTYPE html> +<html> +<head> +<title>Dashboard Files</title> +<link type="text/css" rel="stylesheet" href="/stylesheets/dashboardfile.css" /> +</head> +<body> +<h1>Dashboard Files +</h1> +<div> + <table> + <tr> + <th>File</th> + <th>Date</th> + {% if admin %} + <th></th> + {% endif %} + {% for file in files %} + <tr>{% if file.name %} + <td><a 
href="/dashboards/{{ file.name }}" > + {{ file.name }} + </a> + </td> + <td>{{ file.date|date:"d-M-Y H:i:s" }} + </td> + {% if admin %} + <td><a href="/dashboards/delete?file={{ file.name }}" > + Delete + </a> + </td> + {% endif %} + {% endif %} + </tr> + {% endfor %} + </table> +</div> +</body> +</html> diff --git a/Tools/TestResultServer/templates/menu.html b/Tools/TestResultServer/templates/menu.html new file mode 100644 index 0000000..1ad9f4d --- /dev/null +++ b/Tools/TestResultServer/templates/menu.html @@ -0,0 +1,27 @@ +<!DOCTYPE html> +<html> +<head> +<title>Test Result Server</title> +<table class=login> + <tr> + <td> + {% if user_email %} + <span>{{ user_email }}</span> + {% endif %} + <span><a href="{{ login_url }}">{{ login_text }}</a></span> + </td> + </tr> +</table> +<link type="text/css" rel="stylesheet" href="/stylesheets/menu.css" /> +</head> +<body> +<h1>Test Result Server</h1> +<div> + <ul>{% for title,link in menu %} + <li> + <a href="{{ link }}" >{{ title }}</a> + </li>{% endfor %} + </ul> +</div> +</body> +</html> diff --git a/Tools/TestResultServer/templates/showfilelist.html b/Tools/TestResultServer/templates/showfilelist.html new file mode 100644 index 0000000..d292fe2 --- /dev/null +++ b/Tools/TestResultServer/templates/showfilelist.html @@ -0,0 +1,58 @@ +<!DOCTYPE html> +<html> +<head> +<title>Test Results</title> +<link type="text/css" rel="stylesheet" href="/stylesheets/testfile.css" /> +</head> +<body> +<h1>Test Results +{% if builder or test_type or name %} +- {{ builder }} {{test_type }} {{ name }} +{% endif %} +</h1> +<div> + <table> + <tr> + <th>Master</th> + <th>Builder</th> + <th>Test Type</th> + <th>File</th> + <th>Date</th> + {% if admin %} + <th></th> + {% endif %} + {% for file in files %} + <tr>{% if file.builder and file.name %} + <td><a href="/testfile?master={{ file.master }}" > + {{ file.master }} + </a> + </td> + <td><a href="/testfile?builder={{ file.builder }}" > + {{ file.builder }} + </a> + </td> + <td>{% if 
file.test_type %} + <a href="/testfile?testtype={{ file.test_type }}" > + {{ file.test_type }} + </a> + {% endif %} + </td> + <td><a href="/testfile?builder={{ file.builder }}&name={{ file.name }}" > + {{ file.name }} + </a> + </td> + <td>{{ file.date|date:"d-M-Y H:i:s" }} + </td> + {% if admin %} + <td><a href="/testfile/delete?key={{ file.key }}&builder={{ builder }}&name={{ name }}" > + Delete + </a> + </td> + {% endif %} + {% endif %} + </tr> + {% endfor %} + </table> +</div> +</body> +</html> diff --git a/Tools/TestResultServer/templates/uploadform.html b/Tools/TestResultServer/templates/uploadform.html new file mode 100644 index 0000000..9974a24 --- /dev/null +++ b/Tools/TestResultServer/templates/uploadform.html @@ -0,0 +1,33 @@ +<!DOCTYPE html> +<html> +<head> +<title>Upload Test Result File</title> +<link type="text/css" rel="stylesheet" href="/stylesheets/form.css" /> +</head> +<body> +<h1>Upload Test Result File</h1> +<form id="uploadForm" name="test_result_upload" accept="text/html" action="{{ upload_url }}" enctype="multipart/form-data" method="post"> + <br> + <table> + <tr> + <td class=label><label>Master:</label></td> + <td><input class=inputtext type="text" name="master" placeholder="Chromium"/></td> + </tr> + <tr> + <td class=label><label>Builder:</label></td> + <td><input class=inputtext type="text" name="builder" placeholder="Webkit"/></td> + </tr> + <tr> + <td class=label><label>Test Type:</label></td> + <td><input class=inputtext type="text" name="testtype" placeholder="layout-tests"/></td> + </tr> + </table> + <br> + <div><input class=button type="checkbox" name="incremental">Incremental results, merge with server file.</div> + <br> + <div><input class=button type="file" name="file" multiple></div> + <br> + <div><input class=button type="submit" value="Upload"></div> +</form> +</body> +</html> |