diff options
Diffstat (limited to 'Tools/Scripts/webkitpy/common/net')
25 files changed, 3670 insertions, 0 deletions
class Attachment(object):
    """A single attachment on a Bugzilla bug (usually, but not always, a patch).

    Wraps the raw attachment dictionary parsed out of Bugzilla's XML and
    lazily validates the review/commit-queue flag setters against the
    committers list.
    """

    # Prefix used for rollout-patch attachment names.
    rollout_preamble = "ROLLOUT of r"

    def __init__(self, attachment_dictionary, bug):
        self._attachment_dictionary = attachment_dictionary
        self._bug = bug
        # Caches for the validated flag setters; filled on first access.
        self._reviewer = None
        self._committer = None

    def _bugzilla(self):
        # FIXME: We shouldn't be grabbing at the Bug's private _bugzilla.
        return self._bug._bugzilla

    def id(self):
        return int(self._attachment_dictionary.get("id"))

    def attacher_is_committer(self):
        # NOTE(review): the original referenced an undefined local "patch" and
        # used self._bugzilla (a bound method) without calling it; either would
        # raise at runtime.  Fixed to look up this attachment's own attacher.
        return self._bugzilla().committers.committer_by_email(
            self.attacher_email())

    def attacher_email(self):
        return self._attachment_dictionary.get("attacher_email")

    def bug(self):
        return self._bug

    def bug_id(self):
        return int(self._attachment_dictionary.get("bug_id"))

    def is_patch(self):
        # "not not" coerces the stored value (possibly "1"/None) to a bool.
        return not not self._attachment_dictionary.get("is_patch")

    def is_obsolete(self):
        return not not self._attachment_dictionary.get("is_obsolete")

    def is_rollout(self):
        return self.name().startswith(self.rollout_preamble)

    def name(self):
        return self._attachment_dictionary.get("name")

    def attach_date(self):
        return self._attachment_dictionary.get("attach_date")

    def review(self):
        # Value of the "review" flag: "+", "-", "?" or None.
        return self._attachment_dictionary.get("review")

    def commit_queue(self):
        # Value of the "commit-queue" flag: "+", "-", "?" or None.
        return self._attachment_dictionary.get("commit-queue")

    def url(self):
        # FIXME: This should just return
        # self._bugzilla().attachment_url_for_id(self.id()). scm_unittest.py
        # depends on the current behavior.
        return self._attachment_dictionary.get("url")

    def contents(self):
        return self._bugzilla().fetch_attachment_contents(self.id())

    def _validate_flag_value(self, flag):
        """Return the committers-list entry for the setter of *flag*, or None.

        Logs a warning when the flag was set by an address that is not in
        the committers list (an "invalid" reviewer/committer).
        """
        email = self._attachment_dictionary.get("%s_email" % flag)
        if not email:
            return None
        committer = getattr(self._bugzilla().committers,
                            "%s_by_email" % flag)(email)
        if committer:
            return committer
        log("Warning, attachment %s on bug %s has invalid %s (%s)" % (
            self._attachment_dictionary['id'],
            self._attachment_dictionary['bug_id'], flag, email))

    def reviewer(self):
        if not self._reviewer:
            self._reviewer = self._validate_flag_value("reviewer")
        return self._reviewer

    def committer(self):
        if not self._committer:
            self._committer = self._validate_flag_value("committer")
        return self._committer
class Bug(object):
    """Model object for a single Bugzilla bug.

    FIXME: This class is kinda a hack for now.  It exists so we have one
    place to hold bug logic, even if much of the code deals with
    dictionaries still.
    """

    def __init__(self, bug_dictionary, bugzilla):
        self.bug_dictionary = bug_dictionary
        self._bugzilla = bugzilla

    def id(self):
        return self.bug_dictionary["id"]

    def title(self):
        return self.bug_dictionary["title"]

    def reporter_email(self):
        return self.bug_dictionary["reporter_email"]

    def assigned_to_email(self):
        return self.bug_dictionary["assigned_to_email"]

    # FIXME: This information should be stored in some sort of
    # webkit_config.py instead of here.
    unassigned_emails = frozenset([
        "webkit-unassigned@lists.webkit.org",
        "webkit-qt-unassigned@trolltech.com",
    ])

    def is_unassigned(self):
        return self.assigned_to_email() in self.unassigned_emails

    def status(self):
        return self.bug_dictionary["bug_status"]

    # Bugzilla has many status states we don't really use in WebKit:
    # https://bugs.webkit.org/page.cgi?id=fields.html#status
    _open_states = ["UNCONFIRMED", "NEW", "ASSIGNED", "REOPENED"]
    _closed_states = ["RESOLVED", "VERIFIED", "CLOSED"]

    def is_open(self):
        return self.status() in self._open_states

    def is_closed(self):
        return not self.is_open()

    def duplicate_of(self):
        # Only set when the bug was resolved as a duplicate.
        return self.bug_dictionary.get('dup_id', None)

    # Rarely do we actually want obsolete attachments.
    def attachments(self, include_obsolete=False):
        """Return Attachment objects for this bug's attachments.

        All the list-returning patch accessors below use list
        comprehensions; the original used filter() here, which returns a
        lazy iterator under Python 3 and made the return types
        inconsistent.
        """
        attachments = self.bug_dictionary["attachments"]
        if not include_obsolete:
            attachments = [attachment for attachment in attachments
                           if not attachment["is_obsolete"]]
        return [Attachment(attachment, self) for attachment in attachments]

    def patches(self, include_obsolete=False):
        return [patch for patch in self.attachments(include_obsolete)
                if patch.is_patch()]

    def unreviewed_patches(self):
        return [patch for patch in self.patches() if patch.review() == "?"]

    def reviewed_patches(self, include_invalid=False):
        patches = [patch for patch in self.patches() if patch.review() == "+"]
        if include_invalid:
            return patches
        # Checking reviewer() ensures that it was both reviewed and has a
        # valid reviewer.
        return [patch for patch in patches if patch.reviewer()]

    def commit_queued_patches(self, include_invalid=False):
        patches = [patch for patch in self.patches()
                   if patch.commit_queue() == "+"]
        if include_invalid:
            return patches
        # Checking committer() ensures that it was both commit-queue+'d and
        # has a valid committer.
        return [patch for patch in patches if patch.committer()]
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import unittest + +from .bug import Bug + + +class BugTest(unittest.TestCase): + def test_is_unassigned(self): + for email in Bug.unassigned_emails: + bug = Bug({"assigned_to_email": email}, bugzilla=None) + self.assertTrue(bug.is_unassigned()) + bug = Bug({"assigned_to_email": "test@test.com"}, bugzilla=None) + self.assertFalse(bug.is_unassigned()) diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py new file mode 100644 index 0000000..17a8515 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py @@ -0,0 +1,766 @@ +# Copyright (c) 2009 Google Inc. All rights reserved. +# Copyright (c) 2009 Apple Inc. All rights reserved. +# Copyright (c) 2010 Research In Motion Limited. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# WebKit's Python module for interacting with Bugzilla + +import mimetypes +import os.path +import re +import StringIO +import urllib + +from datetime import datetime # used in timestamp() + +from .attachment import Attachment +from .bug import Bug + +from webkitpy.common.system.deprecated_logging import log +from webkitpy.common.config import committers +from webkitpy.common.net.credentials import Credentials +from webkitpy.common.system.user import User +from webkitpy.thirdparty.autoinstalled.mechanize import Browser +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup, SoupStrainer + + +# FIXME: parse_bug_id should not be a free function. 
# FIXME: parse_bug_id should not be a free function.
def parse_bug_id(message):
    """Return the first WebKit bug id mentioned in *message*, or None.

    Recognizes both the short http://webkit.org/b/NNNNN form and full
    show_bug.cgi?id=NNNNN URLs on the configured bug server.
    """
    if not message:
        return None
    match = re.search(r"http\://webkit\.org/b/(?P<bug_id>\d+)", message)
    if match:
        return int(match.group('bug_id'))
    match = re.search(
        Bugzilla.bug_server_regex + r"show_bug\.cgi\?id=(?P<bug_id>\d+)",
        message)
    if match:
        return int(match.group('bug_id'))
    return None


def timestamp():
    """Return the current local time formatted as YYYYMMDDHHMMSS."""
    return datetime.now().strftime("%Y%m%d%H%M%S")


# A container for all of the logic for making and parsing bugzilla queries.
class BugzillaQueries(object):

    def __init__(self, bugzilla):
        self._bugzilla = bugzilla

    def _is_xml_bugs_form(self, form):
        # ClientForm.HTMLForm.find_control throws if the control is not found,
        # so we do a manual search instead:
        return "xml" in [control.id for control in form.controls]

    # This is kinda a hack.  There is probably a better way to get this
    # information from bugzilla.
    def _parse_result_count(self, results_page):
        """Return the number of bugs a results page reports."""
        result_count_text = BeautifulSoup(results_page).find(attrs={'class': 'bz_result_count'}).string
        result_count_parts = result_count_text.strip().split(" ")
        # Bugzilla spells a count of 0 as "Zarro" and 1 as "One".
        if result_count_parts[0] == "Zarro":
            return 0
        if result_count_parts[0] == "One":
            return 1
        return int(result_count_parts[0])

    # Note: _load_query, _fetch_bug and _fetch_bugs_from_advanced_query
    # are the only methods which access self._bugzilla.

    def _load_query(self, query):
        self._bugzilla.authenticate()
        full_url = "%s%s" % (self._bugzilla.bug_server_url, query)
        return self._bugzilla.browser.open(full_url)

    def _fetch_bugs_from_advanced_query(self, query):
        results_page = self._load_query(query)
        if not self._parse_result_count(results_page):
            return []
        # Bugzilla results pages have an "XML" submit button at the bottom
        # which can be used to get an XML page containing all of the <bug>
        # elements.  This is slightly lame in that it assumes _load_query used
        # self._bugzilla.browser and that it's in an acceptable state.
        self._bugzilla.browser.select_form(predicate=self._is_xml_bugs_form)
        bugs_xml = self._bugzilla.browser.submit()
        return self._bugzilla._parse_bugs_from_xml(bugs_xml)

    def _fetch_bug(self, bug_id):
        return self._bugzilla.fetch_bug(bug_id)

    def _fetch_bug_ids_advanced_query(self, query):
        soup = BeautifulSoup(self._load_query(query))
        # The contents of the <a> inside the cells in the first column happen
        # to be the bug id.
        return [int(bug_link_cell.find("a").string)
                for bug_link_cell in soup('td', "first-child")]

    def _parse_attachment_ids_request_query(self, page):
        digits = re.compile(r"\d+")
        attachment_href = re.compile(r"attachment.cgi\?id=\d+&action=review")
        attachment_links = SoupStrainer("a", href=attachment_href)
        return [int(digits.search(tag["href"]).group(0))
                for tag in BeautifulSoup(page, parseOnlyThese=attachment_links)]

    def _fetch_attachment_ids_request_query(self, query):
        return self._parse_attachment_ids_request_query(self._load_query(query))

    def _parse_quips(self, page):
        soup = BeautifulSoup(page, convertEntities=BeautifulSoup.HTML_ENTITIES)
        quips = soup.find(text=re.compile(r"Existing quips:")).findNext("ul").findAll("li")
        return [unicode(quip_entry.string) for quip_entry in quips]

    def fetch_quips(self):
        return self._parse_quips(self._load_query("/quips.cgi?action=show"))

    # List of all r+'d bugs.
    def fetch_bug_ids_from_pending_commit_list(self):
        needs_commit_query_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review%2B"
        return self._fetch_bug_ids_advanced_query(needs_commit_query_url)

    def fetch_bugs_matching_quicksearch(self, search_string):
        # We may want to use a more explicit query than "quicksearch".
        # If quicksearch changes we should probably change to use
        # a normal buglist.cgi?query_format=advanced query.
        quicksearch_url = "buglist.cgi?quicksearch=%s" % urllib.quote(search_string)
        return self._fetch_bugs_from_advanced_query(quicksearch_url)

    # Currently this returns all bugs across all components.
    # In the future we may wish to extend this API to construct more
    # restricted searches.
    def fetch_bugs_matching_search(self, search_string, author_email=None):
        query = "buglist.cgi?query_format=advanced"
        if search_string:
            query += "&short_desc_type=allwordssubstr&short_desc=%s" % urllib.quote(search_string)
        if author_email:
            # NOTE(review): the original quoted search_string here, so the
            # reporter filter never used the supplied author_email.  Fixed.
            query += "&emailreporter1=1&emailtype1=substring&email1=%s" % urllib.quote(author_email)
        return self._fetch_bugs_from_advanced_query(query)

    def fetch_patches_from_pending_commit_list(self):
        return sum([self._fetch_bug(bug_id).reviewed_patches()
            for bug_id in self.fetch_bug_ids_from_pending_commit_list()], [])

    def fetch_bug_ids_from_commit_queue(self):
        commit_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=commit-queue%2B&order=Last+Changed"
        return self._fetch_bug_ids_advanced_query(commit_queue_url)

    def fetch_patches_from_commit_queue(self):
        # This function will only return patches which have valid committers
        # set.  It won't reject patches with invalid committers/reviewers.
        return sum([self._fetch_bug(bug_id).commit_queued_patches()
                    for bug_id in self.fetch_bug_ids_from_commit_queue()], [])

    def fetch_bug_ids_from_review_queue(self):
        review_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review?"
        return self._fetch_bug_ids_advanced_query(review_queue_url)

    # This method will make several requests to bugzilla.
    def fetch_patches_from_review_queue(self, limit=None):
        # [:None] returns the whole array.
        return sum([self._fetch_bug(bug_id).unreviewed_patches()
            for bug_id in self.fetch_bug_ids_from_review_queue()[:limit]], [])

    # NOTE: This is the only client of _fetch_attachment_ids_request_query
    # This method only makes one request to bugzilla.
    def fetch_attachment_ids_from_review_queue(self):
        review_queue_url = "request.cgi?action=queue&type=review&group=type"
        return self._fetch_attachment_ids_request_query(review_queue_url)
+ if not self.cached_quips and not self.dryrun: + self.cached_quips = self.queries.fetch_quips() + return self.cached_quips + + def bug_url_for_bug_id(self, bug_id, xml=False): + if not bug_id: + return None + content_type = "&ctype=xml" if xml else "" + return "%sshow_bug.cgi?id=%s%s" % (self.bug_server_url, bug_id, content_type) + + def short_bug_url_for_bug_id(self, bug_id): + if not bug_id: + return None + return "http://webkit.org/b/%s" % bug_id + + def add_attachment_url(self, bug_id): + return "%sattachment.cgi?action=enter&bugid=%s" % (self.bug_server_url, bug_id) + + def attachment_url_for_id(self, attachment_id, action="view"): + if not attachment_id: + return None + action_param = "" + if action and action != "view": + action_param = "&action=%s" % action + return "%sattachment.cgi?id=%s%s" % (self.bug_server_url, + attachment_id, + action_param) + + def _parse_attachment_flag(self, + element, + flag_name, + attachment, + result_key): + flag = element.find('flag', attrs={'name': flag_name}) + if flag: + attachment[flag_name] = flag['status'] + if flag['status'] == '+': + attachment[result_key] = flag['setter'] + # Sadly show_bug.cgi?ctype=xml does not expose the flag modification date. + + def _string_contents(self, soup): + # WebKit's bugzilla instance uses UTF-8. + # BeautifulSoup always returns Unicode strings, however + # the .string method returns a (unicode) NavigableString. + # NavigableString can confuse other parts of the code, so we + # convert from NavigableString to a real unicode() object using unicode(). + return unicode(soup.string) + + # Example: 2010-01-20 14:31 PST + # FIXME: Some bugzilla dates seem to have seconds in them? + # Python does not support timezones out of the box. 
+ # Assume that bugzilla always uses PST (which is true for bugs.webkit.org) + _bugzilla_date_format = "%Y-%m-%d %H:%M" + + @classmethod + def _parse_date(cls, date_string): + (date, time, time_zone) = date_string.split(" ") + # Ignore the timezone because python doesn't understand timezones out of the box. + date_string = "%s %s" % (date, time) + return datetime.strptime(date_string, cls._bugzilla_date_format) + + def _date_contents(self, soup): + return self._parse_date(self._string_contents(soup)) + + def _parse_attachment_element(self, element, bug_id): + attachment = {} + attachment['bug_id'] = bug_id + attachment['is_obsolete'] = (element.has_key('isobsolete') and element['isobsolete'] == "1") + attachment['is_patch'] = (element.has_key('ispatch') and element['ispatch'] == "1") + attachment['id'] = int(element.find('attachid').string) + # FIXME: No need to parse out the url here. + attachment['url'] = self.attachment_url_for_id(attachment['id']) + attachment["attach_date"] = self._date_contents(element.find("date")) + attachment['name'] = self._string_contents(element.find('desc')) + attachment['attacher_email'] = self._string_contents(element.find('attacher')) + attachment['type'] = self._string_contents(element.find('type')) + self._parse_attachment_flag( + element, 'review', attachment, 'reviewer_email') + self._parse_attachment_flag( + element, 'commit-queue', attachment, 'committer_email') + return attachment + + def _parse_bugs_from_xml(self, page): + soup = BeautifulSoup(page) + # Without the unicode() call, BeautifulSoup occasionally complains of being + # passed None for no apparent reason. 
+ return [Bug(self._parse_bug_dictionary_from_xml(unicode(bug_xml)), self) for bug_xml in soup('bug')] + + def _parse_bug_dictionary_from_xml(self, page): + soup = BeautifulSoup(page) + bug = {} + bug["id"] = int(soup.find("bug_id").string) + bug["title"] = self._string_contents(soup.find("short_desc")) + bug["bug_status"] = self._string_contents(soup.find("bug_status")) + dup_id = soup.find("dup_id") + if dup_id: + bug["dup_id"] = self._string_contents(dup_id) + bug["reporter_email"] = self._string_contents(soup.find("reporter")) + bug["assigned_to_email"] = self._string_contents(soup.find("assigned_to")) + bug["cc_emails"] = [self._string_contents(element) for element in soup.findAll('cc')] + bug["attachments"] = [self._parse_attachment_element(element, bug["id"]) for element in soup.findAll('attachment')] + return bug + + # Makes testing fetch_*_from_bug() possible until we have a better + # BugzillaNetwork abstration. + + def _fetch_bug_page(self, bug_id): + bug_url = self.bug_url_for_bug_id(bug_id, xml=True) + log("Fetching: %s" % bug_url) + return self.browser.open(bug_url) + + def fetch_bug_dictionary(self, bug_id): + try: + return self._parse_bug_dictionary_from_xml(self._fetch_bug_page(bug_id)) + except KeyboardInterrupt: + raise + except: + self.authenticate() + return self._parse_bug_dictionary_from_xml(self._fetch_bug_page(bug_id)) + + # FIXME: A BugzillaCache object should provide all these fetch_ methods. + + def fetch_bug(self, bug_id): + return Bug(self.fetch_bug_dictionary(bug_id), self) + + def fetch_attachment_contents(self, attachment_id): + attachment_url = self.attachment_url_for_id(attachment_id) + # We need to authenticate to download patches from security bugs. + self.authenticate() + return self.browser.open(attachment_url).read() + + def _parse_bug_id_from_attachment_page(self, page): + # The "Up" relation happens to point to the bug. 
+ up_link = BeautifulSoup(page).find('link', rel='Up') + if not up_link: + # This attachment does not exist (or you don't have permissions to + # view it). + return None + match = re.search("show_bug.cgi\?id=(?P<bug_id>\d+)", up_link['href']) + return int(match.group('bug_id')) + + def bug_id_for_attachment_id(self, attachment_id): + self.authenticate() + + attachment_url = self.attachment_url_for_id(attachment_id, 'edit') + log("Fetching: %s" % attachment_url) + page = self.browser.open(attachment_url) + return self._parse_bug_id_from_attachment_page(page) + + # FIXME: This should just return Attachment(id), which should be able to + # lazily fetch needed data. + + def fetch_attachment(self, attachment_id): + # We could grab all the attachment details off of the attachment edit + # page but we already have working code to do so off of the bugs page, + # so re-use that. + bug_id = self.bug_id_for_attachment_id(attachment_id) + if not bug_id: + return None + attachments = self.fetch_bug(bug_id).attachments(include_obsolete=True) + for attachment in attachments: + if attachment.id() == int(attachment_id): + return attachment + return None # This should never be hit. + + def authenticate(self): + if self.authenticated: + return + + if self.dryrun: + log("Skipping log in for dry run...") + self.authenticated = True + return + + credentials = Credentials(self.bug_server_host, git_prefix="bugzilla") + + attempts = 0 + while not self.authenticated: + attempts += 1 + username, password = credentials.read_credentials() + + log("Logging in as %s..." % username) + self.browser.open(self.bug_server_url + + "index.cgi?GoAheadAndLogIn=1") + self.browser.select_form(name="login") + self.browser['Bugzilla_login'] = username + self.browser['Bugzilla_password'] = password + response = self.browser.submit() + + match = re.search("<title>(.+?)</title>", response.read()) + # If the resulting page has a title, and it contains the word + # "invalid" assume it's the login failure page. 
+ if match and re.search("Invalid", match.group(1), re.IGNORECASE): + errorMessage = "Bugzilla login failed: %s" % match.group(1) + # raise an exception only if this was the last attempt + if attempts < 5: + log(errorMessage) + else: + raise Exception(errorMessage) + else: + self.authenticated = True + self.username = username + + def _commit_queue_flag(self, mark_for_landing, mark_for_commit_queue): + if mark_for_landing: + return '+' + elif mark_for_commit_queue: + return '?' + return 'X' + + # FIXME: mark_for_commit_queue and mark_for_landing should be joined into a single commit_flag argument. + def _fill_attachment_form(self, + description, + file_object, + mark_for_review=False, + mark_for_commit_queue=False, + mark_for_landing=False, + is_patch=False, + filename=None, + mimetype=None): + self.browser['description'] = description + if is_patch: + self.browser['ispatch'] = ("1",) + # FIXME: Should this use self._find_select_element_for_flag? + self.browser['flag_type-1'] = ('?',) if mark_for_review else ('X',) + self.browser['flag_type-3'] = (self._commit_queue_flag(mark_for_landing, mark_for_commit_queue),) + + filename = filename or "%s.patch" % timestamp() + if not mimetype: + mimetypes.add_type('text/plain', '.patch') # Make sure mimetypes knows about .patch + mimetype, _ = mimetypes.guess_type(filename) + if not mimetype: + mimetype = "text/plain" # Bugzilla might auto-guess for us and we might not need this? + self.browser.add_file(file_object, mimetype, filename, 'data') + + def _file_object_for_upload(self, file_or_string): + if hasattr(file_or_string, 'read'): + return file_or_string + # Only if file_or_string is not already encoded do we want to encode it. + if isinstance(file_or_string, unicode): + file_or_string = file_or_string.encode('utf-8') + return StringIO.StringIO(file_or_string) + + # timestamp argument is just for unittests. 
+ def _filename_for_upload(self, file_object, bug_id, extension="txt", timestamp=timestamp): + if hasattr(file_object, "name"): + return file_object.name + return "bug-%s-%s.%s" % (bug_id, timestamp(), extension) + + def add_attachment_to_bug(self, + bug_id, + file_or_string, + description, + filename=None, + comment_text=None): + self.authenticate() + log('Adding attachment "%s" to %s' % (description, self.bug_url_for_bug_id(bug_id))) + if self.dryrun: + log(comment_text) + return + + self.browser.open(self.add_attachment_url(bug_id)) + self.browser.select_form(name="entryform") + file_object = self._file_object_for_upload(file_or_string) + filename = filename or self._filename_for_upload(file_object, bug_id) + self._fill_attachment_form(description, file_object, filename=filename) + if comment_text: + log(comment_text) + self.browser['comment'] = comment_text + self.browser.submit() + + # FIXME: The arguments to this function should be simplified and then + # this should be merged into add_attachment_to_bug + def add_patch_to_bug(self, + bug_id, + file_or_string, + description, + comment_text=None, + mark_for_review=False, + mark_for_commit_queue=False, + mark_for_landing=False): + self.authenticate() + log('Adding patch "%s" to %s' % (description, self.bug_url_for_bug_id(bug_id))) + + if self.dryrun: + log(comment_text) + return + + self.browser.open(self.add_attachment_url(bug_id)) + self.browser.select_form(name="entryform") + file_object = self._file_object_for_upload(file_or_string) + filename = self._filename_for_upload(file_object, bug_id, extension="patch") + self._fill_attachment_form(description, + file_object, + mark_for_review=mark_for_review, + mark_for_commit_queue=mark_for_commit_queue, + mark_for_landing=mark_for_landing, + is_patch=True, + filename=filename) + if comment_text: + log(comment_text) + self.browser['comment'] = comment_text + self.browser.submit() + + # FIXME: There has to be a more concise way to write this method. 
+    # Parse the HTML page Bugzilla returns after submitting the bug-entry
+    # form.  Returns the new bug id (as a string) on success; on failure,
+    # raises an Exception embedding whatever error text Bugzilla rendered
+    # in the page body.
+    def _check_create_bug_response(self, response_html):
+        match = re.search("<title>Bug (?P<bug_id>\d+) Submitted</title>",
+                          response_html)
+        if match:
+            return match.group('bug_id')
+
+        # No success <title>; scrape Bugzilla's error message out of the
+        # page body so the raised exception is actionable.
+        match = re.search(
+            '<div id="bugzilla-body">(?P<error_message>.+)<div id="footer">',
+            response_html,
+            re.DOTALL)
+        error_message = "FAIL"
+        if match:
+            # Strip the markup and keep only non-blank text lines, indented
+            # for readability in the exception message.
+            text_lines = BeautifulSoup(
+                match.group('error_message')).findAll(text=True)
+            error_message = "\n" + '\n'.join(
+                [" " + line.strip()
+                 for line in text_lines if line.strip()])
+        raise Exception("Bug not created: %s" % error_message)
+
+    # File a new bug via the enter_bug.cgi web form, optionally attaching
+    # an initial patch.  Returns the new bug id (None in dryrun mode).
+    def create_bug(self,
+                   bug_title,
+                   bug_description,
+                   component=None,
+                   diff=None,
+                   patch_description=None,
+                   cc=None,
+                   blocked=None,
+                   assignee=None,
+                   mark_for_review=False,
+                   mark_for_commit_queue=False):
+        self.authenticate()
+
+        log('Creating bug with title "%s"' % bug_title)
+        if self.dryrun:
+            log(bug_description)
+            # FIXME: This will make some paths fail, as they assume this returns an id.
+            return
+
+        self.browser.open(self.bug_server_url + "enter_bug.cgi?product=WebKit")
+        self.browser.select_form(name="Create")
+        component_items = self.browser.find_control('component').items
+        component_names = map(lambda item: item.name, component_items)
+        if not component:
+            component = "New Bugs"
+        # If the requested component doesn't exist, fall back to asking the
+        # user interactively rather than failing the form submit.
+        if component not in component_names:
+            component = User.prompt_with_list("Please pick a component:", component_names)
+        self.browser["component"] = [component]
+        if cc:
+            self.browser["cc"] = cc
+        if blocked:
+            self.browser["blocked"] = unicode(blocked)
+        if not assignee:
+            assignee = self.username
+        # Some Bugzilla configurations disable the assigned_to control.
+        if assignee and not self.browser.find_control("assigned_to").disabled:
+            self.browser["assigned_to"] = assignee
+        self.browser["short_desc"] = bug_title
+        self.browser["comment"] = bug_description
+
+        if diff:
+            # _fill_attachment_form expects a file-like object
+            # Patch files are already binary, so no encoding needed.
+ assert(isinstance(diff, str)) + patch_file_object = StringIO.StringIO(diff) + self._fill_attachment_form( + patch_description, + patch_file_object, + mark_for_review=mark_for_review, + mark_for_commit_queue=mark_for_commit_queue, + is_patch=True) + + response = self.browser.submit() + + bug_id = self._check_create_bug_response(response.read()) + log("Bug %s created." % bug_id) + log("%sshow_bug.cgi?id=%s" % (self.bug_server_url, bug_id)) + return bug_id + + def _find_select_element_for_flag(self, flag_name): + # FIXME: This will break if we ever re-order attachment flags + if flag_name == "review": + return self.browser.find_control(type='select', nr=0) + elif flag_name == "commit-queue": + return self.browser.find_control(type='select', nr=1) + raise Exception("Don't know how to find flag named \"%s\"" % flag_name) + + def clear_attachment_flags(self, + attachment_id, + additional_comment_text=None): + self.authenticate() + + comment_text = "Clearing flags on attachment: %s" % attachment_id + if additional_comment_text: + comment_text += "\n\n%s" % additional_comment_text + log(comment_text) + + if self.dryrun: + return + + self.browser.open(self.attachment_url_for_id(attachment_id, 'edit')) + self.browser.select_form(nr=1) + self.browser.set_value(comment_text, name='comment', nr=0) + self._find_select_element_for_flag('review').value = ("X",) + self._find_select_element_for_flag('commit-queue').value = ("X",) + self.browser.submit() + + def set_flag_on_attachment(self, + attachment_id, + flag_name, + flag_value, + comment_text=None, + additional_comment_text=None): + # FIXME: We need a way to test this function on a live bugzilla + # instance. 
+ + self.authenticate() + + if additional_comment_text: + comment_text += "\n\n%s" % additional_comment_text + log(comment_text) + + if self.dryrun: + return + + self.browser.open(self.attachment_url_for_id(attachment_id, 'edit')) + self.browser.select_form(nr=1) + + if comment_text: + self.browser.set_value(comment_text, name='comment', nr=0) + + self._find_select_element_for_flag(flag_name).value = (flag_value,) + self.browser.submit() + + # FIXME: All of these bug editing methods have a ridiculous amount of + # copy/paste code. + + def obsolete_attachment(self, attachment_id, comment_text=None): + self.authenticate() + + log("Obsoleting attachment: %s" % attachment_id) + if self.dryrun: + log(comment_text) + return + + self.browser.open(self.attachment_url_for_id(attachment_id, 'edit')) + self.browser.select_form(nr=1) + self.browser.find_control('isobsolete').items[0].selected = True + # Also clear any review flag (to remove it from review/commit queues) + self._find_select_element_for_flag('review').value = ("X",) + self._find_select_element_for_flag('commit-queue').value = ("X",) + if comment_text: + log(comment_text) + # Bugzilla has two textareas named 'comment', one is somehow + # hidden. We want the first. 
+            self.browser.set_value(comment_text, name='comment', nr=0)
+        self.browser.submit()
+
+    # Append email addresses to a bug's CC list via the show_bug.cgi form.
+    def add_cc_to_bug(self, bug_id, email_address_list):
+        self.authenticate()
+
+        log("Adding %s to the CC list for bug %s" % (email_address_list,
+                                                     bug_id))
+        if self.dryrun:
+            return
+
+        self.browser.open(self.bug_url_for_bug_id(bug_id))
+        self.browser.select_form(name="changeform")
+        # Bugzilla's "newcc" field takes a comma-separated address list.
+        self.browser["newcc"] = ", ".join(email_address_list)
+        self.browser.submit()
+
+    # Post a comment on a bug; optionally CC additional addresses in the
+    # same form submit.
+    def post_comment_to_bug(self, bug_id, comment_text, cc=None):
+        self.authenticate()
+
+        log("Adding comment to bug %s" % bug_id)
+        if self.dryrun:
+            log(comment_text)
+            return
+
+        self.browser.open(self.bug_url_for_bug_id(bug_id))
+        self.browser.select_form(name="changeform")
+        self.browser["comment"] = comment_text
+        if cc:
+            self.browser["newcc"] = ", ".join(cc)
+        self.browser.submit()
+
+    # Mark a bug RESOLVED/FIXED, with an optional closing comment.
+    def close_bug_as_fixed(self, bug_id, comment_text=None):
+        self.authenticate()
+
+        log("Closing bug %s as fixed" % bug_id)
+        if self.dryrun:
+            log(comment_text)
+            return
+
+        self.browser.open(self.bug_url_for_bug_id(bug_id))
+        self.browser.select_form(name="changeform")
+        if comment_text:
+            self.browser['comment'] = comment_text
+        self.browser['bug_status'] = ['RESOLVED']
+        self.browser['resolution'] = ['FIXED']
+        self.browser.submit()
+
+    # Reassign a bug to a new owner, with an optional comment.
+    def reassign_bug(self, bug_id, assignee, comment_text=None):
+        self.authenticate()
+
+        log("Assigning bug %s to %s" % (bug_id, assignee))
+        if self.dryrun:
+            log(comment_text)
+            return
+
+        self.browser.open(self.bug_url_for_bug_id(bug_id))
+        self.browser.select_form(name="changeform")
+        if comment_text:
+            log(comment_text)
+            self.browser["comment"] = comment_text
+        self.browser["assigned_to"] = assignee
+        self.browser.submit()
+
+    # Re-open a closed bug.  Note comment_text is required (not optional)
+    # here, unlike the other editing methods.
+    def reopen_bug(self, bug_id, comment_text):
+        self.authenticate()
+
+        log("Re-opening bug %s" % bug_id)
+        # Bugzilla requires a comment when re-opening a bug, so we know it will
+        # never be None.
+ log(comment_text) + if self.dryrun: + return + + self.browser.open(self.bug_url_for_bug_id(bug_id)) + self.browser.select_form(name="changeform") + bug_status = self.browser.find_control("bug_status", type="select") + # This is a hack around the fact that ClientForm.ListControl seems to + # have no simpler way to ask if a control has an item named "REOPENED" + # without using exceptions for control flow. + possible_bug_statuses = map(lambda item: item.name, bug_status.items) + if "REOPENED" in possible_bug_statuses: + bug_status.value = ["REOPENED"] + # If the bug was never confirmed it will not have a "REOPENED" + # state, but only an "UNCONFIRMED" state. + elif "UNCONFIRMED" in possible_bug_statuses: + bug_status.value = ["UNCONFIRMED"] + else: + # FIXME: This logic is slightly backwards. We won't print this + # message if the bug is already open with state "UNCONFIRMED". + log("Did not reopen bug %s, it appears to already be open with status %s." % (bug_id, bug_status.value)) + self.browser['comment'] = comment_text + self.browser.submit() diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py new file mode 100644 index 0000000..1d08ca5 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py @@ -0,0 +1,392 @@ +# Copyright (C) 2009 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import unittest +import datetime +import StringIO + +from .bugzilla import Bugzilla, BugzillaQueries, parse_bug_id + +from webkitpy.common.system.outputcapture import OutputCapture +from webkitpy.tool.mocktool import MockBrowser +from webkitpy.thirdparty.mock import Mock +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup + + +class BugzillaTest(unittest.TestCase): + _example_attachment = ''' + <attachment + isobsolete="1" + ispatch="1" + isprivate="0" + > + <attachid>33721</attachid> + <date>2009-07-29 10:23 PDT</date> + <desc>Fixed whitespace issue</desc> + <filename>patch</filename> + <type>text/plain</type> + <size>9719</size> + <attacher>christian.plesner.hansen@gmail.com</attacher> + <flag name="review" + id="17931" + status="+" + setter="one@test.com" + /> + <flag name="commit-queue" + id="17932" + status="+" + setter="two@test.com" + /> + </attachment> +''' + _expected_example_attachment_parsing = { + 'attach_date': datetime.datetime(2009, 07, 29, 10, 23), + 'bug_id' : 100, + 'is_obsolete' : True, + 'is_patch' : True, + 
'id' : 33721, + 'url' : "https://bugs.webkit.org/attachment.cgi?id=33721", + 'name' : "Fixed whitespace issue", + 'type' : "text/plain", + 'review' : '+', + 'reviewer_email' : 'one@test.com', + 'commit-queue' : '+', + 'committer_email' : 'two@test.com', + 'attacher_email' : 'christian.plesner.hansen@gmail.com', + } + + def test_url_creation(self): + # FIXME: These would be all better as doctests + bugs = Bugzilla() + self.assertEquals(None, bugs.bug_url_for_bug_id(None)) + self.assertEquals(None, bugs.short_bug_url_for_bug_id(None)) + self.assertEquals(None, bugs.attachment_url_for_id(None)) + + def test_parse_bug_id(self): + # FIXME: These would be all better as doctests + bugs = Bugzilla() + self.assertEquals(12345, parse_bug_id("http://webkit.org/b/12345")) + self.assertEquals(12345, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?id=12345")) + self.assertEquals(12345, parse_bug_id(bugs.short_bug_url_for_bug_id(12345))) + self.assertEquals(12345, parse_bug_id(bugs.bug_url_for_bug_id(12345))) + self.assertEquals(12345, parse_bug_id(bugs.bug_url_for_bug_id(12345, xml=True))) + + # Our bug parser is super-fragile, but at least we're testing it. 
+ self.assertEquals(None, parse_bug_id("http://www.webkit.org/b/12345")) + self.assertEquals(None, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?ctype=xml&id=12345")) + + _bug_xml = """ + <bug> + <bug_id>32585</bug_id> + <creation_ts>2009-12-15 15:17 PST</creation_ts> + <short_desc>bug to test webkit-patch and commit-queue failures</short_desc> + <delta_ts>2009-12-27 21:04:50 PST</delta_ts> + <reporter_accessible>1</reporter_accessible> + <cclist_accessible>1</cclist_accessible> + <classification_id>1</classification_id> + <classification>Unclassified</classification> + <product>WebKit</product> + <component>Tools / Tests</component> + <version>528+ (Nightly build)</version> + <rep_platform>PC</rep_platform> + <op_sys>Mac OS X 10.5</op_sys> + <bug_status>NEW</bug_status> + <priority>P2</priority> + <bug_severity>Normal</bug_severity> + <target_milestone>---</target_milestone> + <everconfirmed>1</everconfirmed> + <reporter name="Eric Seidel">eric@webkit.org</reporter> + <assigned_to name="Nobody">webkit-unassigned@lists.webkit.org</assigned_to> + <cc>foo@bar.com</cc> + <cc>example@example.com</cc> + <long_desc isprivate="0"> + <who name="Eric Seidel">eric@webkit.org</who> + <bug_when>2009-12-15 15:17:28 PST</bug_when> + <thetext>bug to test webkit-patch and commit-queue failures + +Ignore this bug. 
Just for testing failure modes of webkit-patch and the commit-queue.</thetext> + </long_desc> + <attachment + isobsolete="0" + ispatch="1" + isprivate="0" + > + <attachid>45548</attachid> + <date>2009-12-27 23:51 PST</date> + <desc>Patch</desc> + <filename>bug-32585-20091228005112.patch</filename> + <type>text/plain</type> + <size>10882</size> + <attacher>mjs@apple.com</attacher> + + <token>1261988248-dc51409e9c421a4358f365fa8bec8357</token> + <data encoding="base64">SW5kZXg6IFdlYktpdC9tYWMvQ2hhbmdlTG9nCj09PT09PT09PT09PT09PT09PT09PT09PT09PT09 +removed-because-it-was-really-long +ZEZpbmlzaExvYWRXaXRoUmVhc29uOnJlYXNvbl07Cit9CisKIEBlbmQKIAogI2VuZGlmCg== +</data> + + <flag name="review" + id="27602" + status="?" + setter="mjs@apple.com" + /> + </attachment> + </bug> +""" + + _single_bug_xml = """ +<?xml version="1.0" encoding="UTF-8" standalone="yes" ?> +<!DOCTYPE bugzilla SYSTEM "https://bugs.webkit.org/bugzilla.dtd"> +<bugzilla version="3.2.3" + urlbase="https://bugs.webkit.org/" + maintainer="admin@webkit.org" + exporter="eric@webkit.org" +> +%s +</bugzilla> +""" % _bug_xml + + _expected_example_bug_parsing = { + "id" : 32585, + "title" : u"bug to test webkit-patch and commit-queue failures", + "cc_emails" : ["foo@bar.com", "example@example.com"], + "reporter_email" : "eric@webkit.org", + "assigned_to_email" : "webkit-unassigned@lists.webkit.org", + "bug_status": "NEW", + "attachments" : [{ + "attach_date": datetime.datetime(2009, 12, 27, 23, 51), + 'name': u'Patch', + 'url' : "https://bugs.webkit.org/attachment.cgi?id=45548", + 'is_obsolete': False, + 'review': '?', + 'is_patch': True, + 'attacher_email': 'mjs@apple.com', + 'bug_id': 32585, + 'type': 'text/plain', + 'id': 45548 + }], + } + + # FIXME: This should move to a central location and be shared by more unit tests. 
+    # Assert two dicts have identical key sets AND identical values,
+    # reporting the first differing key in the failure message.
+    def _assert_dictionaries_equal(self, actual, expected):
+        # Make sure we aren't parsing more or less than we expect
+        self.assertEquals(sorted(actual.keys()), sorted(expected.keys()))
+
+        for key, expected_value in expected.items():
+            self.assertEquals(actual[key], expected_value, ("Failure for key: %s: Actual='%s' Expected='%s'" % (key, actual[key], expected_value)))
+
+    # End-to-end check of single-bug XML parsing against the expected dict.
+    def test_parse_bug_dictionary_from_xml(self):
+        bug = Bugzilla()._parse_bug_dictionary_from_xml(self._single_bug_xml)
+        self._assert_dictionaries_equal(bug, self._expected_example_bug_parsing)
+
+    # A query result containing the same bug twice, to exercise multi-bug
+    # parsing.
+    _sample_multi_bug_xml = """
+<bugzilla version="3.2.3" urlbase="https://bugs.webkit.org/" maintainer="admin@webkit.org" exporter="eric@webkit.org">
+    %s
+    %s
+</bugzilla>
+""" % (_bug_xml, _bug_xml)
+
+    # Multi-bug parsing: two bugs in, two Bug objects out; empty input
+    # yields an empty list rather than raising.
+    def test_parse_bugs_from_xml(self):
+        bugzilla = Bugzilla()
+        bugs = bugzilla._parse_bugs_from_xml(self._sample_multi_bug_xml)
+        self.assertEquals(len(bugs), 2)
+        self.assertEquals(bugs[0].id(), self._expected_example_bug_parsing['id'])
+        bugs = bugzilla._parse_bugs_from_xml("")
+        self.assertEquals(len(bugs), 0)
+
+    # This could be combined into test_bug_parsing later if desired.
+ def test_attachment_parsing(self): + bugzilla = Bugzilla() + soup = BeautifulSoup(self._example_attachment) + attachment_element = soup.find("attachment") + attachment = bugzilla._parse_attachment_element(attachment_element, self._expected_example_attachment_parsing['bug_id']) + self.assertTrue(attachment) + self._assert_dictionaries_equal(attachment, self._expected_example_attachment_parsing) + + _sample_attachment_detail_page = """ +<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" + "http://www.w3.org/TR/html4/loose.dtd"> +<html> + <head> + <title> + Attachment 41073 Details for Bug 27314</title> +<link rel="Top" href="https://bugs.webkit.org/"> + <link rel="Up" href="show_bug.cgi?id=27314"> +""" + + def test_attachment_detail_bug_parsing(self): + bugzilla = Bugzilla() + self.assertEquals(27314, bugzilla._parse_bug_id_from_attachment_page(self._sample_attachment_detail_page)) + + def test_add_cc_to_bug(self): + bugzilla = Bugzilla() + bugzilla.browser = MockBrowser() + bugzilla.authenticate = lambda: None + expected_stderr = "Adding ['adam@example.com'] to the CC list for bug 42\n" + OutputCapture().assert_outputs(self, bugzilla.add_cc_to_bug, [42, ["adam@example.com"]], expected_stderr=expected_stderr) + + def _mock_control_item(self, name): + mock_item = Mock() + mock_item.name = name + return mock_item + + def _mock_find_control(self, item_names=[], selected_index=0): + mock_control = Mock() + mock_control.items = [self._mock_control_item(name) for name in item_names] + mock_control.value = [item_names[selected_index]] if item_names else None + return lambda name, type: mock_control + + def _assert_reopen(self, item_names=None, selected_index=None, extra_stderr=None): + bugzilla = Bugzilla() + bugzilla.browser = MockBrowser() + bugzilla.authenticate = lambda: None + + mock_find_control = self._mock_find_control(item_names, selected_index) + bugzilla.browser.find_control = mock_find_control + expected_stderr = "Re-opening bug 42\n['comment']\n" 
+ if extra_stderr: + expected_stderr += extra_stderr + OutputCapture().assert_outputs(self, bugzilla.reopen_bug, [42, ["comment"]], expected_stderr=expected_stderr) + + def test_reopen_bug(self): + self._assert_reopen(item_names=["REOPENED", "RESOLVED", "CLOSED"], selected_index=1) + self._assert_reopen(item_names=["UNCONFIRMED", "RESOLVED", "CLOSED"], selected_index=1) + extra_stderr = "Did not reopen bug 42, it appears to already be open with status ['NEW'].\n" + self._assert_reopen(item_names=["NEW", "RESOLVED"], selected_index=0, extra_stderr=extra_stderr) + + def test_file_object_for_upload(self): + bugzilla = Bugzilla() + file_object = StringIO.StringIO() + unicode_tor = u"WebKit \u2661 Tor Arne Vestb\u00F8!" + utf8_tor = unicode_tor.encode("utf-8") + self.assertEqual(bugzilla._file_object_for_upload(file_object), file_object) + self.assertEqual(bugzilla._file_object_for_upload(utf8_tor).read(), utf8_tor) + self.assertEqual(bugzilla._file_object_for_upload(unicode_tor).read(), utf8_tor) + + def test_filename_for_upload(self): + bugzilla = Bugzilla() + mock_file = Mock() + mock_file.name = "foo" + self.assertEqual(bugzilla._filename_for_upload(mock_file, 1234), 'foo') + mock_timestamp = lambda: "now" + filename = bugzilla._filename_for_upload(StringIO.StringIO(), 1234, extension="patch", timestamp=mock_timestamp) + self.assertEqual(filename, "bug-1234-now.patch") + + +class BugzillaQueriesTest(unittest.TestCase): + _sample_request_page = """ +<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" + "http://www.w3.org/TR/html4/loose.dtd"> +<html> + <head> + <title>Request Queue</title> + </head> +<body> + +<h3>Flag: review</h3> + <table class="requests" cellspacing="0" cellpadding="4" border="1"> + <tr> + <th>Requester</th> + <th>Requestee</th> + <th>Bug</th> + <th>Attachment</th> + <th>Created</th> + </tr> + <tr> + <td>Shinichiro Hamaji <hamaji@chromium.org></td> + <td></td> + <td><a href="show_bug.cgi?id=30015">30015: text-transform:capitalize is 
failing in CSS2.1 test suite</a></td> + <td><a href="attachment.cgi?id=40511&action=review"> +40511: Patch v0</a></td> + <td>2009-10-02 04:58 PST</td> + </tr> + <tr> + <td>Zan Dobersek <zandobersek@gmail.com></td> + <td></td> + <td><a href="show_bug.cgi?id=26304">26304: [GTK] Add controls for playing html5 video.</a></td> + <td><a href="attachment.cgi?id=40722&action=review"> +40722: Media controls, the simple approach</a></td> + <td>2009-10-06 09:13 PST</td> + </tr> + <tr> + <td>Zan Dobersek <zandobersek@gmail.com></td> + <td></td> + <td><a href="show_bug.cgi?id=26304">26304: [GTK] Add controls for playing html5 video.</a></td> + <td><a href="attachment.cgi?id=40723&action=review"> +40723: Adjust the media slider thumb size</a></td> + <td>2009-10-06 09:15 PST</td> + </tr> + </table> +</body> +</html> +""" + _sample_quip_page = u""" +<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" + "http://www.w3.org/TR/html4/loose.dtd"> +<html> + <head> + <title>Bugzilla Quip System</title> + </head> + <body> + <h2> + + Existing quips: + </h2> + <ul> + <li>Everything should be made as simple as possible, but not simpler. - Albert Einstein</li> + <li>Good artists copy. Great artists steal. - Pablo Picasso</li> + <li>\u00e7gua mole em pedra dura, tanto bate at\u008e que fura.</li> + + </ul> + </body> +</html> +""" + + def _assert_result_count(self, queries, html, count): + self.assertEquals(queries._parse_result_count(html), count) + + def test_parse_result_count(self): + queries = BugzillaQueries(None) + # Pages with results, always list the count at least twice. 
+ self._assert_result_count(queries, '<span class="bz_result_count">314 bugs found.</span><span class="bz_result_count">314 bugs found.</span>', 314) + self._assert_result_count(queries, '<span class="bz_result_count">Zarro Boogs found.</span>', 0) + self._assert_result_count(queries, '<span class="bz_result_count">\n \nOne bug found.</span>', 1) + self.assertRaises(Exception, queries._parse_result_count, ['Invalid']) + + def test_request_page_parsing(self): + queries = BugzillaQueries(None) + self.assertEquals([40511, 40722, 40723], queries._parse_attachment_ids_request_query(self._sample_request_page)) + + def test_quip_page_parsing(self): + queries = BugzillaQueries(None) + expected_quips = ["Everything should be made as simple as possible, but not simpler. - Albert Einstein", "Good artists copy. Great artists steal. - Pablo Picasso", u"\u00e7gua mole em pedra dura, tanto bate at\u008e que fura."] + self.assertEquals(expected_quips, queries._parse_quips(self._sample_quip_page)) + + def test_load_query(self): + queries = BugzillaQueries(Mock()) + queries._load_query("request.cgi?action=queue&type=review&group=type") diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/__init__.py b/Tools/Scripts/webkitpy/common/net/buildbot/__init__.py new file mode 100644 index 0000000..631ef6b --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/buildbot/__init__.py @@ -0,0 +1,5 @@ +# Required for Python to search this directory for module files + +# We only export public API here. +# It's unclear if Builder and Build need to be public. +from .buildbot import BuildBot, Builder, Build diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/buildbot.py b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot.py new file mode 100644 index 0000000..3cb6da5 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot.py @@ -0,0 +1,463 @@ +# Copyright (c) 2009, Google Inc. All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+# +# WebKit's Python module for interacting with WebKit's buildbot + +try: + import json +except ImportError: + # python 2.5 compatibility + import webkitpy.thirdparty.simplejson as json + +import operator +import re +import urllib +import urllib2 + +from webkitpy.common.net.failuremap import FailureMap +from webkitpy.common.net.layouttestresults import LayoutTestResults +from webkitpy.common.net.regressionwindow import RegressionWindow +from webkitpy.common.system.logutils import get_logger +from webkitpy.thirdparty.autoinstalled.mechanize import Browser +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup + +_log = get_logger(__file__) + + +class Builder(object): + def __init__(self, name, buildbot): + self._name = name + self._buildbot = buildbot + self._builds_cache = {} + self._revision_to_build_number = None + self._browser = Browser() + self._browser.set_handle_robots(False) # The builder pages are excluded by robots.txt + + def name(self): + return self._name + + def results_url(self): + return "http://%s/results/%s" % (self._buildbot.buildbot_host, self.url_encoded_name()) + + def url_encoded_name(self): + return urllib.quote(self._name) + + def url(self): + return "http://%s/builders/%s" % (self._buildbot.buildbot_host, self.url_encoded_name()) + + # This provides a single place to mock + def _fetch_build(self, build_number): + build_dictionary = self._buildbot._fetch_build_dictionary(self, build_number) + if not build_dictionary: + return None + return Build(self, + build_number=int(build_dictionary['number']), + revision=int(build_dictionary['sourceStamp']['revision']), + is_green=(build_dictionary['results'] == 0) # Undocumented, 0 seems to mean "pass" + ) + + def build(self, build_number): + if not build_number: + return None + cached_build = self._builds_cache.get(build_number) + if cached_build: + return cached_build + + build = self._fetch_build(build_number) + self._builds_cache[build_number] = build + return build + + def 
force_build(self, username="webkit-patch", comments=None): + def predicate(form): + try: + return form.find_control("username") + except Exception, e: + return False + self._browser.open(self.url()) + self._browser.select_form(predicate=predicate) + self._browser["username"] = username + if comments: + self._browser["comments"] = comments + return self._browser.submit() + + file_name_regexp = re.compile(r"r(?P<revision>\d+) \((?P<build_number>\d+)\)") + def _revision_and_build_for_filename(self, filename): + # Example: "r47483 (1)/" or "r47483 (1).zip" + match = self.file_name_regexp.match(filename) + return (int(match.group("revision")), int(match.group("build_number"))) + + def _fetch_revision_to_build_map(self): + # All _fetch requests go through _buildbot for easier mocking + # FIXME: This should use NetworkTransaction's 404 handling instead. + try: + # FIXME: This method is horribly slow due to the huge network load. + # FIXME: This is a poor way to do revision -> build mapping. + # Better would be to ask buildbot through some sort of API. + print "Loading revision/build list from %s." % self.results_url() + print "This may take a while..." + result_files = self._buildbot._fetch_twisted_directory_listing(self.results_url()) + except urllib2.HTTPError, error: + if error.code != 404: + raise + result_files = [] + + # This assumes there was only one build per revision, which is false but we don't care for now. + return dict([self._revision_and_build_for_filename(file_info["filename"]) for file_info in result_files]) + + def _revision_to_build_map(self): + if not self._revision_to_build_number: + self._revision_to_build_number = self._fetch_revision_to_build_map() + return self._revision_to_build_number + + def revision_build_pairs_with_results(self): + return self._revision_to_build_map().items() + + # This assumes there can be only one build per revision, which is false, but we don't care for now. 
+ def build_for_revision(self, revision, allow_failed_lookups=False): + # NOTE: This lookup will fail if that exact revision was never built. + build_number = self._revision_to_build_map().get(int(revision)) + if not build_number: + return None + build = self.build(build_number) + if not build and allow_failed_lookups: + # Builds for old revisions with fail to lookup via buildbot's json api. + build = Build(self, + build_number=build_number, + revision=revision, + is_green=False, + ) + return build + + def find_regression_window(self, red_build, look_back_limit=30): + if not red_build or red_build.is_green(): + return RegressionWindow(None, None) + common_failures = None + current_build = red_build + build_after_current_build = None + look_back_count = 0 + while current_build: + if current_build.is_green(): + # current_build can't possibly have any failures in common + # with red_build because it's green. + break + results = current_build.layout_test_results() + # We treat a lack of results as if all the test failed. + # This occurs, for example, when we can't compile at all. + if results: + failures = set(results.failing_tests()) + if common_failures == None: + common_failures = failures + else: + common_failures = common_failures.intersection(failures) + if not common_failures: + # current_build doesn't have any failures in common with + # the red build we're worried about. We assume that any + # failures in current_build were due to flakiness. + break + look_back_count += 1 + if look_back_count > look_back_limit: + return RegressionWindow(None, current_build, failing_tests=common_failures) + build_after_current_build = current_build + current_build = current_build.previous_build() + # We must iterate at least once because red_build is red. + assert(build_after_current_build) + # Current build must either be green or have no failures in common + # with red build, so we've found our failure transition. 
+ return RegressionWindow(current_build, build_after_current_build, failing_tests=common_failures) + + def find_blameworthy_regression_window(self, red_build_number, look_back_limit=30, avoid_flakey_tests=True): + red_build = self.build(red_build_number) + regression_window = self.find_regression_window(red_build, look_back_limit) + if not regression_window.build_before_failure(): + return None # We ran off the limit of our search + # If avoid_flakey_tests, require at least 2 bad builds before we + # suspect a real failure transition. + if avoid_flakey_tests and regression_window.failing_build() == red_build: + return None + return regression_window + + +class Build(object): + def __init__(self, builder, build_number, revision, is_green): + self._builder = builder + self._number = build_number + self._revision = revision + self._is_green = is_green + self._layout_test_results = None + + @staticmethod + def build_url(builder, build_number): + return "%s/builds/%s" % (builder.url(), build_number) + + def url(self): + return self.build_url(self.builder(), self._number) + + def results_url(self): + results_directory = "r%s (%s)" % (self.revision(), self._number) + return "%s/%s" % (self._builder.results_url(), urllib.quote(results_directory)) + + def _fetch_results_html(self): + results_html = "%s/results.html" % (self.results_url()) + # FIXME: This should use NetworkTransaction's 404 handling instead. + try: + # It seems this can return None if the url redirects and then returns 404. + return urllib2.urlopen(results_html) + except urllib2.HTTPError, error: + if error.code != 404: + raise + + def layout_test_results(self): + if not self._layout_test_results: + # FIXME: This should cache that the result was a 404 and stop hitting the network. 
+ self._layout_test_results = LayoutTestResults.results_from_string(self._fetch_results_html()) + return self._layout_test_results + + def builder(self): + return self._builder + + def revision(self): + return self._revision + + def is_green(self): + return self._is_green + + def previous_build(self): + # previous_build() allows callers to avoid assuming build numbers are sequential. + # They may not be sequential across all master changes, or when non-trunk builds are made. + return self._builder.build(self._number - 1) + + +class BuildBot(object): + # FIXME: This should move into some sort of webkit_config.py + default_host = "build.webkit.org" + + def __init__(self, host=default_host): + self.buildbot_host = host + self._builder_by_name = {} + + # If any core builder is red we should not be landing patches. Other + # builders should be added to this list once they are known to be + # reliable. + # See https://bugs.webkit.org/show_bug.cgi?id=33296 and related bugs. + self.core_builder_names_regexps = [ + "SnowLeopard.*Build", + "SnowLeopard.*\(Test", # Exclude WebKit2 for now. + "Leopard", + "Tiger", + "Windows.*Build", + "GTK.*32", + "GTK.*64.*Debug", # Disallow the 64-bit Release bot which is broken. + "Qt", + "Chromium.*Release$", + ] + + def _parse_last_build_cell(self, builder, cell): + status_link = cell.find('a') + if status_link: + # Will be either a revision number or a build number + revision_string = status_link.string + # If revision_string has non-digits assume it's not a revision number. + builder['built_revision'] = int(revision_string) \ + if not re.match('\D', revision_string) \ + else None + + # FIXME: We treat slave lost as green even though it is not to + # work around the Qts bot being on a broken internet connection. 
+ # The real fix is https://bugs.webkit.org/show_bug.cgi?id=37099 + builder['is_green'] = not re.search('fail', cell.renderContents()) or \ + not not re.search('lost', cell.renderContents()) + + status_link_regexp = r"builders/(?P<builder_name>.*)/builds/(?P<build_number>\d+)" + link_match = re.match(status_link_regexp, status_link['href']) + builder['build_number'] = int(link_match.group("build_number")) + else: + # We failed to find a link in the first cell, just give up. This + # can happen if a builder is just-added, the first cell will just + # be "no build" + # Other parts of the code depend on is_green being present. + builder['is_green'] = False + builder['built_revision'] = None + builder['build_number'] = None + + def _parse_current_build_cell(self, builder, cell): + activity_lines = cell.renderContents().split("<br />") + builder["activity"] = activity_lines[0] # normally "building" or "idle" + # The middle lines document how long left for any current builds. + match = re.match("(?P<pending_builds>\d) pending", activity_lines[-1]) + builder["pending_builds"] = int(match.group("pending_builds")) if match else 0 + + def _parse_builder_status_from_row(self, status_row): + status_cells = status_row.findAll('td') + builder = {} + + # First cell is the name + name_link = status_cells[0].find('a') + builder["name"] = unicode(name_link.string) + + self._parse_last_build_cell(builder, status_cells[1]) + self._parse_current_build_cell(builder, status_cells[2]) + return builder + + def _matches_regexps(self, builder_name, name_regexps): + for name_regexp in name_regexps: + if re.match(name_regexp, builder_name): + return True + return False + + # FIXME: Should move onto Builder + def _is_core_builder(self, builder_name): + return self._matches_regexps(builder_name, self.core_builder_names_regexps) + + # FIXME: This method needs to die, but is used by a unit test at the moment. 
+ def _builder_statuses_with_names_matching_regexps(self, builder_statuses, name_regexps): + return [builder for builder in builder_statuses if self._matches_regexps(builder["name"], name_regexps)] + + def red_core_builders(self): + return [builder for builder in self.core_builder_statuses() if not builder["is_green"]] + + def red_core_builders_names(self): + return [builder["name"] for builder in self.red_core_builders()] + + def idle_red_core_builders(self): + return [builder for builder in self.red_core_builders() if builder["activity"] == "idle"] + + def core_builders_are_green(self): + return not self.red_core_builders() + + # FIXME: These _fetch methods should move to a networking class. + def _fetch_build_dictionary(self, builder, build_number): + try: + base = "http://%s" % self.buildbot_host + path = urllib.quote("json/builders/%s/builds/%s" % (builder.name(), + build_number)) + url = "%s/%s" % (base, path) + jsondata = urllib2.urlopen(url) + return json.load(jsondata) + except urllib2.URLError, err: + build_url = Build.build_url(builder, build_number) + _log.error("Error fetching data for %s build %s (%s): %s" % (builder.name(), build_number, build_url, err)) + return None + except ValueError, err: + build_url = Build.build_url(builder, build_number) + _log.error("Error decoding json data from %s: %s" % (build_url, err)) + return None + + def _fetch_one_box_per_builder(self): + build_status_url = "http://%s/one_box_per_builder" % self.buildbot_host + return urllib2.urlopen(build_status_url) + + def _file_cell_text(self, file_cell): + """Traverses down through firstChild elements until one containing a string is found, then returns that string""" + element = file_cell + while element.string is None and element.contents: + element = element.contents[0] + return element.string + + def _parse_twisted_file_row(self, file_row): + string_or_empty = lambda string: unicode(string) if string else u"" + file_cells = file_row.findAll('td') + return { + "filename": 
string_or_empty(self._file_cell_text(file_cells[0])), + "size": string_or_empty(self._file_cell_text(file_cells[1])), + "type": string_or_empty(self._file_cell_text(file_cells[2])), + "encoding": string_or_empty(self._file_cell_text(file_cells[3])), + } + + def _parse_twisted_directory_listing(self, page): + soup = BeautifulSoup(page) + # HACK: Match only table rows with a class to ignore twisted header/footer rows. + file_rows = soup.find('table').findAll('tr', {'class': re.compile(r'\b(?:directory|file)\b')}) + return [self._parse_twisted_file_row(file_row) for file_row in file_rows] + + # FIXME: There should be a better way to get this information directly from twisted. + def _fetch_twisted_directory_listing(self, url): + return self._parse_twisted_directory_listing(urllib2.urlopen(url)) + + def builders(self): + return [self.builder_with_name(status["name"]) for status in self.builder_statuses()] + + # This method pulls from /one_box_per_builder as an efficient way to get information about all builders at once. + def builder_statuses(self): + soup = BeautifulSoup(self._fetch_one_box_per_builder()) + return [self._parse_builder_status_from_row(status_row) for status_row in soup.find('table').findAll('tr')] + + def core_builder_statuses(self): + return [builder for builder in self.builder_statuses() if self._is_core_builder(builder["name"])] + + def builder_with_name(self, name): + builder = self._builder_by_name.get(name) + if not builder: + builder = Builder(name, self) + self._builder_by_name[name] = builder + return builder + + def failure_map(self, only_core_builders=True): + builder_statuses = self.core_builder_statuses() if only_core_builders else self.builder_statuses() + failure_map = FailureMap() + revision_to_failing_bots = {} + for builder_status in builder_statuses: + if builder_status["is_green"]: + continue + builder = self.builder_with_name(builder_status["name"]) + regression_window = builder.find_blameworthy_regression_window(builder_status["build_number"]) + if 
regression_window: + failure_map.add_regression_window(builder, regression_window) + return failure_map + + # This makes fewer requests than calling Builder.latest_build would. It grabs all builder + # statuses in one request using self.builder_statuses (fetching /one_box_per_builder instead of builder pages). + def _latest_builds_from_builders(self, only_core_builders=True): + builder_statuses = self.core_builder_statuses() if only_core_builders else self.builder_statuses() + return [self.builder_with_name(status["name"]).build(status["build_number"]) for status in builder_statuses] + + def _build_at_or_before_revision(self, build, revision): + while build: + if build.revision() <= revision: + return build + build = build.previous_build() + + def last_green_revision(self, only_core_builders=True): + builds = self._latest_builds_from_builders(only_core_builders) + target_revision = builds[0].revision() + # An alternate way to do this would be to start at one revision and walk backwards + # checking builder.build_for_revision, however build_for_revision is very slow on first load. + while True: + # Make builds agree on revision + builds = [self._build_at_or_before_revision(build, target_revision) for build in builds] + if None in builds: # One of the builds failed to load from the server. 
+ return None + min_revision = min(map(lambda build: build.revision(), builds)) + if min_revision != target_revision: + target_revision = min_revision + continue # Builds don't all agree on revision, keep searching + # Check to make sure they're all green + all_are_green = reduce(operator.and_, map(lambda build: build.is_green(), builds)) + if not all_are_green: + target_revision -= 1 + continue + return min_revision diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py new file mode 100644 index 0000000..57290d1 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py @@ -0,0 +1,418 @@ +# Copyright (C) 2009 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import unittest + +from webkitpy.common.net.layouttestresults import LayoutTestResults +from webkitpy.common.net.buildbot import BuildBot, Builder, Build +from webkitpy.layout_tests.layout_package import test_results +from webkitpy.layout_tests.layout_package import test_failures +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup + + +class BuilderTest(unittest.TestCase): + def _mock_test_result(self, testname): + return test_results.TestResult(testname, [test_failures.FailureTextMismatch()]) + + def _install_fetch_build(self, failure): + def _mock_fetch_build(build_number): + build = Build( + builder=self.builder, + build_number=build_number, + revision=build_number + 1000, + is_green=build_number < 4 + ) + results = [self._mock_test_result(testname) for testname in failure(build_number)] + build._layout_test_results = LayoutTestResults(results) + return build + self.builder._fetch_build = _mock_fetch_build + + def setUp(self): + self.buildbot = BuildBot() + self.builder = Builder(u"Test Builder \u2661", self.buildbot) + self._install_fetch_build(lambda build_number: ["test1", "test2"]) + + def test_find_regression_window(self): + regression_window = self.builder.find_regression_window(self.builder.build(10)) + self.assertEqual(regression_window.build_before_failure().revision(), 1003) + self.assertEqual(regression_window.failing_build().revision(), 1004) + + regression_window = 
self.builder.find_regression_window(self.builder.build(10), look_back_limit=2) + self.assertEqual(regression_window.build_before_failure(), None) + self.assertEqual(regression_window.failing_build().revision(), 1008) + + def test_none_build(self): + self.builder._fetch_build = lambda build_number: None + regression_window = self.builder.find_regression_window(self.builder.build(10)) + self.assertEqual(regression_window.build_before_failure(), None) + self.assertEqual(regression_window.failing_build(), None) + + def test_flaky_tests(self): + self._install_fetch_build(lambda build_number: ["test1"] if build_number % 2 else ["test2"]) + regression_window = self.builder.find_regression_window(self.builder.build(10)) + self.assertEqual(regression_window.build_before_failure().revision(), 1009) + self.assertEqual(regression_window.failing_build().revision(), 1010) + + def test_failure_and_flaky(self): + self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"]) + regression_window = self.builder.find_regression_window(self.builder.build(10)) + self.assertEqual(regression_window.build_before_failure().revision(), 1003) + self.assertEqual(regression_window.failing_build().revision(), 1004) + + def test_no_results(self): + self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"]) + regression_window = self.builder.find_regression_window(self.builder.build(10)) + self.assertEqual(regression_window.build_before_failure().revision(), 1003) + self.assertEqual(regression_window.failing_build().revision(), 1004) + + def test_failure_after_flaky(self): + self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number > 6 else ["test3"]) + regression_window = self.builder.find_regression_window(self.builder.build(10)) + self.assertEqual(regression_window.build_before_failure().revision(), 1006) + self.assertEqual(regression_window.failing_build().revision(), 1007) + + def 
test_find_blameworthy_regression_window(self): + self.assertEqual(self.builder.find_blameworthy_regression_window(10).revisions(), [1004]) + self.assertEqual(self.builder.find_blameworthy_regression_window(10, look_back_limit=2), None) + # Flakey test avoidance requires at least 2 red builds: + self.assertEqual(self.builder.find_blameworthy_regression_window(4), None) + self.assertEqual(self.builder.find_blameworthy_regression_window(4, avoid_flakey_tests=False).revisions(), [1004]) + # Green builder: + self.assertEqual(self.builder.find_blameworthy_regression_window(3), None) + + def test_build_caching(self): + self.assertEqual(self.builder.build(10), self.builder.build(10)) + + def test_build_and_revision_for_filename(self): + expectations = { + "r47483 (1)/" : (47483, 1), + "r47483 (1).zip" : (47483, 1), + } + for filename, revision_and_build in expectations.items(): + self.assertEqual(self.builder._revision_and_build_for_filename(filename), revision_and_build) + + +class BuildTest(unittest.TestCase): + def test_layout_test_results(self): + build = Build(None, None, None, None) + build._fetch_results_html = lambda: None + # Test that layout_test_results() returns None if the fetch fails. 
+ self.assertEqual(build.layout_test_results(), None) + + +class BuildBotTest(unittest.TestCase): + + _example_one_box_status = ''' + <table> + <tr> + <td class="box"><a href="builders/Windows%20Debug%20%28Tests%29">Windows Debug (Tests)</a></td> + <td align="center" class="LastBuild box success"><a href="builders/Windows%20Debug%20%28Tests%29/builds/3693">47380</a><br />build<br />successful</td> + <td align="center" class="Activity building">building<br />ETA in<br />~ 14 mins<br />at 13:40</td> + <tr> + <td class="box"><a href="builders/SnowLeopard%20Intel%20Release">SnowLeopard Intel Release</a></td> + <td class="LastBuild box" >no build</td> + <td align="center" class="Activity building">building<br />< 1 min</td> + <tr> + <td class="box"><a href="builders/Qt%20Linux%20Release">Qt Linux Release</a></td> + <td align="center" class="LastBuild box failure"><a href="builders/Qt%20Linux%20Release/builds/654">47383</a><br />failed<br />compile-webkit</td> + <td align="center" class="Activity idle">idle<br />3 pending</td> + <tr> + <td class="box"><a href="builders/Qt%20Windows%2032-bit%20Debug">Qt Windows 32-bit Debug</a></td> + <td align="center" class="LastBuild box failure"><a href="builders/Qt%20Windows%2032-bit%20Debug/builds/2090">60563</a><br />failed<br />failed<br />slave<br />lost</td> + <td align="center" class="Activity building">building<br />ETA in<br />~ 5 mins<br />at 08:25</td> + </table> +''' + _expected_example_one_box_parsings = [ + { + 'is_green': True, + 'build_number' : 3693, + 'name': u'Windows Debug (Tests)', + 'built_revision': 47380, + 'activity': 'building', + 'pending_builds': 0, + }, + { + 'is_green': False, + 'build_number' : None, + 'name': u'SnowLeopard Intel Release', + 'built_revision': None, + 'activity': 'building', + 'pending_builds': 0, + }, + { + 'is_green': False, + 'build_number' : 654, + 'name': u'Qt Linux Release', + 'built_revision': 47383, + 'activity': 'idle', + 'pending_builds': 3, + }, + { + 'is_green': True, + 
'build_number' : 2090, + 'name': u'Qt Windows 32-bit Debug', + 'built_revision': 60563, + 'activity': 'building', + 'pending_builds': 0, + }, + ] + + def test_status_parsing(self): + buildbot = BuildBot() + + soup = BeautifulSoup(self._example_one_box_status) + status_table = soup.find("table") + input_rows = status_table.findAll('tr') + + for x in range(len(input_rows)): + status_row = input_rows[x] + expected_parsing = self._expected_example_one_box_parsings[x] + + builder = buildbot._parse_builder_status_from_row(status_row) + + # Make sure we aren't parsing more or less than we expect + self.assertEquals(builder.keys(), expected_parsing.keys()) + + for key, expected_value in expected_parsing.items(): + self.assertEquals(builder[key], expected_value, ("Builder %d parse failure for key: %s: Actual='%s' Expected='%s'" % (x, key, builder[key], expected_value))) + + def test_core_builder_methods(self): + buildbot = BuildBot() + + # Override builder_statuses function to not touch the network. + def example_builder_statuses(): # We could use instancemethod() to bind 'self' but we don't need to. 
+ return BuildBotTest._expected_example_one_box_parsings + buildbot.builder_statuses = example_builder_statuses + + buildbot.core_builder_names_regexps = [ 'Leopard', "Windows.*Build" ] + self.assertEquals(buildbot.red_core_builders_names(), []) + self.assertTrue(buildbot.core_builders_are_green()) + + buildbot.core_builder_names_regexps = [ 'SnowLeopard', 'Qt' ] + self.assertEquals(buildbot.red_core_builders_names(), [ u'SnowLeopard Intel Release', u'Qt Linux Release' ]) + self.assertFalse(buildbot.core_builders_are_green()) + + def test_builder_name_regexps(self): + buildbot = BuildBot() + + # For complete testing, this list should match the list of builders at build.webkit.org: + example_builders = [ + {'name': u'Tiger Intel Release', }, + {'name': u'Leopard Intel Release (Build)', }, + {'name': u'Leopard Intel Release (Tests)', }, + {'name': u'Leopard Intel Debug (Build)', }, + {'name': u'Leopard Intel Debug (Tests)', }, + {'name': u'SnowLeopard Intel Release (Build)', }, + {'name': u'SnowLeopard Intel Release (Tests)', }, + {'name': u'SnowLeopard Intel Release (WebKit2 Tests)', }, + {'name': u'SnowLeopard Intel Leaks', }, + {'name': u'Windows Release (Build)', }, + {'name': u'Windows Release (Tests)', }, + {'name': u'Windows Debug (Build)', }, + {'name': u'Windows Debug (Tests)', }, + {'name': u'GTK Linux 32-bit Release', }, + {'name': u'GTK Linux 32-bit Debug', }, + {'name': u'GTK Linux 64-bit Debug', }, + {'name': u'GTK Linux 64-bit Release', }, + {'name': u'Qt Linux Release', }, + {'name': u'Qt Linux Release minimal', }, + {'name': u'Qt Linux ARMv5 Release', }, + {'name': u'Qt Linux ARMv7 Release', }, + {'name': u'Qt Windows 32-bit Release', }, + {'name': u'Qt Windows 32-bit Debug', }, + {'name': u'Chromium Linux Release', }, + {'name': u'Chromium Mac Release', }, + {'name': u'Chromium Win Release', }, + {'name': u'Chromium Linux Release (Tests)', }, + {'name': u'Chromium Mac Release (Tests)', }, + {'name': u'Chromium Win Release (Tests)', }, + {'name': 
u'New run-webkit-tests', }, + ] + name_regexps = [ + "SnowLeopard.*Build", + "SnowLeopard.*\(Test", + "Leopard", + "Tiger", + "Windows.*Build", + "GTK.*32", + "GTK.*64.*Debug", # Disallow the 64-bit Release bot which is broken. + "Qt", + "Chromium.*Release$", + ] + expected_builders = [ + {'name': u'Tiger Intel Release', }, + {'name': u'Leopard Intel Release (Build)', }, + {'name': u'Leopard Intel Release (Tests)', }, + {'name': u'Leopard Intel Debug (Build)', }, + {'name': u'Leopard Intel Debug (Tests)', }, + {'name': u'SnowLeopard Intel Release (Build)', }, + {'name': u'SnowLeopard Intel Release (Tests)', }, + {'name': u'Windows Release (Build)', }, + {'name': u'Windows Debug (Build)', }, + {'name': u'GTK Linux 32-bit Release', }, + {'name': u'GTK Linux 32-bit Debug', }, + {'name': u'GTK Linux 64-bit Debug', }, + {'name': u'Qt Linux Release', }, + {'name': u'Qt Linux Release minimal', }, + {'name': u'Qt Linux ARMv5 Release', }, + {'name': u'Qt Linux ARMv7 Release', }, + {'name': u'Qt Windows 32-bit Release', }, + {'name': u'Qt Windows 32-bit Debug', }, + {'name': u'Chromium Linux Release', }, + {'name': u'Chromium Mac Release', }, + {'name': u'Chromium Win Release', }, + ] + + # This test should probably be updated if the default regexp list changes + self.assertEquals(buildbot.core_builder_names_regexps, name_regexps) + + builders = buildbot._builder_statuses_with_names_matching_regexps(example_builders, name_regexps) + self.assertEquals(builders, expected_builders) + + def test_builder_with_name(self): + buildbot = BuildBot() + + builder = buildbot.builder_with_name("Test Builder") + self.assertEqual(builder.name(), "Test Builder") + self.assertEqual(builder.url(), "http://build.webkit.org/builders/Test%20Builder") + self.assertEqual(builder.url_encoded_name(), "Test%20Builder") + self.assertEqual(builder.results_url(), "http://build.webkit.org/results/Test%20Builder") + + # Override _fetch_build_dictionary function to not touch the network. 
+ def mock_fetch_build_dictionary(self, build_number): + build_dictionary = { + "sourceStamp": { + "revision" : 2 * build_number, + }, + "number" : int(build_number), + "results" : build_number % 2, # 0 means pass + } + return build_dictionary + buildbot._fetch_build_dictionary = mock_fetch_build_dictionary + + build = builder.build(10) + self.assertEqual(build.builder(), builder) + self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/10") + self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r20%20%2810%29") + self.assertEqual(build.revision(), 20) + self.assertEqual(build.is_green(), True) + + build = build.previous_build() + self.assertEqual(build.builder(), builder) + self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/9") + self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r18%20%289%29") + self.assertEqual(build.revision(), 18) + self.assertEqual(build.is_green(), False) + + self.assertEqual(builder.build(None), None) + + _example_directory_listing = ''' +<h1>Directory listing for /results/SnowLeopard Intel Leaks/</h1> + +<table> + <tr class="alt"> + <th>Filename</th> + <th>Size</th> + <th>Content type</th> + <th>Content encoding</th> + </tr> +<tr class="directory "> + <td><a href="r47483%20%281%29/"><b>r47483 (1)/</b></a></td> + <td><b></b></td> + <td><b>[Directory]</b></td> + <td><b></b></td> +</tr> +<tr class="file alt"> + <td><a href="r47484%20%282%29.zip">r47484 (2).zip</a></td> + <td>89K</td> + <td>[application/zip]</td> + <td></td> +</tr> +''' + _expected_files = [ + { + "filename" : "r47483 (1)/", + "size" : "", + "type" : "[Directory]", + "encoding" : "", + }, + { + "filename" : "r47484 (2).zip", + "size" : "89K", + "type" : "[application/zip]", + "encoding" : "", + }, + ] + + def test_parse_build_to_revision_map(self): + buildbot = BuildBot() + files = 
buildbot._parse_twisted_directory_listing(self._example_directory_listing) + self.assertEqual(self._expected_files, files) + + # Revision, is_green + # Ordered from newest (highest number) to oldest. + fake_builder1 = [ + [2, False], + [1, True], + ] + fake_builder2 = [ + [2, False], + [1, True], + ] + fake_builders = [ + fake_builder1, + fake_builder2, + ] + def _build_from_fake(self, fake_builder, index): + if index >= len(fake_builder): + return None + fake_build = fake_builder[index] + build = Build( + builder=fake_builder, + build_number=index, + revision=fake_build[0], + is_green=fake_build[1], + ) + def mock_previous_build(): + return self._build_from_fake(fake_builder, index + 1) + build.previous_build = mock_previous_build + return build + + def _fake_builds_at_index(self, index): + return [self._build_from_fake(builder, index) for builder in self.fake_builders] + + def test_last_green_revision(self): + buildbot = BuildBot() + def mock_builds_from_builders(only_core_builders): + return self._fake_builds_at_index(0) + buildbot._latest_builds_from_builders = mock_builds_from_builders + self.assertEqual(buildbot.last_green_revision(), 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/Tools/Scripts/webkitpy/common/net/credentials.py b/Tools/Scripts/webkitpy/common/net/credentials.py new file mode 100644 index 0000000..30480b3 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/credentials.py @@ -0,0 +1,155 @@ +# Copyright (c) 2009 Google Inc. All rights reserved. +# Copyright (c) 2009 Apple Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Python module for reading stored web credentials from the OS. + +import getpass +import os +import platform +import re + +from webkitpy.common.checkout.scm import Git +from webkitpy.common.system.executive import Executive, ScriptError +from webkitpy.common.system.user import User +from webkitpy.common.system.deprecated_logging import log + +try: + # Use keyring, a cross platform keyring interface, as a fallback: + # http://pypi.python.org/pypi/keyring + import keyring +except ImportError: + keyring = None + + +class Credentials(object): + _environ_prefix = "webkit_bugzilla_" + + def __init__(self, host, git_prefix=None, executive=None, cwd=os.getcwd(), + keyring=keyring): + self.host = host + self.git_prefix = "%s." 
% git_prefix if git_prefix else "" + self.executive = executive or Executive() + self.cwd = cwd + self._keyring = keyring + + def _credentials_from_git(self): + try: + if not Git.in_working_directory(self.cwd): + return (None, None) + return (Git.read_git_config(self.git_prefix + "username"), + Git.read_git_config(self.git_prefix + "password")) + except OSError, e: + # Catch and ignore OSError exceptions such as "no such file + # or directory" (OSError errno 2), which imply that the Git + # command cannot be found/is not installed. + pass + return (None, None) + + def _keychain_value_with_label(self, label, source_text): + match = re.search("%s\"(?P<value>.+)\"" % label, + source_text, + re.MULTILINE) + if match: + return match.group('value') + + def _is_mac_os_x(self): + return platform.mac_ver()[0] + + def _parse_security_tool_output(self, security_output): + username = self._keychain_value_with_label("^\s*\"acct\"<blob>=", + security_output) + password = self._keychain_value_with_label("^password: ", + security_output) + return [username, password] + + def _run_security_tool(self, username=None): + security_command = [ + "/usr/bin/security", + "find-internet-password", + "-g", + "-s", + self.host, + ] + if username: + security_command += ["-a", username] + + log("Reading Keychain for %s account and password. " + "Click \"Allow\" to continue..." % self.host) + try: + return self.executive.run_command(security_command) + except ScriptError: + # Failed to either find a keychain entry or somekind of OS-related + # error occured (for instance, couldn't find the /usr/sbin/security + # command). + log("Could not find a keychain entry for %s." 
% self.host) + return None + + def _credentials_from_keychain(self, username=None): + if not self._is_mac_os_x(): + return [username, None] + + security_output = self._run_security_tool(username) + if security_output: + return self._parse_security_tool_output(security_output) + else: + return [None, None] + + def _read_environ(self, key): + environ_key = self._environ_prefix + key + return os.environ.get(environ_key.upper()) + + def _credentials_from_environment(self): + return (self._read_environ("username"), self._read_environ("password")) + + def _offer_to_store_credentials_in_keyring(self, username, password): + if not self._keyring: + return + if not User().confirm("Store password in system keyring?", User.DEFAULT_NO): + return + self._keyring.set_password(self.host, username, password) + + def read_credentials(self): + username, password = self._credentials_from_environment() + # FIXME: We don't currently support pulling the username from one + # source and the password from a separate source. + if not username or not password: + username, password = self._credentials_from_git() + if not username or not password: + username, password = self._credentials_from_keychain(username) + + if username and not password and self._keyring: + password = self._keyring.get_password(self.host, username) + + if not username: + username = User.prompt("%s login: " % self.host) + if not password: + password = getpass.getpass("%s password for %s: " % (self.host, username)) + self._offer_to_store_credentials_in_keyring(username, password) + + return (username, password) diff --git a/Tools/Scripts/webkitpy/common/net/credentials_unittest.py b/Tools/Scripts/webkitpy/common/net/credentials_unittest.py new file mode 100644 index 0000000..6f2d909 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/credentials_unittest.py @@ -0,0 +1,176 @@ +# Copyright (C) 2009 Google Inc. All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from __future__ import with_statement + +import os +import tempfile +import unittest +from webkitpy.common.net.credentials import Credentials +from webkitpy.common.system.executive import Executive +from webkitpy.common.system.outputcapture import OutputCapture +from webkitpy.thirdparty.mock import Mock + + +# FIXME: Other unit tests probably want this class. 
+class _TemporaryDirectory(object): + def __init__(self, **kwargs): + self._kwargs = kwargs + self._directory_path = None + + def __enter__(self): + self._directory_path = tempfile.mkdtemp(**self._kwargs) + return self._directory_path + + def __exit__(self, type, value, traceback): + os.rmdir(self._directory_path) + + +class CredentialsTest(unittest.TestCase): + example_security_output = """keychain: "/Users/test/Library/Keychains/login.keychain" +class: "inet" +attributes: + 0x00000007 <blob>="bugs.webkit.org (test@webkit.org)" + 0x00000008 <blob>=<NULL> + "acct"<blob>="test@webkit.org" + "atyp"<blob>="form" + "cdat"<timedate>=0x32303039303832353233353231365A00 "20090825235216Z\000" + "crtr"<uint32>=<NULL> + "cusi"<sint32>=<NULL> + "desc"<blob>="Web form password" + "icmt"<blob>="default" + "invi"<sint32>=<NULL> + "mdat"<timedate>=0x32303039303930393137323635315A00 "20090909172651Z\000" + "nega"<sint32>=<NULL> + "path"<blob>=<NULL> + "port"<uint32>=0x00000000 + "prot"<blob>=<NULL> + "ptcl"<uint32>="htps" + "scrp"<sint32>=<NULL> + "sdmn"<blob>=<NULL> + "srvr"<blob>="bugs.webkit.org" + "type"<uint32>=<NULL> +password: "SECRETSAUCE" +""" + + def test_keychain_lookup_on_non_mac(self): + class FakeCredentials(Credentials): + def _is_mac_os_x(self): + return False + credentials = FakeCredentials("bugs.webkit.org") + self.assertEqual(credentials._is_mac_os_x(), False) + self.assertEqual(credentials._credentials_from_keychain("foo"), ["foo", None]) + + def test_security_output_parse(self): + credentials = Credentials("bugs.webkit.org") + self.assertEqual(credentials._parse_security_tool_output(self.example_security_output), ["test@webkit.org", "SECRETSAUCE"]) + + def test_security_output_parse_entry_not_found(self): + credentials = Credentials("foo.example.com") + if not credentials._is_mac_os_x(): + return # This test does not run on a non-Mac. 
+ + # Note, we ignore the captured output because it is already covered + # by the test case CredentialsTest._assert_security_call (below). + outputCapture = OutputCapture() + outputCapture.capture_output() + self.assertEqual(credentials._run_security_tool(), None) + outputCapture.restore_output() + + def _assert_security_call(self, username=None): + executive_mock = Mock() + credentials = Credentials("example.com", executive=executive_mock) + + expected_stderr = "Reading Keychain for example.com account and password. Click \"Allow\" to continue...\n" + OutputCapture().assert_outputs(self, credentials._run_security_tool, [username], expected_stderr=expected_stderr) + + security_args = ["/usr/bin/security", "find-internet-password", "-g", "-s", "example.com"] + if username: + security_args += ["-a", username] + executive_mock.run_command.assert_called_with(security_args) + + def test_security_calls(self): + self._assert_security_call() + self._assert_security_call(username="foo") + + def test_credentials_from_environment(self): + executive_mock = Mock() + credentials = Credentials("example.com", executive=executive_mock) + + saved_environ = os.environ.copy() + os.environ['WEBKIT_BUGZILLA_USERNAME'] = "foo" + os.environ['WEBKIT_BUGZILLA_PASSWORD'] = "bar" + username, password = credentials._credentials_from_environment() + self.assertEquals(username, "foo") + self.assertEquals(password, "bar") + os.environ = saved_environ + + def test_read_credentials_without_git_repo(self): + # FIXME: This should share more code with test_keyring_without_git_repo + class FakeCredentials(Credentials): + def _is_mac_os_x(self): + return True + + def _credentials_from_keychain(self, username): + return ("test@webkit.org", "SECRETSAUCE") + + def _credentials_from_environment(self): + return (None, None) + + with _TemporaryDirectory(suffix="not_a_git_repo") as temp_dir_path: + credentials = FakeCredentials("bugs.webkit.org", cwd=temp_dir_path) + # FIXME: Using read_credentials here seems 
too broad as higher-priority + # credential source could be affected by the user's environment. + self.assertEqual(credentials.read_credentials(), ("test@webkit.org", "SECRETSAUCE")) + + + def test_keyring_without_git_repo(self): + # FIXME: This should share more code with test_read_credentials_without_git_repo + class MockKeyring(object): + def get_password(self, host, username): + return "NOMNOMNOM" + + class FakeCredentials(Credentials): + def _is_mac_os_x(self): + return True + + def _credentials_from_keychain(self, username): + return ("test@webkit.org", None) + + def _credentials_from_environment(self): + return (None, None) + + with _TemporaryDirectory(suffix="not_a_git_repo") as temp_dir_path: + credentials = FakeCredentials("fake.hostname", cwd=temp_dir_path, keyring=MockKeyring()) + # FIXME: Using read_credentials here seems too broad as higher-priority + # credential source could be affected by the user's environment. + self.assertEqual(credentials.read_credentials(), ("test@webkit.org", "NOMNOMNOM")) + + +if __name__ == '__main__': + unittest.main() diff --git a/Tools/Scripts/webkitpy/common/net/failuremap.py b/Tools/Scripts/webkitpy/common/net/failuremap.py new file mode 100644 index 0000000..48cd3e6 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/failuremap.py @@ -0,0 +1,85 @@ +# Copyright (C) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +# FIXME: This probably belongs in the buildbot module. 
+class FailureMap(object): + def __init__(self): + self._failures = [] + + def add_regression_window(self, builder, regression_window): + self._failures.append({ + 'builder': builder, + 'regression_window': regression_window, + }) + + def is_empty(self): + return not self._failures + + def failing_revisions(self): + failing_revisions = [failure_info['regression_window'].revisions() + for failure_info in self._failures] + return sorted(set(sum(failing_revisions, []))) + + def builders_failing_for(self, revision): + return self._builders_failing_because_of([revision]) + + def tests_failing_for(self, revision): + tests = [failure_info['regression_window'].failing_tests() + for failure_info in self._failures + if revision in failure_info['regression_window'].revisions() + and failure_info['regression_window'].failing_tests()] + result = set() + for test in tests: + result = result.union(test) + return sorted(result) + + def _old_failures(self, is_old_failure): + return filter(lambda revision: is_old_failure(revision), + self.failing_revisions()) + + def _builders_failing_because_of(self, revisions): + revision_set = set(revisions) + return [failure_info['builder'] for failure_info in self._failures + if revision_set.intersection( + failure_info['regression_window'].revisions())] + + # FIXME: We should re-process old failures after some time delay. + # https://bugs.webkit.org/show_bug.cgi?id=36581 + def filter_out_old_failures(self, is_old_failure): + old_failures = self._old_failures(is_old_failure) + old_failing_builder_names = set([builder.name() + for builder in self._builders_failing_because_of(old_failures)]) + + # We filter out all the failing builders that could have been caused + # by old_failures. We could miss some new failures this way, but + # emperically, this reduces the amount of spam we generate. 
+ failures = self._failures + self._failures = [failure_info for failure_info in failures + if failure_info['builder'].name() not in old_failing_builder_names] + self._cache = {} diff --git a/Tools/Scripts/webkitpy/common/net/failuremap_unittest.py b/Tools/Scripts/webkitpy/common/net/failuremap_unittest.py new file mode 100644 index 0000000..2f0b49d --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/failuremap_unittest.py @@ -0,0 +1,76 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import unittest + +from webkitpy.common.net.buildbot import Build +from webkitpy.common.net.failuremap import * +from webkitpy.common.net.regressionwindow import RegressionWindow +from webkitpy.tool.mocktool import MockBuilder + + +class FailureMapTest(unittest.TestCase): + builder1 = MockBuilder("Builder1") + builder2 = MockBuilder("Builder2") + + build1a = Build(builder1, build_number=22, revision=1233, is_green=True) + build1b = Build(builder1, build_number=23, revision=1234, is_green=False) + build2a = Build(builder2, build_number=89, revision=1233, is_green=True) + build2b = Build(builder2, build_number=90, revision=1235, is_green=False) + + regression_window1 = RegressionWindow(build1a, build1b, failing_tests=[u'test1', u'test1']) + regression_window2 = RegressionWindow(build2a, build2b, failing_tests=[u'test1']) + + def _make_failure_map(self): + failure_map = FailureMap() + failure_map.add_regression_window(self.builder1, self.regression_window1) + failure_map.add_regression_window(self.builder2, self.regression_window2) + return failure_map + + def test_failing_revisions(self): + failure_map = self._make_failure_map() + self.assertEquals(failure_map.failing_revisions(), [1234, 1235]) + + def test_new_failures(self): + failure_map = self._make_failure_map() + failure_map.filter_out_old_failures(lambda revision: False) + self.assertEquals(failure_map.failing_revisions(), [1234, 1235]) + + def test_new_failures_with_old_revisions(self): + failure_map = self._make_failure_map() + failure_map.filter_out_old_failures(lambda revision: revision == 1234) + self.assertEquals(failure_map.failing_revisions(), []) + + def test_new_failures_with_more_old_revisions(self): + failure_map = self._make_failure_map() + failure_map.filter_out_old_failures(lambda revision: revision == 1235) + self.assertEquals(failure_map.failing_revisions(), [1234]) + + def test_tests_failing_for(self): + failure_map = self._make_failure_map() + 
self.assertEquals(failure_map.tests_failing_for(1234), [u'test1']) diff --git a/Tools/Scripts/webkitpy/common/net/irc/__init__.py b/Tools/Scripts/webkitpy/common/net/irc/__init__.py new file mode 100644 index 0000000..ef65bee --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/irc/__init__.py @@ -0,0 +1 @@ +# Required for Python to search this directory for module files diff --git a/Tools/Scripts/webkitpy/common/net/irc/ircbot.py b/Tools/Scripts/webkitpy/common/net/irc/ircbot.py new file mode 100644 index 0000000..f742867 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/irc/ircbot.py @@ -0,0 +1,91 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import webkitpy.common.config.irc as config_irc + +from webkitpy.common.thread.messagepump import MessagePump, MessagePumpDelegate +from webkitpy.thirdparty.autoinstalled.irc import ircbot +from webkitpy.thirdparty.autoinstalled.irc import irclib + + +class IRCBotDelegate(object): + def irc_message_received(self, nick, message): + raise NotImplementedError, "subclasses must implement" + + def irc_nickname(self): + raise NotImplementedError, "subclasses must implement" + + def irc_password(self): + raise NotImplementedError, "subclasses must implement" + + +class IRCBot(ircbot.SingleServerIRCBot, MessagePumpDelegate): + # FIXME: We should get this information from a config file. 
+ def __init__(self, + message_queue, + delegate): + self._message_queue = message_queue + self._delegate = delegate + ircbot.SingleServerIRCBot.__init__( + self, + [( + config_irc.server, + config_irc.port, + self._delegate.irc_password() + )], + self._delegate.irc_nickname(), + self._delegate.irc_nickname()) + self._channel = config_irc.channel + + # ircbot.SingleServerIRCBot methods + + def on_nicknameinuse(self, connection, event): + connection.nick(connection.get_nickname() + "_") + + def on_welcome(self, connection, event): + connection.join(self._channel) + self._message_pump = MessagePump(self, self._message_queue) + + def on_pubmsg(self, connection, event): + nick = irclib.nm_to_n(event.source()) + request = event.arguments()[0].split(":", 1) + if len(request) > 1 and irclib.irc_lower(request[0]) == irclib.irc_lower(self.connection.get_nickname()): + response = self._delegate.irc_message_received(nick, request[1]) + if response: + connection.privmsg(self._channel, response) + + # MessagePumpDelegate methods + + def schedule(self, interval, callback): + self.connection.execute_delayed(interval, callback) + + def message_available(self, message): + self.connection.privmsg(self._channel, message) + + def final_message_delivered(self): + self.die() diff --git a/Tools/Scripts/webkitpy/common/net/irc/ircproxy.py b/Tools/Scripts/webkitpy/common/net/irc/ircproxy.py new file mode 100644 index 0000000..13348b4 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/irc/ircproxy.py @@ -0,0 +1,62 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import threading + +from webkitpy.common.net.irc.ircbot import IRCBot +from webkitpy.common.thread.threadedmessagequeue import ThreadedMessageQueue +from webkitpy.common.system.deprecated_logging import log + + +class _IRCThread(threading.Thread): + def __init__(self, message_queue, irc_delegate, irc_bot): + threading.Thread.__init__(self) + self.setDaemon(True) + self._message_queue = message_queue + self._irc_delegate = irc_delegate + self._irc_bot = irc_bot + + def run(self): + bot = self._irc_bot(self._message_queue, self._irc_delegate) + bot.start() + + +class IRCProxy(object): + def __init__(self, irc_delegate, irc_bot=IRCBot): + log("Connecting to IRC") + self._message_queue = ThreadedMessageQueue() + self._child_thread = _IRCThread(self._message_queue, irc_delegate, irc_bot) + self._child_thread.start() + + def post(self, message): + self._message_queue.post(message) + + def disconnect(self): + log("Disconnecting from IRC...") + self._message_queue.stop() + self._child_thread.join() diff --git a/Tools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py b/Tools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py new file mode 100644 index 0000000..b44ce40 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py @@ -0,0 +1,43 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import unittest + +from webkitpy.common.net.irc.ircproxy import IRCProxy +from webkitpy.common.system.outputcapture import OutputCapture +from webkitpy.thirdparty.mock import Mock + +class IRCProxyTest(unittest.TestCase): + def test_trivial(self): + def fun(): + proxy = IRCProxy(Mock(), Mock()) + proxy.post("hello") + proxy.disconnect() + + expected_stderr = "Connecting to IRC\nDisconnecting from IRC...\n" + OutputCapture().assert_outputs(self, fun, expected_stderr=expected_stderr) diff --git a/Tools/Scripts/webkitpy/common/net/layouttestresults.py b/Tools/Scripts/webkitpy/common/net/layouttestresults.py new file mode 100644 index 0000000..28caad4 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/layouttestresults.py @@ -0,0 +1,149 @@ +# Copyright (c) 2010, Google Inc. All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# A module for parsing results.html files generated by old-run-webkit-tests +# This class is one big hack and only needs to exist until we transition to new-run-webkit-tests. 
+ +from webkitpy.common.system.deprecated_logging import log +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup, SoupStrainer +from webkitpy.layout_tests.layout_package import test_results +from webkitpy.layout_tests.layout_package import test_failures + + +# FIXME: This should be unified with all the layout test results code in the layout_tests package +# This doesn't belong in common.net, but we don't have a better place for it yet. +def path_for_layout_test(test_name): + return "LayoutTests/%s" % test_name + + +# FIXME: This should be unified with all the layout test results code in the layout_tests package +# This doesn't belong in common.net, but we don't have a better place for it yet. +class LayoutTestResults(object): + """This class knows how to parse old-run-webkit-tests results.html files.""" + + stderr_key = u'Tests that had stderr output:' + fail_key = u'Tests where results did not match expected results:' + timeout_key = u'Tests that timed out:' + crash_key = u'Tests that caused the DumpRenderTree tool to crash:' + missing_key = u'Tests that had no expected results (probably new):' + + expected_keys = [ + stderr_key, + fail_key, + crash_key, + timeout_key, + missing_key, + ] + + @classmethod + def _failures_from_fail_row(self, row): + # Look at all anchors in this row, and guess what type + # of new-run-webkit-test failures they equate to. + failures = set() + test_name = None + for anchor in row.findAll("a"): + anchor_text = unicode(anchor.string) + if not test_name: + test_name = anchor_text + continue + if anchor_text in ["expected image", "image diffs"] or '%' in anchor_text: + failures.add(test_failures.FailureImageHashMismatch()) + elif anchor_text in ["expected", "actual", "diff", "pretty diff"]: + failures.add(test_failures.FailureTextMismatch()) + else: + log("Unhandled link text in results.html parsing: %s. Please file a bug against webkitpy." % anchor_text) + # FIXME: Its possible the row contained no links due to ORWT brokeness. 
+ # We should probably assume some type of failure anyway. + return failures + + @classmethod + def _failures_from_row(cls, row, table_title): + if table_title == cls.fail_key: + return cls._failures_from_fail_row(row) + if table_title == cls.crash_key: + return [test_failures.FailureCrash()] + if table_title == cls.timeout_key: + return [test_failures.FailureTimeout()] + if table_title == cls.missing_key: + return [test_failures.FailureMissingResult(), test_failures.FailureMissingImageHash(), test_failures.FailureMissingImage()] + return None + + @classmethod + def _test_result_from_row(cls, row, table_title): + test_name = unicode(row.find("a").string) + failures = cls._failures_from_row(row, table_title) + # TestResult is a class designed to work with new-run-webkit-tests. + # old-run-webkit-tests does not save quite enough information in results.html for us to parse. + # FIXME: It's unclear if test_name should include LayoutTests or not. + return test_results.TestResult(test_name, failures) + + @classmethod + def _parse_results_table(cls, table): + table_title = unicode(table.findPreviousSibling("p").string) + if table_title not in cls.expected_keys: + # This Exception should only ever be hit if run-webkit-tests changes its results.html format. + raise Exception("Unhandled title: %s" % table_title) + # Ignore stderr failures. Everyone ignores them anyway. + if table_title == cls.stderr_key: + return [] + # FIXME: We might end with two TestResults object for the same test if it appears in more than one row. 
+ return [cls._test_result_from_row(row, table_title) for row in table.findAll("tr")] + + @classmethod + def _parse_results_html(cls, page): + tables = BeautifulSoup(page).findAll("table") + return sum([cls._parse_results_table(table) for table in tables], []) + + @classmethod + def results_from_string(cls, string): + if not string: + return None + test_results = cls._parse_results_html(string) + if not test_results: + return None + return cls(test_results) + + def __init__(self, test_results): + self._test_results = test_results + + def test_results(self): + return self._test_results + + def results_matching_failure_types(self, failure_types): + return [result for result in self._test_results if result.has_failure_matching_types(failure_types)] + + def tests_matching_failure_types(self, failure_types): + return [result.filename for result in self.results_matching_failure_types(failure_types)] + + def failing_test_results(self): + # These should match the "fail", "crash", and "timeout" keys. + failure_types = [test_failures.FailureTextMismatch, test_failures.FailureImageHashMismatch, test_failures.FailureCrash, test_failures.FailureTimeout] + return self.results_matching_failure_types(failure_types) + + def failing_tests(self): + return [result.filename for result in self.failing_test_results()] diff --git a/Tools/Scripts/webkitpy/common/net/layouttestresults_unittest.py b/Tools/Scripts/webkitpy/common/net/layouttestresults_unittest.py new file mode 100644 index 0000000..01b91b8 --- /dev/null +++ b/Tools/Scripts/webkitpy/common/net/layouttestresults_unittest.py @@ -0,0 +1,88 @@ +# Copyright (c) 2010, Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import unittest

from webkitpy.common.net.layouttestresults import LayoutTestResults
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.layout_package import test_results
from webkitpy.layout_tests.layout_package import test_failures
from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup


class LayoutTestResultsTest(unittest.TestCase):
    """Exercises LayoutTestResults' parsing of a buildbot results.html page."""

    # A representative results.html page: one "stderr" table (which should
    # not produce failures) and one "no expected results" table (which should).
    _example_results_html = """
<html>
<head>
<title>Layout Test Results</title>
</head>
<body>
<p>Tests that had stderr output:</p>
<table>
<tr>
<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/accessibility/aria-activedescendant-crash.html">accessibility/aria-activedescendant-crash.html</a></td>
<td><a href="accessibility/aria-activedescendant-crash-stderr.txt">stderr</a></td>
</tr>
<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/http/tests/security/canvas-remote-read-svg-image.html">http/tests/security/canvas-remote-read-svg-image.html</a></td>
<td><a href="http/tests/security/canvas-remote-read-svg-image-stderr.txt">stderr</a></td>
</tr>
</table><p>Tests that had no expected results (probably new):</p>
<table>
<tr>
<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/fast/repaint/no-caret-repaint-in-non-content-editable-element.html">fast/repaint/no-caret-repaint-in-non-content-editable-element.html</a></td>
<td><a href="fast/repaint/no-caret-repaint-in-non-content-editable-element-actual.txt">result</a></td>
</tr>
</table></body>
</html>
"""

    def test_parse_layout_test_results(self):
        # Only the "no expected results" table should turn into TestResults.
        expected_failures = [
            test_failures.FailureMissingResult(),
            test_failures.FailureMissingImageHash(),
            test_failures.FailureMissingImage(),
        ]
        test_name = 'fast/repaint/no-caret-repaint-in-non-content-editable-element.html'
        parsed = LayoutTestResults._parse_results_html(self._example_results_html)
        self.assertEqual([test_results.TestResult(test_name, expected_failures)], parsed)

    def test_results_from_string(self):
        # Empty or missing pages parse to None, not an empty results object.
        for page in (None, ""):
            self.assertEqual(LayoutTestResults.results_from_string(page), None)
        parsed = LayoutTestResults.results_from_string(self._example_results_html)
        self.assertEqual(len(parsed.failing_tests()), 0)

    def test_failures_from_fail_row(self):
        row = BeautifulSoup("<tr><td><a>test.hml</a></td><td><a>expected image</a></td><td><a>25%</a></td></tr>")
        test_name = unicode(row.find("a").string)
        # Even if the caller has already found the test name, findAll inside
        # _failures_from_fail_row will see it again.
        failures = OutputCapture().assert_outputs(self, LayoutTestResults._failures_from_fail_row, [row])
        self.assertEqual(len(failures), 1)
        self.assertEqual(type(sorted(failures)[0]), test_failures.FailureImageHashMismatch)

        row = BeautifulSoup("<tr><td><a>test.hml</a><a>foo</a></td></tr>")
        expected_stderr = "Unhandled link text in results.html parsing: foo. Please file a bug against webkitpy.\n"
        OutputCapture().assert_outputs(self, LayoutTestResults._failures_from_fail_row, [row], expected_stderr=expected_stderr)
import logging
import time

from webkitpy.thirdparty.autoinstalled import mechanize
from webkitpy.common.system.deprecated_logging import log


_log = logging.getLogger(__name__)


class NetworkTimeout(Exception):
    """Raised when retried network requests exceed the allowed total wait time."""
    pass


class NetworkTransaction(object):
    """Runs a network request, retrying HTTP errors with exponential backoff.

    Retries continue until the accumulated sleep time would exceed
    timeout_seconds, at which point NetworkTimeout is raised.  Optionally
    a 404 response can be converted to a None return value instead of
    being retried.
    """

    def __init__(self, initial_backoff_seconds=10, grown_factor=1.5, timeout_seconds=(10 * 60), convert_404_to_None=False):
        self._initial_backoff_seconds = initial_backoff_seconds
        self._grown_factor = grown_factor
        self._timeout_seconds = timeout_seconds
        self._convert_404_to_None = convert_404_to_None

    def run(self, request):
        """Call request() and return its result, retrying on mechanize.HTTPError.

        Returns None for a 404 when convert_404_to_None is set.  Raises
        NetworkTimeout once the next backoff sleep would pass the deadline.
        """
        self._total_sleep = 0
        self._backoff_seconds = self._initial_backoff_seconds
        while True:
            try:
                return request()
            # FIXME: We should catch urllib2.HTTPError here too.
            except mechanize.HTTPError as e:  # "as" form works on Python 2.6+ and 3.x.
                if self._convert_404_to_None and e.code == 404:
                    return None
                # Check the deadline before sleeping so we fail fast instead
                # of sleeping past it.
                self._check_for_timeout()
                _log.warn("Received HTTP status %s loading \"%s\". Retrying in %s seconds..." % (e.code, e.filename, self._backoff_seconds))
                self._sleep()

    def _check_for_timeout(self):
        # Raise if the upcoming sleep would push total wait past the timeout.
        if self._total_sleep + self._backoff_seconds > self._timeout_seconds:
            raise NetworkTimeout()

    def _sleep(self):
        time.sleep(self._backoff_seconds)
        self._total_sleep += self._backoff_seconds
        self._backoff_seconds *= self._grown_factor
import unittest

from webkitpy.common.net.networktransaction import NetworkTransaction, NetworkTimeout
from webkitpy.common.system.logtesting import LoggingTestCase
from webkitpy.thirdparty.autoinstalled.mechanize import HTTPError


class NetworkTransactionTest(LoggingTestCase):
    """Tests NetworkTransaction's retry, backoff, timeout and 404 handling."""

    exception = Exception("Test exception")

    def test_success(self):
        transaction = NetworkTransaction()
        self.assertEqual(transaction.run(lambda: 42), 42)

    def _raise_exception(self):
        raise self.exception

    def test_exception(self):
        # Non-HTTP exceptions must propagate unchanged (no retry).
        transaction = NetworkTransaction()
        did_process_exception = False
        did_throw_exception = True
        try:
            transaction.run(lambda: self._raise_exception())
            did_throw_exception = False
        except Exception as e:  # "as" form works on Python 2.6+ and 3.x.
            did_process_exception = True
            self.assertEqual(e, self.exception)
        self.assertTrue(did_throw_exception)
        self.assertTrue(did_process_exception)

    def _raise_500_error(self):
        # Fails twice with a 500, then succeeds, to exercise the retry loop.
        self._run_count += 1
        if self._run_count < 3:
            raise HTTPError("http://example.com/", 500, "internal server error", None, None)
        return 42

    def _raise_404_error(self):
        raise HTTPError("http://foo.com/", 404, "not found", None, None)

    def test_retry(self):
        self._run_count = 0
        transaction = NetworkTransaction(initial_backoff_seconds=0)
        self.assertEqual(transaction.run(lambda: self._raise_500_error()), 42)
        self.assertEqual(self._run_count, 3)
        # The second backoff is 0 * 1.5, hence the "0.0" in the second message.
        self.assertLog(['WARNING: Received HTTP status 500 loading "http://example.com/". '
                        'Retrying in 0 seconds...\n',
                        'WARNING: Received HTTP status 500 loading "http://example.com/". '
                        'Retrying in 0.0 seconds...\n'])

    def test_convert_404_to_None(self):
        transaction = NetworkTransaction(convert_404_to_None=True)
        self.assertEqual(transaction.run(lambda: self._raise_404_error()), None)

    def test_timeout(self):
        # A backoff longer than the timeout must raise NetworkTimeout.
        self._run_count = 0
        transaction = NetworkTransaction(initial_backoff_seconds=60*60, timeout_seconds=60)
        did_process_exception = False
        did_throw_exception = True
        try:
            transaction.run(lambda: self._raise_500_error())
            did_throw_exception = False
        except NetworkTimeout:
            did_process_exception = True
        self.assertTrue(did_throw_exception)
        self.assertTrue(did_process_exception)
# FIXME: This probably belongs in the buildbot module.
class RegressionWindow(object):
    """The span of builds between the last good build and the first failing one.

    The regression was introduced by some revision strictly after
    build_before_failure and at or before failing_build.
    """

    def __init__(self, build_before_failure, failing_build, failing_tests=None):
        self._build_before_failure = build_before_failure
        self._failing_build = failing_build
        self._failing_tests = failing_tests
        self._revisions = None  # Lazily computed by revisions().

    def build_before_failure(self):
        return self._build_before_failure

    def failing_build(self):
        return self._failing_build

    def failing_tests(self):
        return self._failing_tests

    def revisions(self):
        """Return the candidate revisions in ascending order.

        Excludes build_before_failure's revision, includes failing_build's.
        """
        # Cache revisions to avoid excessive allocations.  Use an explicit
        # None check so an empty window (adjacent builds) is not recomputed
        # on every call.
        if self._revisions is None:
            # list() is a no-op on Python 2 (range() returns a list) and
            # makes .reverse() work on Python 3 (range() is lazy).
            self._revisions = list(range(self._failing_build.revision(), self._build_before_failure.revision(), -1))
            self._revisions.reverse()
        return self._revisions
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from webkitpy.common.net.networktransaction import NetworkTransaction
from webkitpy.common.system.deprecated_logging import log
from webkitpy.thirdparty.autoinstalled.mechanize import Browser
from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup

import logging
import urllib2


_log = logging.getLogger("webkitpy.common.net.statusserver")


class StatusServer:
    """Client for the queues.webkit.org status server.

    Posts queue status, work items and SVN revisions through the server's
    HTML forms (via mechanize) and fetches status pages over plain HTTP.
    """

    default_host = "queues.webkit.org"

    def __init__(self, host=default_host, browser=None, bot_id=None):
        self.set_host(host)
        self._browser = browser or Browser()
        self.set_bot_id(bot_id)

    def set_host(self, host):
        self.host = host
        self.url = "http://%s" % self.host

    def set_bot_id(self, bot_id):
        self.bot_id = bot_id

    def results_url_for_status(self, status_id):
        return "%s/results/%s" % (self.url, status_id)

    def _add_patch(self, patch):
        # Fills the bug_id/patch_id form fields from the patch, if any.
        if not patch:
            return
        if patch.bug_id():
            self._browser["bug_id"] = unicode(patch.bug_id())
        if patch.id():
            self._browser["patch_id"] = unicode(patch.id())

    def _add_results_file(self, results_file):
        if not results_file:
            return
        self._browser.add_file(results_file, "text/plain", "results.txt", 'results_file')

    # 500 is the AppEngine limit for TEXT fields (which most of our fields are).
    # Exceeding the limit will result in a 500 error from the server.
    def _set_field(self, field_name, value, limit=500):
        if len(value) > limit:
            _log.warn("Attempted to set %s to value exceeding %s characters, truncating." % (field_name, limit))
        self._browser[field_name] = value[:limit]

    def _post_status_to_server(self, queue_name, status, patch, results_file):
        if results_file:
            # We might need to re-wind the file if we've already tried to post it.
            results_file.seek(0)

        update_status_url = "%s/update-status" % self.url
        self._browser.open(update_status_url)
        self._browser.select_form(name="update_status")
        self._browser["queue_name"] = queue_name
        if self.bot_id:
            self._browser["bot_id"] = self.bot_id
        self._add_patch(patch)
        self._set_field("status", status, limit=500)
        self._add_results_file(results_file)
        return self._browser.submit().read()  # This is the id of the newly created status object.

    def _post_svn_revision_to_server(self, svn_revision_number, broken_bot):
        update_svn_revision_url = "%s/update-svn-revision" % self.url
        self._browser.open(update_svn_revision_url)
        self._browser.select_form(name="update_svn_revision")
        self._browser["number"] = unicode(svn_revision_number)
        self._browser["broken_bot"] = broken_bot
        return self._browser.submit().read()

    def _post_work_items_to_server(self, queue_name, work_items):
        update_work_items_url = "%s/update-work-items" % self.url
        self._browser.open(update_work_items_url)
        self._browser.select_form(name="update_work_items")
        self._browser["queue_name"] = queue_name
        work_items = map(unicode, work_items)  # .join expects strings
        self._browser["work_items"] = " ".join(work_items)
        return self._browser.submit().read()

    def _post_work_item_to_ews(self, attachment_id):
        submit_to_ews_url = "%s/submit-to-ews" % self.url
        self._browser.open(submit_to_ews_url)
        self._browser.select_form(name="submit_to_ews")
        self._browser["attachment_id"] = unicode(attachment_id)
        self._browser.submit()

    def submit_to_ews(self, attachment_id):
        _log.info("Submitting attachment %s to EWS queues" % attachment_id)
        return NetworkTransaction().run(lambda: self._post_work_item_to_ews(attachment_id))

    def next_work_item(self, queue_name):
        _log.debug("Fetching next work item for %s" % queue_name)
        patch_status_url = "%s/next-patch/%s" % (self.url, queue_name)
        return self._fetch_url(patch_status_url)

    def _post_release_work_item(self, queue_name, patch):
        release_patch_url = "%s/release-patch" % (self.url)
        self._browser.open(release_patch_url)
        self._browser.select_form(name="release_patch")
        self._browser["queue_name"] = queue_name
        self._browser["attachment_id"] = unicode(patch.id())
        self._browser.submit()

    def release_work_item(self, queue_name, patch):
        _log.info("Releasing work item %s from %s" % (patch.id(), queue_name))
        return NetworkTransaction(convert_404_to_None=True).run(lambda: self._post_release_work_item(queue_name, patch))

    def update_work_items(self, queue_name, work_items):
        _log.debug("Recording work items: %s for %s" % (work_items, queue_name))
        return NetworkTransaction().run(lambda: self._post_work_items_to_server(queue_name, work_items))

    def update_status(self, queue_name, status, patch=None, results_file=None):
        log(status)
        return NetworkTransaction().run(lambda: self._post_status_to_server(queue_name, status, patch, results_file))

    def update_svn_revision(self, svn_revision_number, broken_bot):
        log("SVN revision: %s broke %s" % (svn_revision_number, broken_bot))
        return NetworkTransaction().run(lambda: self._post_svn_revision_to_server(svn_revision_number, broken_bot))

    def _fetch_url(self, url):
        # FIXME: This should use NetworkTransaction's 404 handling instead.
        try:
            return urllib2.urlopen(url).read()
        except urllib2.HTTPError as e:  # "as" form works on Python 2.6+ and 3.x.
            if e.code == 404:
                return None
            # Bare raise preserves the original traceback; "raise e" would
            # reset it on Python 2.
            raise

    def patch_status(self, queue_name, patch_id):
        patch_status_url = "%s/patch-status/%s/%s" % (self.url, queue_name, patch_id)
        return self._fetch_url(patch_status_url)

    def svn_revision(self, svn_revision_number):
        svn_revision_url = "%s/svn-revision/%s" % (self.url, svn_revision_number)
        return self._fetch_url(svn_revision_url)
import unittest

from webkitpy.common.net.statusserver import StatusServer
from webkitpy.common.system.outputcapture import OutputCaptureTestCaseBase
from webkitpy.tool.mocktool import MockBrowser


class StatusServerTest(OutputCaptureTestCaseBase):
    """Tests that StatusServer fills the update_status form correctly."""

    # Renamed from test_url_for_issue (a copy/paste leftover): this test
    # exercises update_status's form parameters, not URL construction.
    def test_update_status(self):
        mock_browser = MockBrowser()
        status_server = StatusServer(browser=mock_browser, bot_id='123')
        status_server.update_status('queue name', 'the status')
        self.assertEqual('queue name', mock_browser.params['queue_name'])
        self.assertEqual('the status', mock_browser.params['status'])
        self.assertEqual('123', mock_browser.params['bot_id'])