diff options
Diffstat (limited to 'WebKitTools/Scripts/webkitpy/common/net')
16 files changed, 2941 insertions, 0 deletions
diff --git a/WebKitTools/Scripts/webkitpy/common/net/__init__.py b/WebKitTools/Scripts/webkitpy/common/net/__init__.py new file mode 100644 index 0000000..ef65bee --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/__init__.py @@ -0,0 +1 @@ +# Required for Python to search this directory for module files diff --git a/WebKitTools/Scripts/webkitpy/common/net/bugzilla.py b/WebKitTools/Scripts/webkitpy/common/net/bugzilla.py new file mode 100644 index 0000000..6920d67 --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/bugzilla.py @@ -0,0 +1,835 @@ +# Copyright (c) 2009 Google Inc. All rights reserved. +# Copyright (c) 2009 Apple Inc. All rights reserved. +# Copyright (c) 2010 Research In Motion Limited. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# WebKit's Python module for interacting with Bugzilla + +import os.path +import re +import subprocess + +from datetime import datetime # used in timestamp() + +from webkitpy.common.system.deprecated_logging import error, log +from webkitpy.common.config import committers +from webkitpy.common.net.credentials import Credentials +from webkitpy.common.system.ospath import relpath +from webkitpy.common.system.user import User +from webkitpy.thirdparty.autoinstalled.mechanize import Browser +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup, SoupStrainer + + +def parse_bug_id(message): + if not message: + return None + match = re.search("http\://webkit\.org/b/(?P<bug_id>\d+)", message) + if match: + return int(match.group('bug_id')) + match = re.search( + Bugzilla.bug_server_regex + "show_bug\.cgi\?id=(?P<bug_id>\d+)", + message) + if match: + return int(match.group('bug_id')) + return None + + +def timestamp(): + return datetime.now().strftime("%Y%m%d%H%M%S") + + +class Attachment(object): + + rollout_preamble = "ROLLOUT of r" + + def __init__(self, attachment_dictionary, bug): + self._attachment_dictionary = attachment_dictionary + self._bug = bug + self._reviewer = None + self._committer = None + + def _bugzilla(self): + return self._bug._bugzilla + + def id(self): + return int(self._attachment_dictionary.get("id")) + + def attacher_is_committer(self): + return self._bugzilla.committers.committer_by_email( + 
patch.attacher_email()) + + def attacher_email(self): + return self._attachment_dictionary.get("attacher_email") + + def bug(self): + return self._bug + + def bug_id(self): + return int(self._attachment_dictionary.get("bug_id")) + + def is_patch(self): + return not not self._attachment_dictionary.get("is_patch") + + def is_obsolete(self): + return not not self._attachment_dictionary.get("is_obsolete") + + def is_rollout(self): + return self.name().startswith(self.rollout_preamble) + + def name(self): + return self._attachment_dictionary.get("name") + + def review(self): + return self._attachment_dictionary.get("review") + + def commit_queue(self): + return self._attachment_dictionary.get("commit-queue") + + def url(self): + # FIXME: This should just return + # self._bugzilla().attachment_url_for_id(self.id()). scm_unittest.py + # depends on the current behavior. + return self._attachment_dictionary.get("url") + + def _validate_flag_value(self, flag): + email = self._attachment_dictionary.get("%s_email" % flag) + if not email: + return None + committer = getattr(self._bugzilla().committers, + "%s_by_email" % flag)(email) + if committer: + return committer + log("Warning, attachment %s on bug %s has invalid %s (%s)" % ( + self._attachment_dictionary['id'], + self._attachment_dictionary['bug_id'], flag, email)) + + def reviewer(self): + if not self._reviewer: + self._reviewer = self._validate_flag_value("reviewer") + return self._reviewer + + def committer(self): + if not self._committer: + self._committer = self._validate_flag_value("committer") + return self._committer + + +class Bug(object): + # FIXME: This class is kinda a hack for now. It exists so we have one + # place to hold bug logic, even if much of the code deals with + # dictionaries still. 
+ + def __init__(self, bug_dictionary, bugzilla): + self.bug_dictionary = bug_dictionary + self._bugzilla = bugzilla + + def id(self): + return self.bug_dictionary["id"] + + def assigned_to_email(self): + return self.bug_dictionary["assigned_to_email"] + + # FIXME: This information should be stored in some sort of webkit_config.py instead of here. + unassigned_emails = frozenset([ + "webkit-unassigned@lists.webkit.org", + "webkit-qt-unassigned@trolltech.com", + ]) + def is_unassigned(self): + return self.assigned_to_email() in self.unassigned_emails + + # Rarely do we actually want obsolete attachments + def attachments(self, include_obsolete=False): + attachments = self.bug_dictionary["attachments"] + if not include_obsolete: + attachments = filter(lambda attachment: + not attachment["is_obsolete"], attachments) + return [Attachment(attachment, self) for attachment in attachments] + + def patches(self, include_obsolete=False): + return [patch for patch in self.attachments(include_obsolete) + if patch.is_patch()] + + def unreviewed_patches(self): + return [patch for patch in self.patches() if patch.review() == "?"] + + def reviewed_patches(self, include_invalid=False): + patches = [patch for patch in self.patches() if patch.review() == "+"] + if include_invalid: + return patches + # Checking reviewer() ensures that it was both reviewed and has a valid + # reviewer. + return filter(lambda patch: patch.reviewer(), patches) + + def commit_queued_patches(self, include_invalid=False): + patches = [patch for patch in self.patches() + if patch.commit_queue() == "+"] + if include_invalid: + return patches + # Checking committer() ensures that it was both commit-queue+'d and has + # a valid committer. + return filter(lambda patch: patch.committer(), patches) + + +# A container for all of the logic for making and parsing buzilla queries. 
+class BugzillaQueries(object): + + def __init__(self, bugzilla): + self._bugzilla = bugzilla + + # Note: _load_query and _fetch_bug are the only two methods which access + # self._bugzilla. + + def _load_query(self, query): + self._bugzilla.authenticate() + + full_url = "%s%s" % (self._bugzilla.bug_server_url, query) + return self._bugzilla.browser.open(full_url) + + def _fetch_bug(self, bug_id): + return self._bugzilla.fetch_bug(bug_id) + + def _fetch_bug_ids_advanced_query(self, query): + soup = BeautifulSoup(self._load_query(query)) + # The contents of the <a> inside the cells in the first column happen + # to be the bug id. + return [int(bug_link_cell.find("a").string) + for bug_link_cell in soup('td', "first-child")] + + def _parse_attachment_ids_request_query(self, page): + digits = re.compile("\d+") + attachment_href = re.compile("attachment.cgi\?id=\d+&action=review") + attachment_links = SoupStrainer("a", href=attachment_href) + return [int(digits.search(tag["href"]).group(0)) + for tag in BeautifulSoup(page, parseOnlyThese=attachment_links)] + + def _fetch_attachment_ids_request_query(self, query): + return self._parse_attachment_ids_request_query(self._load_query(query)) + + def _parse_quips(self, page): + soup = BeautifulSoup(page, convertEntities=BeautifulSoup.HTML_ENTITIES) + quips = soup.find(text=re.compile(r"Existing quips:")).findNext("ul").findAll("li") + return [unicode(quip_entry.string) for quip_entry in quips] + + def fetch_quips(self): + return self._parse_quips(self._load_query("/quips.cgi?action=show")) + + # List of all r+'d bugs. 
+ def fetch_bug_ids_from_pending_commit_list(self): + needs_commit_query_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review%2B" + return self._fetch_bug_ids_advanced_query(needs_commit_query_url) + + def fetch_patches_from_pending_commit_list(self): + return sum([self._fetch_bug(bug_id).reviewed_patches() + for bug_id in self.fetch_bug_ids_from_pending_commit_list()], []) + + def fetch_bug_ids_from_commit_queue(self): + commit_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=commit-queue%2B&order=Last+Changed" + return self._fetch_bug_ids_advanced_query(commit_queue_url) + + def fetch_patches_from_commit_queue(self): + # This function will only return patches which have valid committers + # set. It won't reject patches with invalid committers/reviewers. + return sum([self._fetch_bug(bug_id).commit_queued_patches() + for bug_id in self.fetch_bug_ids_from_commit_queue()], []) + + def _fetch_bug_ids_from_review_queue(self): + review_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review?" + return self._fetch_bug_ids_advanced_query(review_queue_url) + + def fetch_patches_from_review_queue(self, limit=None): + # [:None] returns the whole array. + return sum([self._fetch_bug(bug_id).unreviewed_patches() + for bug_id in self._fetch_bug_ids_from_review_queue()[:limit]], []) + + # FIXME: Why do we have both fetch_patches_from_review_queue and + # fetch_attachment_ids_from_review_queue?? 
+ # NOTE: This is also the only client of _fetch_attachment_ids_request_query + + def fetch_attachment_ids_from_review_queue(self): + review_queue_url = "request.cgi?action=queue&type=review&group=type" + return self._fetch_attachment_ids_request_query(review_queue_url) + + +class CommitterValidator(object): + + def __init__(self, bugzilla): + self._bugzilla = bugzilla + + # _view_source_url belongs in some sort of webkit_config.py module. + def _view_source_url(self, local_path): + return "http://trac.webkit.org/browser/trunk/%s" % local_path + + def _checkout_root(self): + # FIXME: This is a hack, we would have this from scm.checkout_root + # if we had any way to get to an scm object here. + components = __file__.split(os.sep) + tools_index = components.index("WebKitTools") + return os.sep.join(components[:tools_index]) + + def _committers_py_path(self): + # extension can sometimes be .pyc, we always want .py + (path, extension) = os.path.splitext(committers.__file__) + # FIXME: When we're allowed to use python 2.6 we can use the real + # os.path.relpath + path = relpath(path, self._checkout_root()) + return ".".join([path, "py"]) + + def _flag_permission_rejection_message(self, setter_email, flag_name): + # Should come from some webkit_config.py + contribution_guidlines = "http://webkit.org/coding/contributing.html" + # This could be queried from the status_server. + queue_administrator = "eseidel@chromium.org" + # This could be queried from the tool. + queue_name = "commit-queue" + committers_list = self._committers_py_path() + message = "%s does not have %s permissions according to %s." % ( + setter_email, + flag_name, + self._view_source_url(committers_list)) + message += "\n\n- If you do not have %s rights please read %s for instructions on how to use bugzilla flags." % ( + flag_name, contribution_guidlines) + message += "\n\n- If you have %s rights please correct the error in %s by adding yourself to the file (no review needed). 
" % ( + flag_name, committers_list) + message += "Due to bug 30084 the %s will require a restart after your change. " % queue_name + message += "Please contact %s to request a %s restart. " % ( + queue_administrator, queue_name) + message += "After restart the %s will correctly respect your %s rights." % ( + queue_name, flag_name) + return message + + def _validate_setter_email(self, patch, result_key, rejection_function): + committer = getattr(patch, result_key)() + # If the flag is set, and we don't recognize the setter, reject the + # flag! + setter_email = patch._attachment_dictionary.get("%s_email" % result_key) + if setter_email and not committer: + rejection_function(patch.id(), + self._flag_permission_rejection_message(setter_email, + result_key)) + return False + return True + + def patches_after_rejecting_invalid_commiters_and_reviewers(self, patches): + validated_patches = [] + for patch in patches: + if (self._validate_setter_email( + patch, "reviewer", self.reject_patch_from_review_queue) + and self._validate_setter_email( + patch, "committer", self.reject_patch_from_commit_queue)): + validated_patches.append(patch) + return validated_patches + + def reject_patch_from_commit_queue(self, + attachment_id, + additional_comment_text=None): + comment_text = "Rejecting patch %s from commit-queue." % attachment_id + self._bugzilla.set_flag_on_attachment(attachment_id, + "commit-queue", + "-", + comment_text, + additional_comment_text) + + def reject_patch_from_review_queue(self, + attachment_id, + additional_comment_text=None): + comment_text = "Rejecting patch %s from review queue." 
% attachment_id + self._bugzilla.set_flag_on_attachment(attachment_id, + 'review', + '-', + comment_text, + additional_comment_text) + + +class Bugzilla(object): + + def __init__(self, dryrun=False, committers=committers.CommitterList()): + self.dryrun = dryrun + self.authenticated = False + self.queries = BugzillaQueries(self) + self.committers = committers + self.cached_quips = [] + + # FIXME: We should use some sort of Browser mock object when in dryrun + # mode (to prevent any mistakes). + self.browser = Browser() + # Ignore bugs.webkit.org/robots.txt until we fix it to allow this + # script. + self.browser.set_handle_robots(False) + + # FIXME: Much of this should go into some sort of config module: + bug_server_host = "bugs.webkit.org" + bug_server_regex = "https?://%s/" % re.sub('\.', '\\.', bug_server_host) + bug_server_url = "https://%s/" % bug_server_host + + def quips(self): + # We only fetch and parse the list of quips once per instantiation + # so that we do not burden bugs.webkit.org. 
+ if not self.cached_quips and not self.dryrun: + self.cached_quips = self.queries.fetch_quips() + return self.cached_quips + + def bug_url_for_bug_id(self, bug_id, xml=False): + if not bug_id: + return None + content_type = "&ctype=xml" if xml else "" + return "%sshow_bug.cgi?id=%s%s" % (self.bug_server_url, + bug_id, + content_type) + + def short_bug_url_for_bug_id(self, bug_id): + if not bug_id: + return None + return "http://webkit.org/b/%s" % bug_id + + def attachment_url_for_id(self, attachment_id, action="view"): + if not attachment_id: + return None + action_param = "" + if action and action != "view": + action_param = "&action=%s" % action + return "%sattachment.cgi?id=%s%s" % (self.bug_server_url, + attachment_id, + action_param) + + def _parse_attachment_flag(self, + element, + flag_name, + attachment, + result_key): + flag = element.find('flag', attrs={'name': flag_name}) + if flag: + attachment[flag_name] = flag['status'] + if flag['status'] == '+': + attachment[result_key] = flag['setter'] + + def _parse_attachment_element(self, element, bug_id): + attachment = {} + attachment['bug_id'] = bug_id + attachment['is_obsolete'] = (element.has_key('isobsolete') and element['isobsolete'] == "1") + attachment['is_patch'] = (element.has_key('ispatch') and element['ispatch'] == "1") + attachment['id'] = int(element.find('attachid').string) + # FIXME: No need to parse out the url here. 
+ attachment['url'] = self.attachment_url_for_id(attachment['id']) + attachment['name'] = unicode(element.find('desc').string) + attachment['attacher_email'] = str(element.find('attacher').string) + attachment['type'] = str(element.find('type').string) + self._parse_attachment_flag( + element, 'review', attachment, 'reviewer_email') + self._parse_attachment_flag( + element, 'commit-queue', attachment, 'committer_email') + return attachment + + def _parse_bug_page(self, page): + soup = BeautifulSoup(page) + bug = {} + bug["id"] = int(soup.find("bug_id").string) + bug["title"] = unicode(soup.find("short_desc").string) + bug["reporter_email"] = str(soup.find("reporter").string) + bug["assigned_to_email"] = str(soup.find("assigned_to").string) + bug["cc_emails"] = [str(element.string) + for element in soup.findAll('cc')] + bug["attachments"] = [self._parse_attachment_element(element, bug["id"]) for element in soup.findAll('attachment')] + return bug + + # Makes testing fetch_*_from_bug() possible until we have a better + # BugzillaNetwork abstration. + + def _fetch_bug_page(self, bug_id): + bug_url = self.bug_url_for_bug_id(bug_id, xml=True) + log("Fetching: %s" % bug_url) + return self.browser.open(bug_url) + + def fetch_bug_dictionary(self, bug_id): + try: + return self._parse_bug_page(self._fetch_bug_page(bug_id)) + except: + self.authenticate() + return self._parse_bug_page(self._fetch_bug_page(bug_id)) + + # FIXME: A BugzillaCache object should provide all these fetch_ methods. + + def fetch_bug(self, bug_id): + return Bug(self.fetch_bug_dictionary(bug_id), self) + + def _parse_bug_id_from_attachment_page(self, page): + # The "Up" relation happens to point to the bug. + up_link = BeautifulSoup(page).find('link', rel='Up') + if not up_link: + # This attachment does not exist (or you don't have permissions to + # view it). 
+ return None + match = re.search("show_bug.cgi\?id=(?P<bug_id>\d+)", up_link['href']) + return int(match.group('bug_id')) + + def bug_id_for_attachment_id(self, attachment_id): + self.authenticate() + + attachment_url = self.attachment_url_for_id(attachment_id, 'edit') + log("Fetching: %s" % attachment_url) + page = self.browser.open(attachment_url) + return self._parse_bug_id_from_attachment_page(page) + + # FIXME: This should just return Attachment(id), which should be able to + # lazily fetch needed data. + + def fetch_attachment(self, attachment_id): + # We could grab all the attachment details off of the attachment edit + # page but we already have working code to do so off of the bugs page, + # so re-use that. + bug_id = self.bug_id_for_attachment_id(attachment_id) + if not bug_id: + return None + attachments = self.fetch_bug(bug_id).attachments(include_obsolete=True) + for attachment in attachments: + if attachment.id() == int(attachment_id): + return attachment + return None # This should never be hit. + + def authenticate(self): + if self.authenticated: + return + + if self.dryrun: + log("Skipping log in for dry run...") + self.authenticated = True + return + + attempts = 0 + while not self.authenticated: + attempts += 1 + (username, password) = Credentials( + self.bug_server_host, git_prefix="bugzilla").read_credentials() + + log("Logging in as %s..." % username) + self.browser.open(self.bug_server_url + + "index.cgi?GoAheadAndLogIn=1") + self.browser.select_form(name="login") + self.browser['Bugzilla_login'] = username + self.browser['Bugzilla_password'] = password + response = self.browser.submit() + + match = re.search("<title>(.+?)</title>", response.read()) + # If the resulting page has a title, and it contains the word + # "invalid" assume it's the login failure page. 
+ if match and re.search("Invalid", match.group(1), re.IGNORECASE): + errorMessage = "Bugzilla login failed: %s" % match.group(1) + # raise an exception only if this was the last attempt + if attempts < 5: + log(errorMessage) + else: + raise Exception(errorMessage) + else: + self.authenticated = True + + def _fill_attachment_form(self, + description, + patch_file_object, + comment_text=None, + mark_for_review=False, + mark_for_commit_queue=False, + mark_for_landing=False, bug_id=None): + self.browser['description'] = description + self.browser['ispatch'] = ("1",) + self.browser['flag_type-1'] = ('?',) if mark_for_review else ('X',) + + if mark_for_landing: + self.browser['flag_type-3'] = ('+',) + elif mark_for_commit_queue: + self.browser['flag_type-3'] = ('?',) + else: + self.browser['flag_type-3'] = ('X',) + + if bug_id: + patch_name = "bug-%s-%s.patch" % (bug_id, timestamp()) + else: + patch_name ="%s.patch" % timestamp() + self.browser.add_file(patch_file_object, + "text/plain", + patch_name, + 'data') + + def add_patch_to_bug(self, + bug_id, + patch_file_object, + description, + comment_text=None, + mark_for_review=False, + mark_for_commit_queue=False, + mark_for_landing=False): + self.authenticate() + + log('Adding patch "%s" to %sshow_bug.cgi?id=%s' % (description, + self.bug_server_url, + bug_id)) + + if self.dryrun: + log(comment_text) + return + + self.browser.open("%sattachment.cgi?action=enter&bugid=%s" % ( + self.bug_server_url, bug_id)) + self.browser.select_form(name="entryform") + self._fill_attachment_form(description, + patch_file_object, + mark_for_review=mark_for_review, + mark_for_commit_queue=mark_for_commit_queue, + mark_for_landing=mark_for_landing, + bug_id=bug_id) + if comment_text: + log(comment_text) + self.browser['comment'] = comment_text + self.browser.submit() + + def _check_create_bug_response(self, response_html): + match = re.search("<title>Bug (?P<bug_id>\d+) Submitted</title>", + response_html) + if match: + return 
match.group('bug_id') + + match = re.search( + '<div id="bugzilla-body">(?P<error_message>.+)<div id="footer">', + response_html, + re.DOTALL) + error_message = "FAIL" + if match: + text_lines = BeautifulSoup( + match.group('error_message')).findAll(text=True) + error_message = "\n" + '\n'.join( + [" " + line.strip() + for line in text_lines if line.strip()]) + raise Exception("Bug not created: %s" % error_message) + + def create_bug(self, + bug_title, + bug_description, + component=None, + patch_file_object=None, + patch_description=None, + cc=None, + blocked=None, + mark_for_review=False, + mark_for_commit_queue=False): + self.authenticate() + + log('Creating bug with title "%s"' % bug_title) + if self.dryrun: + log(bug_description) + return + + self.browser.open(self.bug_server_url + "enter_bug.cgi?product=WebKit") + self.browser.select_form(name="Create") + component_items = self.browser.find_control('component').items + component_names = map(lambda item: item.name, component_items) + if not component: + component = "New Bugs" + if component not in component_names: + component = User.prompt_with_list("Please pick a component:", component_names) + self.browser["component"] = [component] + if cc: + self.browser["cc"] = cc + if blocked: + self.browser["blocked"] = str(blocked) + self.browser["short_desc"] = bug_title + self.browser["comment"] = bug_description + + if patch_file_object: + self._fill_attachment_form( + patch_description, + patch_file_object, + mark_for_review=mark_for_review, + mark_for_commit_queue=mark_for_commit_queue) + + response = self.browser.submit() + + bug_id = self._check_create_bug_response(response.read()) + log("Bug %s created." 
% bug_id) + log("%sshow_bug.cgi?id=%s" % (self.bug_server_url, bug_id)) + return bug_id + + def _find_select_element_for_flag(self, flag_name): + # FIXME: This will break if we ever re-order attachment flags + if flag_name == "review": + return self.browser.find_control(type='select', nr=0) + if flag_name == "commit-queue": + return self.browser.find_control(type='select', nr=1) + raise Exception("Don't know how to find flag named \"%s\"" % flag_name) + + def clear_attachment_flags(self, + attachment_id, + additional_comment_text=None): + self.authenticate() + + comment_text = "Clearing flags on attachment: %s" % attachment_id + if additional_comment_text: + comment_text += "\n\n%s" % additional_comment_text + log(comment_text) + + if self.dryrun: + return + + self.browser.open(self.attachment_url_for_id(attachment_id, 'edit')) + self.browser.select_form(nr=1) + self.browser.set_value(comment_text, name='comment', nr=0) + self._find_select_element_for_flag('review').value = ("X",) + self._find_select_element_for_flag('commit-queue').value = ("X",) + self.browser.submit() + + def set_flag_on_attachment(self, + attachment_id, + flag_name, + flag_value, + comment_text, + additional_comment_text): + # FIXME: We need a way to test this function on a live bugzilla + # instance. + + self.authenticate() + + if additional_comment_text: + comment_text += "\n\n%s" % additional_comment_text + log(comment_text) + + if self.dryrun: + return + + self.browser.open(self.attachment_url_for_id(attachment_id, 'edit')) + self.browser.select_form(nr=1) + self.browser.set_value(comment_text, name='comment', nr=0) + self._find_select_element_for_flag(flag_name).value = (flag_value,) + self.browser.submit() + + # FIXME: All of these bug editing methods have a ridiculous amount of + # copy/paste code. 
+ + def obsolete_attachment(self, attachment_id, comment_text=None): + self.authenticate() + + log("Obsoleting attachment: %s" % attachment_id) + if self.dryrun: + log(comment_text) + return + + self.browser.open(self.attachment_url_for_id(attachment_id, 'edit')) + self.browser.select_form(nr=1) + self.browser.find_control('isobsolete').items[0].selected = True + # Also clear any review flag (to remove it from review/commit queues) + self._find_select_element_for_flag('review').value = ("X",) + self._find_select_element_for_flag('commit-queue').value = ("X",) + if comment_text: + log(comment_text) + # Bugzilla has two textareas named 'comment', one is somehow + # hidden. We want the first. + self.browser.set_value(comment_text, name='comment', nr=0) + self.browser.submit() + + def add_cc_to_bug(self, bug_id, email_address_list): + self.authenticate() + + log("Adding %s to the CC list for bug %s" % (email_address_list, + bug_id)) + if self.dryrun: + return + + self.browser.open(self.bug_url_for_bug_id(bug_id)) + self.browser.select_form(name="changeform") + self.browser["newcc"] = ", ".join(email_address_list) + self.browser.submit() + + def post_comment_to_bug(self, bug_id, comment_text, cc=None): + self.authenticate() + + log("Adding comment to bug %s" % bug_id) + if self.dryrun: + log(comment_text) + return + + self.browser.open(self.bug_url_for_bug_id(bug_id)) + self.browser.select_form(name="changeform") + self.browser["comment"] = comment_text + if cc: + self.browser["newcc"] = ", ".join(cc) + self.browser.submit() + + def close_bug_as_fixed(self, bug_id, comment_text=None): + self.authenticate() + + log("Closing bug %s as fixed" % bug_id) + if self.dryrun: + log(comment_text) + return + + self.browser.open(self.bug_url_for_bug_id(bug_id)) + self.browser.select_form(name="changeform") + if comment_text: + log(comment_text) + self.browser['comment'] = comment_text + self.browser['bug_status'] = ['RESOLVED'] + self.browser['resolution'] = ['FIXED'] + 
self.browser.submit() + + def reassign_bug(self, bug_id, assignee, comment_text=None): + self.authenticate() + + log("Assigning bug %s to %s" % (bug_id, assignee)) + if self.dryrun: + log(comment_text) + return + + self.browser.open(self.bug_url_for_bug_id(bug_id)) + self.browser.select_form(name="changeform") + if comment_text: + log(comment_text) + self.browser["comment"] = comment_text + self.browser["assigned_to"] = assignee + self.browser.submit() + + def reopen_bug(self, bug_id, comment_text): + self.authenticate() + + log("Re-opening bug %s" % bug_id) + # Bugzilla requires a comment when re-opening a bug, so we know it will + # never be None. + log(comment_text) + if self.dryrun: + return + + self.browser.open(self.bug_url_for_bug_id(bug_id)) + self.browser.select_form(name="changeform") + bug_status = self.browser.find_control("bug_status", type="select") + # This is a hack around the fact that ClientForm.ListControl seems to + # have no simpler way to ask if a control has an item named "REOPENED" + # without using exceptions for control flow. + possible_bug_statuses = map(lambda item: item.name, bug_status.items) + if "REOPENED" in possible_bug_statuses: + bug_status.value = ["REOPENED"] + else: + log("Did not reopen bug %s. " + + "It appears to already be open with status %s." % ( + bug_id, bug_status.value)) + self.browser['comment'] = comment_text + self.browser.submit() diff --git a/WebKitTools/Scripts/webkitpy/common/net/bugzilla_unittest.py b/WebKitTools/Scripts/webkitpy/common/net/bugzilla_unittest.py new file mode 100644 index 0000000..4c44cdf --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/bugzilla_unittest.py @@ -0,0 +1,347 @@ +# Copyright (C) 2009 Google Inc. All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import unittest + +from webkitpy.common.config.committers import CommitterList, Reviewer, Committer +from webkitpy.common.net.bugzilla import Bugzilla, BugzillaQueries, parse_bug_id, CommitterValidator, Bug +from webkitpy.common.system.outputcapture import OutputCapture +from webkitpy.thirdparty.mock import Mock +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup + + +class MockBrowser(object): + def open(self, url): + pass + + def select_form(self, name): + pass + + def __setitem__(self, key, value): + pass + + def submit(self): + pass + + +class BugTest(unittest.TestCase): + def test_is_unassigned(self): + for email in Bug.unassigned_emails: + bug = Bug({"assigned_to_email" : email}, bugzilla=None) + self.assertTrue(bug.is_unassigned()) + bug = Bug({"assigned_to_email" : "test@test.com"}, bugzilla=None) + self.assertFalse(bug.is_unassigned()) + + +class CommitterValidatorTest(unittest.TestCase): + def test_flag_permission_rejection_message(self): + validator = CommitterValidator(bugzilla=None) + self.assertEqual(validator._committers_py_path(), "WebKitTools/Scripts/webkitpy/common/config/committers.py") + expected_messsage="""foo@foo.com does not have review permissions according to http://trac.webkit.org/browser/trunk/WebKitTools/Scripts/webkitpy/common/config/committers.py. + +- If you do not have review rights please read http://webkit.org/coding/contributing.html for instructions on how to use bugzilla flags. + +- If you have review rights please correct the error in WebKitTools/Scripts/webkitpy/common/config/committers.py by adding yourself to the file (no review needed). Due to bug 30084 the commit-queue will require a restart after your change. Please contact eseidel@chromium.org to request a commit-queue restart. 
After restart the commit-queue will correctly respect your review rights.""" + self.assertEqual(validator._flag_permission_rejection_message("foo@foo.com", "review"), expected_messsage) + + +class BugzillaTest(unittest.TestCase): + _example_attachment = ''' + <attachment + isobsolete="1" + ispatch="1" + isprivate="0" + > + <attachid>33721</attachid> + <date>2009-07-29 10:23 PDT</date> + <desc>Fixed whitespace issue</desc> + <filename>patch</filename> + <type>text/plain</type> + <size>9719</size> + <attacher>christian.plesner.hansen@gmail.com</attacher> + <flag name="review" + id="17931" + status="+" + setter="one@test.com" + /> + <flag name="commit-queue" + id="17932" + status="+" + setter="two@test.com" + /> + </attachment> +''' + _expected_example_attachment_parsing = { + 'bug_id' : 100, + 'is_obsolete' : True, + 'is_patch' : True, + 'id' : 33721, + 'url' : "https://bugs.webkit.org/attachment.cgi?id=33721", + 'name' : "Fixed whitespace issue", + 'type' : "text/plain", + 'review' : '+', + 'reviewer_email' : 'one@test.com', + 'commit-queue' : '+', + 'committer_email' : 'two@test.com', + 'attacher_email' : 'christian.plesner.hansen@gmail.com', + } + + def test_url_creation(self): + # FIXME: These would be all better as doctests + bugs = Bugzilla() + self.assertEquals(None, bugs.bug_url_for_bug_id(None)) + self.assertEquals(None, bugs.short_bug_url_for_bug_id(None)) + self.assertEquals(None, bugs.attachment_url_for_id(None)) + + def test_parse_bug_id(self): + # FIXME: These would be all better as doctests + bugs = Bugzilla() + self.assertEquals(12345, parse_bug_id("http://webkit.org/b/12345")) + self.assertEquals(12345, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?id=12345")) + self.assertEquals(12345, parse_bug_id(bugs.short_bug_url_for_bug_id(12345))) + self.assertEquals(12345, parse_bug_id(bugs.bug_url_for_bug_id(12345))) + self.assertEquals(12345, parse_bug_id(bugs.bug_url_for_bug_id(12345, xml=True))) + + # Our bug parser is super-fragile, but at least 
we're testing it. + self.assertEquals(None, parse_bug_id("http://www.webkit.org/b/12345")) + self.assertEquals(None, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?ctype=xml&id=12345")) + + _example_bug = """ +<?xml version="1.0" encoding="UTF-8" standalone="yes" ?> +<!DOCTYPE bugzilla SYSTEM "https://bugs.webkit.org/bugzilla.dtd"> +<bugzilla version="3.2.3" + urlbase="https://bugs.webkit.org/" + maintainer="admin@webkit.org" + exporter="eric@webkit.org" +> + <bug> + <bug_id>32585</bug_id> + <creation_ts>2009-12-15 15:17 PST</creation_ts> + <short_desc>bug to test webkit-patch and commit-queue failures</short_desc> + <delta_ts>2009-12-27 21:04:50 PST</delta_ts> + <reporter_accessible>1</reporter_accessible> + <cclist_accessible>1</cclist_accessible> + <classification_id>1</classification_id> + <classification>Unclassified</classification> + <product>WebKit</product> + <component>Tools / Tests</component> + <version>528+ (Nightly build)</version> + <rep_platform>PC</rep_platform> + <op_sys>Mac OS X 10.5</op_sys> + <bug_status>NEW</bug_status> + <priority>P2</priority> + <bug_severity>Normal</bug_severity> + <target_milestone>---</target_milestone> + <everconfirmed>1</everconfirmed> + <reporter name="Eric Seidel">eric@webkit.org</reporter> + <assigned_to name="Nobody">webkit-unassigned@lists.webkit.org</assigned_to> + <cc>foo@bar.com</cc> + <cc>example@example.com</cc> + <long_desc isprivate="0"> + <who name="Eric Seidel">eric@webkit.org</who> + <bug_when>2009-12-15 15:17:28 PST</bug_when> + <thetext>bug to test webkit-patch and commit-queue failures + +Ignore this bug. 
Just for testing failure modes of webkit-patch and the commit-queue.</thetext> + </long_desc> + <attachment + isobsolete="0" + ispatch="1" + isprivate="0" + > + <attachid>45548</attachid> + <date>2009-12-27 23:51 PST</date> + <desc>Patch</desc> + <filename>bug-32585-20091228005112.patch</filename> + <type>text/plain</type> + <size>10882</size> + <attacher>mjs@apple.com</attacher> + + <token>1261988248-dc51409e9c421a4358f365fa8bec8357</token> + <data encoding="base64">SW5kZXg6IFdlYktpdC9tYWMvQ2hhbmdlTG9nCj09PT09PT09PT09PT09PT09PT09PT09PT09PT09 +removed-because-it-was-really-long +ZEZpbmlzaExvYWRXaXRoUmVhc29uOnJlYXNvbl07Cit9CisKIEBlbmQKIAogI2VuZGlmCg== +</data> + + <flag name="review" + id="27602" + status="?" + setter="mjs@apple.com" + /> + </attachment> + </bug> +</bugzilla> +""" + _expected_example_bug_parsing = { + "id" : 32585, + "title" : u"bug to test webkit-patch and commit-queue failures", + "cc_emails" : ["foo@bar.com", "example@example.com"], + "reporter_email" : "eric@webkit.org", + "assigned_to_email" : "webkit-unassigned@lists.webkit.org", + "attachments" : [{ + 'name': u'Patch', + 'url' : "https://bugs.webkit.org/attachment.cgi?id=45548", + 'is_obsolete': False, + 'review': '?', + 'is_patch': True, + 'attacher_email': 'mjs@apple.com', + 'bug_id': 32585, + 'type': 'text/plain', + 'id': 45548 + }], + } + + # FIXME: This should move to a central location and be shared by more unit tests. 
+ def _assert_dictionaries_equal(self, actual, expected): + # Make sure we aren't parsing more or less than we expect + self.assertEquals(sorted(actual.keys()), sorted(expected.keys())) + + for key, expected_value in expected.items(): + self.assertEquals(actual[key], expected_value, ("Failure for key: %s: Actual='%s' Expected='%s'" % (key, actual[key], expected_value))) + + def test_bug_parsing(self): + bug = Bugzilla()._parse_bug_page(self._example_bug) + self._assert_dictionaries_equal(bug, self._expected_example_bug_parsing) + + # This could be combined into test_bug_parsing later if desired. + def test_attachment_parsing(self): + bugzilla = Bugzilla() + soup = BeautifulSoup(self._example_attachment) + attachment_element = soup.find("attachment") + attachment = bugzilla._parse_attachment_element(attachment_element, self._expected_example_attachment_parsing['bug_id']) + self.assertTrue(attachment) + self._assert_dictionaries_equal(attachment, self._expected_example_attachment_parsing) + + _sample_attachment_detail_page = """ +<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" + "http://www.w3.org/TR/html4/loose.dtd"> +<html> + <head> + <title> + Attachment 41073 Details for Bug 27314</title> +<link rel="Top" href="https://bugs.webkit.org/"> + <link rel="Up" href="show_bug.cgi?id=27314"> +""" + + def test_attachment_detail_bug_parsing(self): + bugzilla = Bugzilla() + self.assertEquals(27314, bugzilla._parse_bug_id_from_attachment_page(self._sample_attachment_detail_page)) + + def test_add_cc_to_bug(self): + bugzilla = Bugzilla() + bugzilla.browser = MockBrowser() + bugzilla.authenticate = lambda: None + expected_stderr = "Adding ['adam@example.com'] to the CC list for bug 42\n" + OutputCapture().assert_outputs(self, bugzilla.add_cc_to_bug, [42, ["adam@example.com"]], expected_stderr=expected_stderr) + + +class BugzillaQueriesTest(unittest.TestCase): + _sample_request_page = """ +<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" + 
"http://www.w3.org/TR/html4/loose.dtd"> +<html> + <head> + <title>Request Queue</title> + </head> +<body> + +<h3>Flag: review</h3> + <table class="requests" cellspacing="0" cellpadding="4" border="1"> + <tr> + <th>Requester</th> + <th>Requestee</th> + <th>Bug</th> + <th>Attachment</th> + <th>Created</th> + </tr> + <tr> + <td>Shinichiro Hamaji <hamaji@chromium.org></td> + <td></td> + <td><a href="show_bug.cgi?id=30015">30015: text-transform:capitalize is failing in CSS2.1 test suite</a></td> + <td><a href="attachment.cgi?id=40511&action=review"> +40511: Patch v0</a></td> + <td>2009-10-02 04:58 PST</td> + </tr> + <tr> + <td>Zan Dobersek <zandobersek@gmail.com></td> + <td></td> + <td><a href="show_bug.cgi?id=26304">26304: [GTK] Add controls for playing html5 video.</a></td> + <td><a href="attachment.cgi?id=40722&action=review"> +40722: Media controls, the simple approach</a></td> + <td>2009-10-06 09:13 PST</td> + </tr> + <tr> + <td>Zan Dobersek <zandobersek@gmail.com></td> + <td></td> + <td><a href="show_bug.cgi?id=26304">26304: [GTK] Add controls for playing html5 video.</a></td> + <td><a href="attachment.cgi?id=40723&action=review"> +40723: Adjust the media slider thumb size</a></td> + <td>2009-10-06 09:15 PST</td> + </tr> + </table> +</body> +</html> +""" + _sample_quip_page = u""" +<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" + "http://www.w3.org/TR/html4/loose.dtd"> +<html> + <head> + <title>Bugzilla Quip System</title> + </head> + <body> + <h2> + + Existing quips: + </h2> + <ul> + <li>Everything should be made as simple as possible, but not simpler. - Albert Einstein</li> + <li>Good artists copy. Great artists steal. 
- Pablo Picasso</li> + <li>\u00e7gua mole em pedra dura, tanto bate at\u008e que fura.</li> + + </ul> + </body> +</html> +""" + + def test_request_page_parsing(self): + queries = BugzillaQueries(None) + self.assertEquals([40511, 40722, 40723], queries._parse_attachment_ids_request_query(self._sample_request_page)) + + def test_quip_page_parsing(self): + queries = BugzillaQueries(None) + expected_quips = ["Everything should be made as simple as possible, but not simpler. - Albert Einstein", "Good artists copy. Great artists steal. - Pablo Picasso", u"\u00e7gua mole em pedra dura, tanto bate at\u008e que fura."] + self.assertEquals(expected_quips, queries._parse_quips(self._sample_quip_page)) + + def test_load_query(self): + queries = BugzillaQueries(Mock()) + queries._load_query("request.cgi?action=queue&type=review&group=type") + + +if __name__ == '__main__': + unittest.main() diff --git a/WebKitTools/Scripts/webkitpy/common/net/buildbot.py b/WebKitTools/Scripts/webkitpy/common/net/buildbot.py new file mode 100644 index 0000000..753e909 --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/buildbot.py @@ -0,0 +1,495 @@ +# Copyright (c) 2009, Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# WebKit's Python module for interacting with WebKit's buildbot + +import operator +import re +import urllib +import urllib2 +import xmlrpclib + +from webkitpy.common.system.logutils import get_logger +from webkitpy.thirdparty.autoinstalled.mechanize import Browser +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup + + +_log = get_logger(__file__) + + +class Builder(object): + def __init__(self, name, buildbot): + self._name = unicode(name) + self._buildbot = buildbot + self._builds_cache = {} + self._revision_to_build_number = None + self._browser = Browser() + self._browser.set_handle_robots(False) # The builder pages are excluded by robots.txt + + def name(self): + return self._name + + def results_url(self): + return "http://%s/results/%s" % (self._buildbot.buildbot_host, self.url_encoded_name()) + + def url_encoded_name(self): + return urllib.quote(self._name) + + def url(self): + return "http://%s/builders/%s" % (self._buildbot.buildbot_host, self.url_encoded_name()) + + # This provides a single place to mock + def _fetch_build(self, build_number): + build_dictionary = self._buildbot._fetch_xmlrpc_build_dictionary(self, build_number) + if not build_dictionary: + return 
None + return Build(self, + build_number=int(build_dictionary['number']), + revision=int(build_dictionary['revision']), + is_green=(build_dictionary['results'] == 0) # Undocumented, buildbot XMLRPC, 0 seems to mean "pass" + ) + + def build(self, build_number): + if not build_number: + return None + cached_build = self._builds_cache.get(build_number) + if cached_build: + return cached_build + + build = self._fetch_build(build_number) + self._builds_cache[build_number] = build + return build + + def force_build(self, username="webkit-patch", comments=None): + def predicate(form): + try: + return form.find_control("username") + except Exception, e: + return False + self._browser.open(self.url()) + self._browser.select_form(predicate=predicate) + self._browser["username"] = username + if comments: + self._browser["comments"] = comments + return self._browser.submit() + + file_name_regexp = re.compile(r"r(?P<revision>\d+) \((?P<build_number>\d+)\)") + def _revision_and_build_for_filename(self, filename): + # Example: "r47483 (1)/" or "r47483 (1).zip" + match = self.file_name_regexp.match(filename) + return (int(match.group("revision")), int(match.group("build_number"))) + + def _fetch_revision_to_build_map(self): + # All _fetch requests go through _buildbot for easier mocking + try: + # FIXME: This method is horribly slow due to the huge network load. + # FIXME: This is a poor way to do revision -> build mapping. + # Better would be to ask buildbot through some sort of API. + print "Loading revision/build list from %s." % self.results_url() + print "This may take a while..." + result_files = self._buildbot._fetch_twisted_directory_listing(self.results_url()) + except urllib2.HTTPError, error: + if error.code != 404: + raise + result_files = [] + + # This assumes there was only one build per revision, which is false but we don't care for now. 
+ return dict([self._revision_and_build_for_filename(file_info["filename"]) for file_info in result_files]) + + def _revision_to_build_map(self): + if not self._revision_to_build_number: + self._revision_to_build_number = self._fetch_revision_to_build_map() + return self._revision_to_build_number + + def revision_build_pairs_with_results(self): + return self._revision_to_build_map().items() + + # This assumes there can be only one build per revision, which is false, but we don't care for now. + def build_for_revision(self, revision, allow_failed_lookups=False): + # NOTE: This lookup will fail if that exact revision was never built. + build_number = self._revision_to_build_map().get(int(revision)) + if not build_number: + return None + build = self.build(build_number) + if not build and allow_failed_lookups: + # Builds for old revisions with fail to lookup via buildbot's xmlrpc api. + build = Build(self, + build_number=build_number, + revision=revision, + is_green=False, + ) + return build + + def find_failure_transition(self, red_build, look_back_limit=30): + if not red_build or red_build.is_green(): + return (None, None) + common_failures = None + current_build = red_build + build_after_current_build = None + look_back_count = 0 + while current_build: + if current_build.is_green(): + # current_build can't possibly have any failures in common + # with red_build because it's green. + break + results = current_build.layout_test_results() + # We treat a lack of results as if all the test failed. + # This occurs, for example, when we can't compile at all. + if results: + failures = set(results.failing_tests()) + if common_failures == None: + common_failures = failures + common_failures = common_failures.intersection(failures) + if not common_failures: + # current_build doesn't have any failures in common with + # the red build we're worried about. We assume that any + # failures in current_build were due to flakiness. 
+ break + look_back_count += 1 + if look_back_count > look_back_limit: + return (None, current_build) + build_after_current_build = current_build + current_build = current_build.previous_build() + # We must iterate at least once because red_build is red. + assert(build_after_current_build) + # Current build must either be green or have no failures in common + # with red build, so we've found our failure transition. + return (current_build, build_after_current_build) + + # FIXME: This likely does not belong on Builder + def suspect_revisions_for_transition(self, last_good_build, first_bad_build): + suspect_revisions = range(first_bad_build.revision(), + last_good_build.revision(), + -1) + suspect_revisions.reverse() + return suspect_revisions + + def blameworthy_revisions(self, red_build_number, look_back_limit=30, avoid_flakey_tests=True): + red_build = self.build(red_build_number) + (last_good_build, first_bad_build) = \ + self.find_failure_transition(red_build, look_back_limit) + if not last_good_build: + return [] # We ran off the limit of our search + # If avoid_flakey_tests, require at least 2 bad builds before we + # suspect a real failure transition. 
+ if avoid_flakey_tests and first_bad_build == red_build: + return [] + return self.suspect_revisions_for_transition(last_good_build, first_bad_build) + + +# FIXME: This should be unified with all the layout test results code in the layout_tests package +class LayoutTestResults(object): + stderr_key = u'Tests that had stderr output:' + fail_key = u'Tests where results did not match expected results:' + timeout_key = u'Tests that timed out:' + crash_key = u'Tests that caused the DumpRenderTree tool to crash:' + missing_key = u'Tests that had no expected results (probably new):' + + expected_keys = [ + stderr_key, + fail_key, + crash_key, + timeout_key, + missing_key, + ] + + @classmethod + def _parse_results_html(cls, page): + parsed_results = {} + tables = BeautifulSoup(page).findAll("table") + for table in tables: + table_title = table.findPreviousSibling("p").string + if table_title not in cls.expected_keys: + # This Exception should only ever be hit if run-webkit-tests changes its results.html format. + raise Exception("Unhandled title: %s" % str(table_title)) + # We might want to translate table titles into identifiers before storing. + parsed_results[table_title] = [row.find("a").string for row in table.findAll("tr")] + + return parsed_results + + @classmethod + def _fetch_results_html(cls, base_url): + results_html = "%s/results.html" % base_url + # FIXME: We need to move this sort of 404 logic into NetworkTransaction or similar. 
+ try: + page = urllib2.urlopen(results_html) + return cls._parse_results_html(page) + except urllib2.HTTPError, error: + if error.code != 404: + raise + + @classmethod + def results_from_url(cls, base_url): + parsed_results = cls._fetch_results_html(base_url) + if not parsed_results: + return None + return cls(base_url, parsed_results) + + def __init__(self, base_url, parsed_results): + self._base_url = base_url + self._parsed_results = parsed_results + + def parsed_results(self): + return self._parsed_results + + def failing_tests(self): + failing_keys = [self.fail_key, self.crash_key, self.timeout_key] + return sorted(sum([tests for key, tests in self._parsed_results.items() if key in failing_keys], [])) + + +class Build(object): + def __init__(self, builder, build_number, revision, is_green): + self._builder = builder + self._number = build_number + self._revision = revision + self._is_green = is_green + self._layout_test_results = None + + @staticmethod + def build_url(builder, build_number): + return "%s/builds/%s" % (builder.url(), build_number) + + def url(self): + return self.build_url(self.builder(), self._number) + + def results_url(self): + results_directory = "r%s (%s)" % (self.revision(), self._number) + return "%s/%s" % (self._builder.results_url(), urllib.quote(results_directory)) + + def layout_test_results(self): + if not self._layout_test_results: + self._layout_test_results = LayoutTestResults.results_from_url(self.results_url()) + return self._layout_test_results + + def builder(self): + return self._builder + + def revision(self): + return self._revision + + def is_green(self): + return self._is_green + + def previous_build(self): + # previous_build() allows callers to avoid assuming build numbers are sequential. + # They may not be sequential across all master changes, or when non-trunk builds are made. 
+ return self._builder.build(self._number - 1) + + +class BuildBot(object): + # FIXME: This should move into some sort of webkit_config.py + default_host = "build.webkit.org" + + def __init__(self, host=default_host): + self.buildbot_host = host + self._builder_by_name = {} + + # If any core builder is red we should not be landing patches. Other + # builders should be added to this list once they are known to be + # reliable. + # See https://bugs.webkit.org/show_bug.cgi?id=33296 and related bugs. + self.core_builder_names_regexps = [ + "SnowLeopard.*Build", + "SnowLeopard.*Test", + "Leopard", + "Tiger", + "Windows.*Build", + "Windows.*Debug.*Test", + "GTK", + "Qt", + "Chromium", + ] + + def _parse_last_build_cell(self, builder, cell): + status_link = cell.find('a') + if status_link: + # Will be either a revision number or a build number + revision_string = status_link.string + # If revision_string has non-digits assume it's not a revision number. + builder['built_revision'] = int(revision_string) \ + if not re.match('\D', revision_string) \ + else None + builder['is_green'] = not re.search('fail', cell.renderContents()) + + status_link_regexp = r"builders/(?P<builder_name>.*)/builds/(?P<build_number>\d+)" + link_match = re.match(status_link_regexp, status_link['href']) + builder['build_number'] = int(link_match.group("build_number")) + else: + # We failed to find a link in the first cell, just give up. This + # can happen if a builder is just-added, the first cell will just + # be "no build" + # Other parts of the code depend on is_green being present. + builder['is_green'] = False + builder['built_revision'] = None + builder['build_number'] = None + + def _parse_current_build_cell(self, builder, cell): + activity_lines = cell.renderContents().split("<br />") + builder["activity"] = activity_lines[0] # normally "building" or "idle" + # The middle lines document how long left for any current builds. 
+ match = re.match("(?P<pending_builds>\d) pending", activity_lines[-1]) + builder["pending_builds"] = int(match.group("pending_builds")) if match else 0 + + def _parse_builder_status_from_row(self, status_row): + status_cells = status_row.findAll('td') + builder = {} + + # First cell is the name + name_link = status_cells[0].find('a') + builder["name"] = name_link.string + + self._parse_last_build_cell(builder, status_cells[1]) + self._parse_current_build_cell(builder, status_cells[2]) + return builder + + def _matches_regexps(self, builder_name, name_regexps): + for name_regexp in name_regexps: + if re.match(name_regexp, builder_name): + return True + return False + + # FIXME: Should move onto Builder + def _is_core_builder(self, builder_name): + return self._matches_regexps(builder_name, self.core_builder_names_regexps) + + # FIXME: This method needs to die, but is used by a unit test at the moment. + def _builder_statuses_with_names_matching_regexps(self, builder_statuses, name_regexps): + return [builder for builder in builder_statuses if self._matches_regexps(builder["name"], name_regexps)] + + def red_core_builders(self): + return [builder for builder in self.core_builder_statuses() if not builder["is_green"]] + + def red_core_builders_names(self): + return [builder["name"] for builder in self.red_core_builders()] + + def idle_red_core_builders(self): + return [builder for builder in self.red_core_builders() if builder["activity"] == "idle"] + + def core_builders_are_green(self): + return not self.red_core_builders() + + # FIXME: These _fetch methods should move to a networking class. + def _fetch_xmlrpc_build_dictionary(self, builder, build_number): + # The buildbot XMLRPC API is super-limited. + # For one, you cannot fetch info on builds which are incomplete. 
+ proxy = xmlrpclib.ServerProxy("http://%s/xmlrpc" % self.buildbot_host, allow_none=True) + try: + return proxy.getBuild(builder.name(), int(build_number)) + except xmlrpclib.Fault, err: + build_url = Build.build_url(builder, build_number) + _log.error("Error fetching data for %s build %s (%s): %s" % (builder.name(), build_number, build_url, err)) + return None + + def _fetch_one_box_per_builder(self): + build_status_url = "http://%s/one_box_per_builder" % self.buildbot_host + return urllib2.urlopen(build_status_url) + + def _parse_twisted_file_row(self, file_row): + string_or_empty = lambda string: str(string) if string else "" + file_cells = file_row.findAll('td') + return { + "filename" : string_or_empty(file_cells[0].find("a").string), + "size" : string_or_empty(file_cells[1].string), + "type" : string_or_empty(file_cells[2].string), + "encoding" : string_or_empty(file_cells[3].string), + } + + def _parse_twisted_directory_listing(self, page): + soup = BeautifulSoup(page) + # HACK: Match only table rows with a class to ignore twisted header/footer rows. + file_rows = soup.find('table').findAll('tr', { "class" : True }) + return [self._parse_twisted_file_row(file_row) for file_row in file_rows] + + # FIXME: There should be a better way to get this information directly from twisted. 
+ def _fetch_twisted_directory_listing(self, url): + return self._parse_twisted_directory_listing(urllib2.urlopen(url)) + + def builders(self): + return [self.builder_with_name(status["name"]) for status in self.builder_statuses()] + + # This method pulls from /one_box_per_builder as an efficient way to get information about + def builder_statuses(self): + soup = BeautifulSoup(self._fetch_one_box_per_builder()) + return [self._parse_builder_status_from_row(status_row) for status_row in soup.find('table').findAll('tr')] + + def core_builder_statuses(self): + return [builder for builder in self.builder_statuses() if self._is_core_builder(builder["name"])] + + def builder_with_name(self, name): + builder = self._builder_by_name.get(name) + if not builder: + builder = Builder(name, self) + self._builder_by_name[name] = builder + return builder + + def revisions_causing_failures(self, only_core_builders=True): + builder_statuses = self.core_builder_statuses() if only_core_builders else self.builder_statuses() + revision_to_failing_bots = {} + for builder_status in builder_statuses: + if builder_status["is_green"]: + continue + builder = self.builder_with_name(builder_status["name"]) + revisions = builder.blameworthy_revisions(builder_status["build_number"]) + for revision in revisions: + failing_bots = revision_to_failing_bots.get(revision, []) + failing_bots.append(builder) + revision_to_failing_bots[revision] = failing_bots + return revision_to_failing_bots + + # This makes fewer requests than calling Builder.latest_build would. It grabs all builder + # statuses in one request using self.builder_statuses (fetching /one_box_per_builder instead of builder pages). 
+ def _latest_builds_from_builders(self, only_core_builders=True): + builder_statuses = self.core_builder_statuses() if only_core_builders else self.builder_statuses() + return [self.builder_with_name(status["name"]).build(status["build_number"]) for status in builder_statuses] + + def _build_at_or_before_revision(self, build, revision): + while build: + if build.revision() <= revision: + return build + build = build.previous_build() + + def last_green_revision(self, only_core_builders=True): + builds = self._latest_builds_from_builders(only_core_builders) + target_revision = builds[0].revision() + # An alternate way to do this would be to start at one revision and walk backwards + # checking builder.build_for_revision, however build_for_revision is very slow on first load. + while True: + # Make builds agree on revision + builds = [self._build_at_or_before_revision(build, target_revision) for build in builds] + if None in builds: # One of the builds failed to load from the server. + return None + min_revision = min(map(lambda build: build.revision(), builds)) + if min_revision != target_revision: + target_revision = min_revision + continue # Builds don't all agree on revision, keep searching + # Check to make sure they're all green + all_are_green = reduce(operator.and_, map(lambda build: build.is_green(), builds)) + if not all_are_green: + target_revision -= 1 + continue + return min_revision diff --git a/WebKitTools/Scripts/webkitpy/common/net/buildbot_unittest.py b/WebKitTools/Scripts/webkitpy/common/net/buildbot_unittest.py new file mode 100644 index 0000000..f765f6e --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/buildbot_unittest.py @@ -0,0 +1,433 @@ +# Copyright (C) 2009 Google Inc. All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import unittest + +from webkitpy.common.net.buildbot import BuildBot, Builder, Build, LayoutTestResults + +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup + + +class BuilderTest(unittest.TestCase): + def _install_fetch_build(self, failure): + def _mock_fetch_build(build_number): + build = Build( + builder=self.builder, + build_number=build_number, + revision=build_number + 1000, + is_green=build_number < 4 + ) + build._layout_test_results = LayoutTestResults( + "http://buildbot.example.com/foo", { + LayoutTestResults.fail_key: failure(build_number), + }) + return build + self.builder._fetch_build = _mock_fetch_build + + def setUp(self): + self.buildbot = BuildBot() + self.builder = Builder("Test Builder", self.buildbot) + self._install_fetch_build(lambda build_number: ["test1", "test2"]) + + def test_find_failure_transition(self): + (green_build, red_build) = self.builder.find_failure_transition(self.builder.build(10)) + self.assertEqual(green_build.revision(), 1003) + self.assertEqual(red_build.revision(), 1004) + + (green_build, red_build) = self.builder.find_failure_transition(self.builder.build(10), look_back_limit=2) + self.assertEqual(green_build, None) + self.assertEqual(red_build.revision(), 1008) + + def test_none_build(self): + self.builder._fetch_build = lambda build_number: None + (green_build, red_build) = self.builder.find_failure_transition(self.builder.build(10)) + self.assertEqual(green_build, None) + self.assertEqual(red_build, None) + + def test_flaky_tests(self): + self._install_fetch_build(lambda build_number: ["test1"] if build_number % 2 else ["test2"]) + (green_build, red_build) = self.builder.find_failure_transition(self.builder.build(10)) + self.assertEqual(green_build.revision(), 1009) + self.assertEqual(red_build.revision(), 1010) + + def test_failure_and_flaky(self): + self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"]) + (green_build, red_build) = 
self.builder.find_failure_transition(self.builder.build(10)) + self.assertEqual(green_build.revision(), 1003) + self.assertEqual(red_build.revision(), 1004) + + def test_no_results(self): + self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"]) + (green_build, red_build) = self.builder.find_failure_transition(self.builder.build(10)) + self.assertEqual(green_build.revision(), 1003) + self.assertEqual(red_build.revision(), 1004) + + def test_failure_after_flaky(self): + self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number > 6 else ["test3"]) + (green_build, red_build) = self.builder.find_failure_transition(self.builder.build(10)) + self.assertEqual(green_build.revision(), 1006) + self.assertEqual(red_build.revision(), 1007) + + def test_blameworthy_revisions(self): + self.assertEqual(self.builder.blameworthy_revisions(10), [1004]) + self.assertEqual(self.builder.blameworthy_revisions(10, look_back_limit=2), []) + # Flakey test avoidance requires at least 2 red builds: + self.assertEqual(self.builder.blameworthy_revisions(4), []) + self.assertEqual(self.builder.blameworthy_revisions(4, avoid_flakey_tests=False), [1004]) + # Green builder: + self.assertEqual(self.builder.blameworthy_revisions(3), []) + + def test_build_caching(self): + self.assertEqual(self.builder.build(10), self.builder.build(10)) + + def test_build_and_revision_for_filename(self): + expectations = { + "r47483 (1)/" : (47483, 1), + "r47483 (1).zip" : (47483, 1), + } + for filename, revision_and_build in expectations.items(): + self.assertEqual(self.builder._revision_and_build_for_filename(filename), revision_and_build) + + +class LayoutTestResultsTest(unittest.TestCase): + _example_results_html = """ +<html> +<head> +<title>Layout Test Results</title> +</head> +<body> +<p>Tests that had stderr output:</p> +<table> +<tr> +<td><a 
href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/accessibility/aria-activedescendant-crash.html">accessibility/aria-activedescendant-crash.html</a></td> +<td><a href="accessibility/aria-activedescendant-crash-stderr.txt">stderr</a></td> +</tr> +<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/http/tests/security/canvas-remote-read-svg-image.html">http/tests/security/canvas-remote-read-svg-image.html</a></td> +<td><a href="http/tests/security/canvas-remote-read-svg-image-stderr.txt">stderr</a></td> +</tr> +</table><p>Tests that had no expected results (probably new):</p> +<table> +<tr> +<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/fast/repaint/no-caret-repaint-in-non-content-editable-element.html">fast/repaint/no-caret-repaint-in-non-content-editable-element.html</a></td> +<td><a href="fast/repaint/no-caret-repaint-in-non-content-editable-element-actual.txt">result</a></td> +</tr> +</table></body> +</html> +""" + + _expected_layout_test_results = { + 'Tests that had stderr output:' : [ + 'accessibility/aria-activedescendant-crash.html' + ], + 'Tests that had no expected results (probably new):' : [ + 'fast/repaint/no-caret-repaint-in-non-content-editable-element.html' + ] + } + def test_parse_layout_test_results(self): + results = LayoutTestResults._parse_results_html(self._example_results_html) + self.assertEqual(self._expected_layout_test_results, results) + + +class BuildBotTest(unittest.TestCase): + + _example_one_box_status = ''' + <table> + <tr> + <td class="box"><a href="builders/Windows%20Debug%20%28Tests%29">Windows Debug (Tests)</a></td> + <td align="center" class="LastBuild box success"><a href="builders/Windows%20Debug%20%28Tests%29/builds/3693">47380</a><br />build<br />successful</td> + <td align="center" class="Activity building">building<br />ETA in<br />~ 14 mins<br />at 13:40</td> + <tr> + <td class="box"><a href="builders/SnowLeopard%20Intel%20Release">SnowLeopard Intel 
Release</a></td> + <td class="LastBuild box" >no build</td> + <td align="center" class="Activity building">building<br />< 1 min</td> + <tr> + <td class="box"><a href="builders/Qt%20Linux%20Release">Qt Linux Release</a></td> + <td align="center" class="LastBuild box failure"><a href="builders/Qt%20Linux%20Release/builds/654">47383</a><br />failed<br />compile-webkit</td> + <td align="center" class="Activity idle">idle<br />3 pending</td> + </table> +''' + _expected_example_one_box_parsings = [ + { + 'is_green': True, + 'build_number' : 3693, + 'name': u'Windows Debug (Tests)', + 'built_revision': 47380, + 'activity': 'building', + 'pending_builds': 0, + }, + { + 'is_green': False, + 'build_number' : None, + 'name': u'SnowLeopard Intel Release', + 'built_revision': None, + 'activity': 'building', + 'pending_builds': 0, + }, + { + 'is_green': False, + 'build_number' : 654, + 'name': u'Qt Linux Release', + 'built_revision': 47383, + 'activity': 'idle', + 'pending_builds': 3, + }, + ] + + def test_status_parsing(self): + buildbot = BuildBot() + + soup = BeautifulSoup(self._example_one_box_status) + status_table = soup.find("table") + input_rows = status_table.findAll('tr') + + for x in range(len(input_rows)): + status_row = input_rows[x] + expected_parsing = self._expected_example_one_box_parsings[x] + + builder = buildbot._parse_builder_status_from_row(status_row) + + # Make sure we aren't parsing more or less than we expect + self.assertEquals(builder.keys(), expected_parsing.keys()) + + for key, expected_value in expected_parsing.items(): + self.assertEquals(builder[key], expected_value, ("Builder %d parse failure for key: %s: Actual='%s' Expected='%s'" % (x, key, builder[key], expected_value))) + + def test_core_builder_methods(self): + buildbot = BuildBot() + + # Override builder_statuses function to not touch the network. + def example_builder_statuses(): # We could use instancemethod() to bind 'self' but we don't need to. 
+ return BuildBotTest._expected_example_one_box_parsings + buildbot.builder_statuses = example_builder_statuses + + buildbot.core_builder_names_regexps = [ 'Leopard', "Windows.*Build" ] + self.assertEquals(buildbot.red_core_builders_names(), []) + self.assertTrue(buildbot.core_builders_are_green()) + + buildbot.core_builder_names_regexps = [ 'SnowLeopard', 'Qt' ] + self.assertEquals(buildbot.red_core_builders_names(), [ u'SnowLeopard Intel Release', u'Qt Linux Release' ]) + self.assertFalse(buildbot.core_builders_are_green()) + + def test_builder_name_regexps(self): + buildbot = BuildBot() + + # For complete testing, this list should match the list of builders at build.webkit.org: + example_builders = [ + {'name': u'Tiger Intel Release', }, + {'name': u'Leopard Intel Release (Build)', }, + {'name': u'Leopard Intel Release (Tests)', }, + {'name': u'Leopard Intel Debug (Build)', }, + {'name': u'Leopard Intel Debug (Tests)', }, + {'name': u'SnowLeopard Intel Release (Build)', }, + {'name': u'SnowLeopard Intel Release (Tests)', }, + {'name': u'SnowLeopard Intel Leaks', }, + {'name': u'Windows Release (Build)', }, + {'name': u'Windows Release (Tests)', }, + {'name': u'Windows Debug (Build)', }, + {'name': u'Windows Debug (Tests)', }, + {'name': u'GTK Linux 32-bit Release', }, + {'name': u'GTK Linux 32-bit Debug', }, + {'name': u'GTK Linux 64-bit Debug', }, + {'name': u'GTK Linux 64-bit Release', }, + {'name': u'Qt Linux Release', }, + {'name': u'Qt Linux Release minimal', }, + {'name': u'Qt Linux ARMv5 Release', }, + {'name': u'Qt Linux ARMv7 Release', }, + {'name': u'Qt Windows 32-bit Release', }, + {'name': u'Qt Windows 32-bit Debug', }, + {'name': u'Chromium Linux Release', }, + {'name': u'Chromium Mac Release', }, + {'name': u'Chromium Win Release', }, + {'name': u'New run-webkit-tests', }, + ] + name_regexps = [ + "SnowLeopard.*Build", + "SnowLeopard.*Test", + "Leopard", + "Tiger", + "Windows.*Build", + "Windows.*Debug.*Test", + "GTK", + "Qt", + "Chromium", + ] + 
expected_builders = [ + {'name': u'Tiger Intel Release', }, + {'name': u'Leopard Intel Release (Build)', }, + {'name': u'Leopard Intel Release (Tests)', }, + {'name': u'Leopard Intel Debug (Build)', }, + {'name': u'Leopard Intel Debug (Tests)', }, + {'name': u'SnowLeopard Intel Release (Build)', }, + {'name': u'SnowLeopard Intel Release (Tests)', }, + {'name': u'Windows Release (Build)', }, + {'name': u'Windows Debug (Build)', }, + {'name': u'Windows Debug (Tests)', }, + {'name': u'GTK Linux 32-bit Release', }, + {'name': u'GTK Linux 32-bit Debug', }, + {'name': u'GTK Linux 64-bit Debug', }, + {'name': u'GTK Linux 64-bit Release', }, + {'name': u'Qt Linux Release', }, + {'name': u'Qt Linux Release minimal', }, + {'name': u'Qt Linux ARMv5 Release', }, + {'name': u'Qt Linux ARMv7 Release', }, + {'name': u'Qt Windows 32-bit Release', }, + {'name': u'Qt Windows 32-bit Debug', }, + {'name': u'Chromium Linux Release', }, + {'name': u'Chromium Mac Release', }, + {'name': u'Chromium Win Release', }, + ] + + # This test should probably be updated if the default regexp list changes + self.assertEquals(buildbot.core_builder_names_regexps, name_regexps) + + builders = buildbot._builder_statuses_with_names_matching_regexps(example_builders, name_regexps) + self.assertEquals(builders, expected_builders) + + def test_builder_with_name(self): + buildbot = BuildBot() + + builder = buildbot.builder_with_name("Test Builder") + self.assertEqual(builder.name(), "Test Builder") + self.assertEqual(builder.url(), "http://build.webkit.org/builders/Test%20Builder") + self.assertEqual(builder.url_encoded_name(), "Test%20Builder") + self.assertEqual(builder.results_url(), "http://build.webkit.org/results/Test%20Builder") + + # Override _fetch_xmlrpc_build_dictionary function to not touch the network. 
+ def mock_fetch_xmlrpc_build_dictionary(self, build_number): + build_dictionary = { + "revision" : 2 * build_number, + "number" : int(build_number), + "results" : build_number % 2, # 0 means pass + } + return build_dictionary + buildbot._fetch_xmlrpc_build_dictionary = mock_fetch_xmlrpc_build_dictionary + + build = builder.build(10) + self.assertEqual(build.builder(), builder) + self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/10") + self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r20%20%2810%29") + self.assertEqual(build.revision(), 20) + self.assertEqual(build.is_green(), True) + + build = build.previous_build() + self.assertEqual(build.builder(), builder) + self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/9") + self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r18%20%289%29") + self.assertEqual(build.revision(), 18) + self.assertEqual(build.is_green(), False) + + self.assertEqual(builder.build(None), None) + + _example_directory_listing = ''' +<h1>Directory listing for /results/SnowLeopard Intel Leaks/</h1> + +<table> + <thead> + <tr> + <th>Filename</th> + <th>Size</th> + <th>Content type</th> + <th>Content encoding</th> + </tr> + </thead> + <tbody> +<tr class="odd"> + <td><a href="r47483%20%281%29/">r47483 (1)/</a></td> + <td></td> + <td>[Directory]</td> + <td></td> +</tr> +<tr class="odd"> + <td><a href="r47484%20%282%29.zip">r47484 (2).zip</a></td> + <td>89K</td> + <td>[application/zip]</td> + <td></td> +</tr> +''' + _expected_files = [ + { + "filename" : "r47483 (1)/", + "size" : "", + "type" : "[Directory]", + "encoding" : "", + }, + { + "filename" : "r47484 (2).zip", + "size" : "89K", + "type" : "[application/zip]", + "encoding" : "", + }, + ] + + def test_parse_build_to_revision_map(self): + buildbot = BuildBot() + files = buildbot._parse_twisted_directory_listing(self._example_directory_listing) + 
self.assertEqual(self._expected_files, files) + + # Revision, is_green + # Ordered from newest (highest number) to oldest. + fake_builder1 = [ + [2, False], + [1, True], + ] + fake_builder2 = [ + [2, False], + [1, True], + ] + fake_builders = [ + fake_builder1, + fake_builder2, + ] + def _build_from_fake(self, fake_builder, index): + if index >= len(fake_builder): + return None + fake_build = fake_builder[index] + build = Build( + builder=fake_builder, + build_number=index, + revision=fake_build[0], + is_green=fake_build[1], + ) + def mock_previous_build(): + return self._build_from_fake(fake_builder, index + 1) + build.previous_build = mock_previous_build + return build + + def _fake_builds_at_index(self, index): + return [self._build_from_fake(builder, index) for builder in self.fake_builders] + + def test_last_green_revision(self): + buildbot = BuildBot() + def mock_builds_from_builders(only_core_builders): + return self._fake_builds_at_index(0) + buildbot._latest_builds_from_builders = mock_builds_from_builders + self.assertEqual(buildbot.last_green_revision(), 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/WebKitTools/Scripts/webkitpy/common/net/credentials.py b/WebKitTools/Scripts/webkitpy/common/net/credentials.py new file mode 100644 index 0000000..1d5f83d --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/credentials.py @@ -0,0 +1,126 @@ +# Copyright (c) 2009 Google Inc. All rights reserved. +# Copyright (c) 2009 Apple Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Python module for reading stored web credentials from the OS. + +import getpass +import os +import platform +import re + +from webkitpy.common.checkout.scm import Git +from webkitpy.common.system.executive import Executive, ScriptError +from webkitpy.common.system.user import User +from webkitpy.common.system.deprecated_logging import log + + +class Credentials(object): + + def __init__(self, host, git_prefix=None, executive=None, cwd=os.getcwd()): + self.host = host + self.git_prefix = "%s." 
% git_prefix if git_prefix else "" + self.executive = executive or Executive() + self.cwd = cwd + + def _credentials_from_git(self): + return [Git.read_git_config(self.git_prefix + "username"), + Git.read_git_config(self.git_prefix + "password")] + + def _keychain_value_with_label(self, label, source_text): + match = re.search("%s\"(?P<value>.+)\"" % label, + source_text, + re.MULTILINE) + if match: + return match.group('value') + + def _is_mac_os_x(self): + return platform.mac_ver()[0] + + def _parse_security_tool_output(self, security_output): + username = self._keychain_value_with_label("^\s*\"acct\"<blob>=", + security_output) + password = self._keychain_value_with_label("^password: ", + security_output) + return [username, password] + + def _run_security_tool(self, username=None): + security_command = [ + "/usr/bin/security", + "find-internet-password", + "-g", + "-s", + self.host, + ] + if username: + security_command += ["-a", username] + + log("Reading Keychain for %s account and password. " + "Click \"Allow\" to continue..." % self.host) + try: + return self.executive.run_command(security_command) + except ScriptError: + # Failed to either find a keychain entry or somekind of OS-related + # error occured (for instance, couldn't find the /usr/sbin/security + # command). + log("Could not find a keychain entry for %s." 
% self.host) + return None + + def _credentials_from_keychain(self, username=None): + if not self._is_mac_os_x(): + return [username, None] + + security_output = self._run_security_tool(username) + if security_output: + return self._parse_security_tool_output(security_output) + else: + return [None, None] + + def read_credentials(self): + username = None + password = None + + try: + if Git.in_working_directory(self.cwd): + (username, password) = self._credentials_from_git() + except OSError, e: + # Catch and ignore OSError exceptions such as "no such file + # or directory" (OSError errno 2), which imply that the Git + # command cannot be found/is not installed. + pass + + if not username or not password: + (username, password) = self._credentials_from_keychain(username) + + if not username: + username = User.prompt("%s login: " % self.host) + if not password: + password = getpass.getpass("%s password for %s: " % (self.host, + username)) + + return [username, password] diff --git a/WebKitTools/Scripts/webkitpy/common/net/credentials_unittest.py b/WebKitTools/Scripts/webkitpy/common/net/credentials_unittest.py new file mode 100644 index 0000000..9a42bdd --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/credentials_unittest.py @@ -0,0 +1,117 @@ +# Copyright (C) 2009 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import tempfile +import unittest +from webkitpy.common.net.credentials import Credentials +from webkitpy.common.system.executive import Executive +from webkitpy.common.system.outputcapture import OutputCapture +from webkitpy.thirdparty.mock import Mock + +class CredentialsTest(unittest.TestCase): + example_security_output = """keychain: "/Users/test/Library/Keychains/login.keychain" +class: "inet" +attributes: + 0x00000007 <blob>="bugs.webkit.org (test@webkit.org)" + 0x00000008 <blob>=<NULL> + "acct"<blob>="test@webkit.org" + "atyp"<blob>="form" + "cdat"<timedate>=0x32303039303832353233353231365A00 "20090825235216Z\000" + "crtr"<uint32>=<NULL> + "cusi"<sint32>=<NULL> + "desc"<blob>="Web form password" + "icmt"<blob>="default" + "invi"<sint32>=<NULL> + "mdat"<timedate>=0x32303039303930393137323635315A00 "20090909172651Z\000" + "nega"<sint32>=<NULL> + "path"<blob>=<NULL> + "port"<uint32>=0x00000000 + "prot"<blob>=<NULL> + "ptcl"<uint32>="htps" + "scrp"<sint32>=<NULL> + "sdmn"<blob>=<NULL> + "srvr"<blob>="bugs.webkit.org" + 
"type"<uint32>=<NULL> +password: "SECRETSAUCE" +""" + + def test_keychain_lookup_on_non_mac(self): + class FakeCredentials(Credentials): + def _is_mac_os_x(self): + return False + credentials = FakeCredentials("bugs.webkit.org") + self.assertEqual(credentials._is_mac_os_x(), False) + self.assertEqual(credentials._credentials_from_keychain("foo"), ["foo", None]) + + def test_security_output_parse(self): + credentials = Credentials("bugs.webkit.org") + self.assertEqual(credentials._parse_security_tool_output(self.example_security_output), ["test@webkit.org", "SECRETSAUCE"]) + + def test_security_output_parse_entry_not_found(self): + credentials = Credentials("foo.example.com") + if not credentials._is_mac_os_x(): + return # This test does not run on a non-Mac. + + # Note, we ignore the captured output because it is already covered + # by the test case CredentialsTest._assert_security_call (below). + outputCapture = OutputCapture() + outputCapture.capture_output() + self.assertEqual(credentials._run_security_tool(), None) + outputCapture.restore_output() + + def _assert_security_call(self, username=None): + executive_mock = Mock() + credentials = Credentials("example.com", executive=executive_mock) + + expected_stderr = "Reading Keychain for example.com account and password. 
Click \"Allow\" to continue...\n" + OutputCapture().assert_outputs(self, credentials._run_security_tool, [username], expected_stderr=expected_stderr) + + security_args = ["/usr/bin/security", "find-internet-password", "-g", "-s", "example.com"] + if username: + security_args += ["-a", username] + executive_mock.run_command.assert_called_with(security_args) + + def test_security_calls(self): + self._assert_security_call() + self._assert_security_call(username="foo") + + def test_read_credentials_without_git_repo(self): + class FakeCredentials(Credentials): + def _is_mac_os_x(self): + return True + def _credentials_from_keychain(self, username): + return ["test@webkit.org", "SECRETSAUCE"] + + temp_dir_path = tempfile.mkdtemp(suffix="not_a_git_repo") + credentials = FakeCredentials("bugs.webkit.org", cwd=temp_dir_path) + self.assertEqual(credentials.read_credentials(), ["test@webkit.org", "SECRETSAUCE"]) + os.rmdir(temp_dir_path) + +if __name__ == '__main__': + unittest.main() diff --git a/WebKitTools/Scripts/webkitpy/common/net/irc/__init__.py b/WebKitTools/Scripts/webkitpy/common/net/irc/__init__.py new file mode 100644 index 0000000..ef65bee --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/irc/__init__.py @@ -0,0 +1 @@ +# Required for Python to search this directory for module files diff --git a/WebKitTools/Scripts/webkitpy/common/net/irc/ircbot.py b/WebKitTools/Scripts/webkitpy/common/net/irc/ircbot.py new file mode 100644 index 0000000..f742867 --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/irc/ircbot.py @@ -0,0 +1,91 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import webkitpy.common.config.irc as config_irc + +from webkitpy.common.thread.messagepump import MessagePump, MessagePumpDelegate +from webkitpy.thirdparty.autoinstalled.irc import ircbot +from webkitpy.thirdparty.autoinstalled.irc import irclib + + +class IRCBotDelegate(object): + def irc_message_received(self, nick, message): + raise NotImplementedError, "subclasses must implement" + + def irc_nickname(self): + raise NotImplementedError, "subclasses must implement" + + def irc_password(self): + raise NotImplementedError, "subclasses must implement" + + +class IRCBot(ircbot.SingleServerIRCBot, MessagePumpDelegate): + # FIXME: We should get this information from a config file. 
+ def __init__(self, + message_queue, + delegate): + self._message_queue = message_queue + self._delegate = delegate + ircbot.SingleServerIRCBot.__init__( + self, + [( + config_irc.server, + config_irc.port, + self._delegate.irc_password() + )], + self._delegate.irc_nickname(), + self._delegate.irc_nickname()) + self._channel = config_irc.channel + + # ircbot.SingleServerIRCBot methods + + def on_nicknameinuse(self, connection, event): + connection.nick(connection.get_nickname() + "_") + + def on_welcome(self, connection, event): + connection.join(self._channel) + self._message_pump = MessagePump(self, self._message_queue) + + def on_pubmsg(self, connection, event): + nick = irclib.nm_to_n(event.source()) + request = event.arguments()[0].split(":", 1) + if len(request) > 1 and irclib.irc_lower(request[0]) == irclib.irc_lower(self.connection.get_nickname()): + response = self._delegate.irc_message_received(nick, request[1]) + if response: + connection.privmsg(self._channel, response) + + # MessagePumpDelegate methods + + def schedule(self, interval, callback): + self.connection.execute_delayed(interval, callback) + + def message_available(self, message): + self.connection.privmsg(self._channel, message) + + def final_message_delivered(self): + self.die() diff --git a/WebKitTools/Scripts/webkitpy/common/net/irc/ircproxy.py b/WebKitTools/Scripts/webkitpy/common/net/irc/ircproxy.py new file mode 100644 index 0000000..13348b4 --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/irc/ircproxy.py @@ -0,0 +1,62 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import threading + +from webkitpy.common.net.irc.ircbot import IRCBot +from webkitpy.common.thread.threadedmessagequeue import ThreadedMessageQueue +from webkitpy.common.system.deprecated_logging import log + + +class _IRCThread(threading.Thread): + def __init__(self, message_queue, irc_delegate, irc_bot): + threading.Thread.__init__(self) + self.setDaemon(True) + self._message_queue = message_queue + self._irc_delegate = irc_delegate + self._irc_bot = irc_bot + + def run(self): + bot = self._irc_bot(self._message_queue, self._irc_delegate) + bot.start() + + +class IRCProxy(object): + def __init__(self, irc_delegate, irc_bot=IRCBot): + log("Connecting to IRC") + self._message_queue = ThreadedMessageQueue() + self._child_thread = _IRCThread(self._message_queue, irc_delegate, irc_bot) + self._child_thread.start() + + def post(self, message): + self._message_queue.post(message) + + def disconnect(self): + log("Disconnecting from IRC...") + self._message_queue.stop() + self._child_thread.join() diff --git a/WebKitTools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py b/WebKitTools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py new file mode 100644 index 0000000..b44ce40 --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py @@ -0,0 +1,43 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import unittest + +from webkitpy.common.net.irc.ircproxy import IRCProxy +from webkitpy.common.system.outputcapture import OutputCapture +from webkitpy.thirdparty.mock import Mock + +class IRCProxyTest(unittest.TestCase): + def test_trivial(self): + def fun(): + proxy = IRCProxy(Mock(), Mock()) + proxy.post("hello") + proxy.disconnect() + + expected_stderr = "Connecting to IRC\nDisconnecting from IRC...\n" + OutputCapture().assert_outputs(self, fun, expected_stderr=expected_stderr) diff --git a/WebKitTools/Scripts/webkitpy/common/net/networktransaction.py b/WebKitTools/Scripts/webkitpy/common/net/networktransaction.py new file mode 100644 index 0000000..c82fc6f --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/networktransaction.py @@ -0,0 +1,68 @@ +# Copyright (C) 2010 Google Inc. All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import logging +import time + +from webkitpy.thirdparty.autoinstalled.mechanize import HTTPError +from webkitpy.common.system.deprecated_logging import log + + +_log = logging.getLogger(__name__) + + +class NetworkTimeout(Exception): + pass + + +class NetworkTransaction(object): + def __init__(self, initial_backoff_seconds=10, grown_factor=1.5, timeout_seconds=10*60): + self._initial_backoff_seconds = initial_backoff_seconds + self._grown_factor = grown_factor + self._timeout_seconds = timeout_seconds + + def run(self, request): + self._total_sleep = 0 + self._backoff_seconds = self._initial_backoff_seconds + while True: + try: + return request() + except HTTPError, e: + self._check_for_timeout() + _log.warn("Received HTTP status %s from server. Retrying in " + "%s seconds..." % (e.code, self._backoff_seconds)) + self._sleep() + + def _check_for_timeout(self): + if self._total_sleep + self._backoff_seconds > self._timeout_seconds: + raise NetworkTimeout() + + def _sleep(self): + time.sleep(self._backoff_seconds) + self._total_sleep += self._backoff_seconds + self._backoff_seconds *= self._grown_factor diff --git a/WebKitTools/Scripts/webkitpy/common/net/networktransaction_unittest.py b/WebKitTools/Scripts/webkitpy/common/net/networktransaction_unittest.py new file mode 100644 index 0000000..cd0702b --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/networktransaction_unittest.py @@ -0,0 +1,86 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. 
+# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import unittest + +from webkitpy.common.net.networktransaction import NetworkTransaction, NetworkTimeout +from webkitpy.common.system.logtesting import LoggingTestCase +from webkitpy.thirdparty.autoinstalled.mechanize import HTTPError + + +class NetworkTransactionTest(LoggingTestCase): + exception = Exception("Test exception") + + def test_success(self): + transaction = NetworkTransaction() + self.assertEqual(transaction.run(lambda: 42), 42) + + def _raise_exception(self): + raise self.exception + + def test_exception(self): + transaction = NetworkTransaction() + did_process_exception = False + did_throw_exception = True + try: + transaction.run(lambda: self._raise_exception()) + did_throw_exception = False + except Exception, e: + did_process_exception = True + self.assertEqual(e, self.exception) + self.assertTrue(did_throw_exception) + self.assertTrue(did_process_exception) + + def _raise_http_error(self): + self._run_count += 1 + if self._run_count < 3: + raise HTTPError("http://example.com/", 
500, "inteneral server error", None, None) + return 42 + + def test_retry(self): + self._run_count = 0 + transaction = NetworkTransaction(initial_backoff_seconds=0) + self.assertEqual(transaction.run(lambda: self._raise_http_error()), 42) + self.assertEqual(self._run_count, 3) + self.assertLog(['WARNING: Received HTTP status 500 from server. ' + 'Retrying in 0 seconds...\n', + 'WARNING: Received HTTP status 500 from server. ' + 'Retrying in 0.0 seconds...\n']) + + def test_timeout(self): + self._run_count = 0 + transaction = NetworkTransaction(initial_backoff_seconds=60*60, timeout_seconds=60) + did_process_exception = False + did_throw_exception = True + try: + transaction.run(lambda: self._raise_http_error()) + did_throw_exception = False + except NetworkTimeout, e: + did_process_exception = True + self.assertTrue(did_throw_exception) + self.assertTrue(did_process_exception) diff --git a/WebKitTools/Scripts/webkitpy/common/net/rietveld.py b/WebKitTools/Scripts/webkitpy/common/net/rietveld.py new file mode 100644 index 0000000..a9e5b1a --- /dev/null +++ b/WebKitTools/Scripts/webkitpy/common/net/rietveld.py @@ -0,0 +1,89 @@ +# Copyright (c) 2010 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
import os
import re
import stat

import webkitpy.common.config as config
from webkitpy.common.system.deprecated_logging import log
from webkitpy.common.system.executive import ScriptError
import webkitpy.thirdparty.autoinstalled.rietveld.upload as upload


def parse_codereview_issue(message):
    """Return the Rietveld issue number found in message, or None."""
    if not message:
        return None
    issue_match = re.search(config.codereview_server_regex +
                            "(?P<codereview_issue>\d+)",
                            message)
    if issue_match:
        return int(issue_match.group('codereview_issue'))


class Rietveld(object):
    """Posts patches to the Rietveld code-review server via upload.py."""

    def __init__(self, executive, dryrun=False):
        self.dryrun = dryrun
        self._executive = executive
        self._upload_py = upload.__file__
        # Chop off the last character so we modify permissions on the py file instead of the pyc.
        if os.path.splitext(self._upload_py)[1] == ".pyc":
            self._upload_py = self._upload_py[:-1]
        # upload.py is executed as a subprocess, so it must be executable.
        mode = os.stat(self._upload_py).st_mode
        os.chmod(self._upload_py,
                 mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    def url_for_issue(self, codereview_issue):
        """Return the web URL for an issue number, or None for no issue."""
        if not codereview_issue:
            return None
        return "%s%s" % (config.codereview_server_url, codereview_issue)

    def post(self, message=None, codereview_issue=None, cc=None):
        """Upload the current change; return the created issue number.

        Raises ScriptError when no message is supplied.  In dryrun mode
        only logs the command that would have been run.
        """
        if not message:
            raise ScriptError("Rietveld requires a message.")

        args = [self._upload_py,
                "--assume_yes",
                "--server=%s" % config.codereview_server_host,
                "--message=%s" % message]
        if codereview_issue:
            args.append("--issue=%s" % codereview_issue)
        if cc:
            args.append("--cc=%s" % cc)

        if self.dryrun:
            log("Would have run %s" % args)
            return

        # Scrape the issue number out of upload.py's stdout.
        output = self._executive.run_and_throw_if_fail(args)
        issue_match = re.search("Issue created\. URL: " +
                                config.codereview_server_regex +
                                "(?P<codereview_issue>\d+)",
                                output)
        if issue_match:
            return int(issue_match.group('codereview_issue'))
import unittest

from webkitpy.common.net.rietveld import Rietveld
from webkitpy.thirdparty.mock import Mock


class RietveldTest(unittest.TestCase):
    """Tests Rietveld helpers that need no network access."""

    def test_url_for_issue(self):
        rietveld = Rietveld(Mock())
        self.assertEqual(rietveld.url_for_issue(34223),
                         "https://wkrietveld.appspot.com/34223")
        # A missing issue number has no URL; previously untested branch.
        self.assertEqual(rietveld.url_for_issue(None), None)
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +from webkitpy.common.net.networktransaction import NetworkTransaction +from webkitpy.common.system.deprecated_logging import log +from webkitpy.thirdparty.autoinstalled.mechanize import Browser +from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup + +import urllib2 + + +class StatusServer: + default_host = "webkit-commit-queue.appspot.com" + + def __init__(self, host=default_host): + self.set_host(host) + self.browser = Browser() + + def set_host(self, host): + self.host = host + self.url = "http://%s" % self.host + + def results_url_for_status(self, status_id): + return "%s/results/%s" % (self.url, status_id) + + def _add_patch(self, patch): + if not patch: + return + if patch.bug_id(): + self.browser["bug_id"] = str(patch.bug_id()) + if patch.id(): + self.browser["patch_id"] = str(patch.id()) + + def _add_results_file(self, results_file): + if not results_file: + return + self.browser.add_file(results_file, "text/plain", "results.txt", 'results_file') + + def _post_status_to_server(self, queue_name, status, patch, results_file): + if results_file: + # We might need to re-wind the file if we've already tried to post it. + results_file.seek(0) + + update_status_url = "%s/update-status" % self.url + self.browser.open(update_status_url) + self.browser.select_form(name="update_status") + self.browser["queue_name"] = queue_name + self._add_patch(patch) + self.browser["status"] = status + self._add_results_file(results_file) + return self.browser.submit().read() # This is the id of the newly created status object. 
+ + def _post_svn_revision_to_server(self, svn_revision_number, broken_bot): + update_svn_revision_url = "%s/update-svn-revision" % self.url + self.browser.open(update_svn_revision_url) + self.browser.select_form(name="update_svn_revision") + self.browser["number"] = str(svn_revision_number) + self.browser["broken_bot"] = broken_bot + return self.browser.submit().read() + + def update_status(self, queue_name, status, patch=None, results_file=None): + log(status) + return NetworkTransaction().run(lambda: self._post_status_to_server(queue_name, status, patch, results_file)) + + def update_svn_revision(self, svn_revision_number, broken_bot): + log("SVN revision: %s broke %s" % (svn_revision_number, broken_bot)) + return NetworkTransaction().run(lambda: self._post_svn_revision_to_server(svn_revision_number, broken_bot)) + + def _fetch_url(self, url): + try: + return urllib2.urlopen(url).read() + except urllib2.HTTPError, e: + if e.code == 404: + return None + raise e + + def patch_status(self, queue_name, patch_id): + patch_status_url = "%s/patch-status/%s/%s" % (self.url, queue_name, patch_id) + return self._fetch_url(patch_status_url) + + def svn_revision(self, svn_revision_number): + svn_revision_url = "%s/svn-revision/%s" % (self.url, svn_revision_number) + return self._fetch_url(svn_revision_url) |