# Copyright (c) 2009, Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# WebKit's Python module for interacting with Bugzilla

import getpass
import platform
import re
import subprocess
import urllib2

from datetime import datetime # used in timestamp()

# Import WebKit-specific modules.
from modules.logging import error, log
from modules.committers import CommitterList

# WebKit includes a built copy of BeautifulSoup in Scripts/modules
# so this import should always succeed.
from .BeautifulSoup import BeautifulSoup, SoupStrainer try: from mechanize import Browser except ImportError, e: print """ mechanize is required. To install: sudo easy_install mechanize Or from the web: http://wwwsearch.sourceforge.net/mechanize/ """ exit(1) def credentials_from_git(): return [read_config("username"), read_config("password")] def credentials_from_keychain(username=None): if not is_mac_os_x(): return [username, None] command = "/usr/bin/security %s -g -s %s" % ("find-internet-password", Bugzilla.bug_server_host) if username: command += " -a %s" % username log('Reading Keychain for %s account and password. Click "Allow" to continue...' % Bugzilla.bug_server_host) keychain_process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) value = keychain_process.communicate()[0] exit_code = keychain_process.wait() if exit_code: return [username, None] match = re.search('^\s*"acct"="(?P.+)"', value, re.MULTILINE) if match: username = match.group('username') password = None match = re.search('^password: "(?P.+)"', value, re.MULTILINE) if match: password = match.group('password') return [username, password] def is_mac_os_x(): return platform.mac_ver()[0] def parse_bug_id(message): match = re.search("http\://webkit\.org/b/(?P\d+)", message) if match: return int(match.group('bug_id')) match = re.search(Bugzilla.bug_server_regex + "show_bug\.cgi\?id=(?P\d+)", message) if match: return int(match.group('bug_id')) return None # FIXME: This should not depend on git for config storage def read_config(key): # Need a way to read from svn too config_process = subprocess.Popen("git config --get bugzilla.%s" % key, stdout=subprocess.PIPE, shell=True) value = config_process.communicate()[0] return_code = config_process.wait() if return_code: return None return value.rstrip('\n') def read_credentials(): (username, password) = credentials_from_git() if not username or not password: (username, password) = 
credentials_from_keychain(username) if not username: username = raw_input("Bugzilla login: ") if not password: password = getpass.getpass("Bugzilla password for %s: " % username) return [username, password] def timestamp(): return datetime.now().strftime("%Y%m%d%H%M%S") class BugzillaError(Exception): pass class Bugzilla: def __init__(self, dryrun=False, committers=CommitterList()): self.dryrun = dryrun self.authenticated = False self.browser = Browser() # Ignore bugs.webkit.org/robots.txt until we fix it to allow this script self.browser.set_handle_robots(False) self.committers = committers # Defaults (until we support better option parsing): bug_server_host = "bugs.webkit.org" bug_server_regex = "https?://%s/" % re.sub('\.', '\\.', bug_server_host) bug_server_url = "https://%s/" % bug_server_host def bug_url_for_bug_id(self, bug_id, xml=False): content_type = "&ctype=xml" if xml else "" return "%sshow_bug.cgi?id=%s%s" % (self.bug_server_url, bug_id, content_type) def short_bug_url_for_bug_id(self, bug_id): return "http://webkit.org/b/%s" % bug_id def attachment_url_for_id(self, attachment_id, action="view"): action_param = "" if action and action != "view": action_param = "&action=%s" % action return "%sattachment.cgi?id=%s%s" % (self.bug_server_url, attachment_id, action_param) def _parse_attachment_flag(self, element, flag_name, attachment, result_key): flag = element.find('flag', attrs={'name' : flag_name}) if flag: attachment[flag_name] = flag['status'] if flag['status'] == '+': attachment[result_key] = flag['setter'] def _parse_attachment_element(self, element, bug_id): attachment = {} attachment['bug_id'] = bug_id attachment['is_obsolete'] = (element.has_key('isobsolete') and element['isobsolete'] == "1") attachment['is_patch'] = (element.has_key('ispatch') and element['ispatch'] == "1") attachment['id'] = int(element.find('attachid').string) attachment['url'] = self.attachment_url_for_id(attachment['id']) attachment['name'] = 
unicode(element.find('desc').string) attachment['attacher_email'] = str(element.find('attacher').string) attachment['type'] = str(element.find('type').string) self._parse_attachment_flag(element, 'review', attachment, 'reviewer_email') self._parse_attachment_flag(element, 'commit-queue', attachment, 'committer_email') return attachment def fetch_attachments_from_bug(self, bug_id): bug_url = self.bug_url_for_bug_id(bug_id, xml=True) log("Fetching: %s" % bug_url) page = urllib2.urlopen(bug_url) soup = BeautifulSoup(page) attachments = [] for element in soup.findAll('attachment'): attachment = self._parse_attachment_element(element, bug_id) attachments.append(attachment) return attachments def _parse_bug_id_from_attachment_page(self, page): up_link = BeautifulSoup(page).find('link', rel='Up') # The "Up" relation happens to point to the bug. if not up_link: return None # This attachment does not exist (or you don't have permissions to view it). match = re.search("show_bug.cgi\?id=(?P\d+)", up_link['href']) return int(match.group('bug_id')) def bug_id_for_attachment_id(self, attachment_id): attachment_url = self.attachment_url_for_id(attachment_id, 'edit') log("Fetching: %s" % attachment_url) page = urllib2.urlopen(attachment_url) return self._parse_bug_id_from_attachment_page(page) # This should really return an Attachment object # which can lazily fetch any missing data. def fetch_attachment(self, attachment_id): # We could grab all the attachment details off of the attachment edit page # but we already have working code to do so off of the bugs page, so re-use that. bug_id = self.bug_id_for_attachment_id(attachment_id) if not bug_id: return None attachments = self.fetch_attachments_from_bug(bug_id) for attachment in attachments: # FIXME: Once we have a real Attachment class we shouldn't paper over this possible comparison failure # and we should remove the int() == int() hacks and leave it just ==. 
if int(attachment['id']) == int(attachment_id): self._validate_committer_and_reviewer(attachment) return attachment return None # This should never be hit. def fetch_title_from_bug(self, bug_id): bug_url = self.bug_url_for_bug_id(bug_id, xml=True) page = urllib2.urlopen(bug_url) soup = BeautifulSoup(page) return soup.find('short_desc').string def fetch_patches_from_bug(self, bug_id): patches = [] for attachment in self.fetch_attachments_from_bug(bug_id): if attachment['is_patch'] and not attachment['is_obsolete']: patches.append(attachment) return patches # _view_source_link belongs in some sort of webkit_config.py module. def _view_source_link(self, local_path): return "http://trac.webkit.org/browser/trunk/%s" % local_path def _flag_permission_rejection_message(self, setter_email, flag_name): committer_list = "WebKitTools/Scripts/modules/committers.py" contribution_guidlines_url = "http://webkit.org/coding/contributing.html" rejection_message = "%s does not have %s permissions according to %s." % (setter_email, flag_name, self._view_source_link(committer_list)) rejection_message += "\n\n- If you have %s rights please correct the error in %s by adding yourself to the file (no review needed) and then set the %s flag again." % (flag_name, committer_list, flag_name) rejection_message += "\n\n- If you do not have %s rights please read %s for instructions on how to use bugzilla flags." 
% (flag_name, contribution_guidlines_url) return rejection_message def _validate_setter_email(self, patch, result_key, lookup_function, rejection_function, reject_invalid_patches): setter_email = patch.get(result_key + '_email') if not setter_email: return None committer = lookup_function(setter_email) if committer: patch[result_key] = committer.full_name return patch[result_key] if reject_invalid_patches: rejection_function(patch['id'], self._flag_permission_rejection_message(setter_email, result_key)) else: log("Warning, attachment %s on bug %s has invalid %s (%s)" % (patch['id'], patch['bug_id'], result_key, setter_email)) return None def _validate_reviewer(self, patch, reject_invalid_patches): return self._validate_setter_email(patch, 'reviewer', self.committers.reviewer_by_email, self.reject_patch_from_review_queue, reject_invalid_patches) def _validate_committer(self, patch, reject_invalid_patches): return self._validate_setter_email(patch, 'committer', self.committers.committer_by_email, self.reject_patch_from_commit_queue, reject_invalid_patches) # FIXME: This is a hack until we have a real Attachment object. # _validate_committer and _validate_reviewer fill in the 'reviewer' and 'committer' # keys which other parts of the code expect to be filled in. def _validate_committer_and_reviewer(self, patch): self._validate_reviewer(patch, reject_invalid_patches=False) self._validate_committer(patch, reject_invalid_patches=False) def fetch_unreviewed_patches_from_bug(self, bug_id): unreviewed_patches = [] for attachment in self.fetch_attachments_from_bug(bug_id): if attachment.get('review') == '?' 
and not attachment['is_obsolete']: unreviewed_patches.append(attachment) return unreviewed_patches def fetch_reviewed_patches_from_bug(self, bug_id, reject_invalid_patches=False): reviewed_patches = [] for attachment in self.fetch_attachments_from_bug(bug_id): if self._validate_reviewer(attachment, reject_invalid_patches) and not attachment['is_obsolete']: reviewed_patches.append(attachment) return reviewed_patches def fetch_commit_queue_patches_from_bug(self, bug_id, reject_invalid_patches=False): commit_queue_patches = [] for attachment in self.fetch_reviewed_patches_from_bug(bug_id, reject_invalid_patches): if self._validate_committer(attachment, reject_invalid_patches) and not attachment['is_obsolete']: commit_queue_patches.append(attachment) return commit_queue_patches def _fetch_bug_ids_advanced_query(self, query): page = urllib2.urlopen(query) soup = BeautifulSoup(page) bug_ids = [] # Grab the cells in the first column (which happens to be the bug ids) for bug_link_cell in soup('td', "first-child"): # tds with the class "first-child" bug_link = bug_link_cell.find("a") bug_ids.append(int(bug_link.string)) # the contents happen to be the bug id return bug_ids def _parse_attachment_ids_request_query(self, page): digits = re.compile("\d+") attachment_href = re.compile("attachment.cgi\?id=\d+&action=review") attachment_links = SoupStrainer("a", href=attachment_href) return [int(digits.search(tag["href"]).group(0)) for tag in BeautifulSoup(page, parseOnlyThese=attachment_links)] def _fetch_attachment_ids_request_query(self, query): return self._parse_attachment_ids_request_query(urllib2.urlopen(query)) def fetch_bug_ids_from_commit_queue(self): commit_queue_url = self.bug_server_url + "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=commit-queue%2B" return self._fetch_bug_ids_advanced_query(commit_queue_url) # List of all r+'d bugs. 
def fetch_bug_ids_from_needs_commit_list(self): needs_commit_query_url = self.bug_server_url + "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review%2B" return self._fetch_bug_ids_advanced_query(needs_commit_query_url) def fetch_bug_ids_from_review_queue(self): review_queue_url = self.bug_server_url + "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review?" return self._fetch_bug_ids_advanced_query(review_queue_url) def fetch_attachment_ids_from_review_queue(self): review_queue_url = self.bug_server_url + "request.cgi?action=queue&type=review&group=type" return self._fetch_attachment_ids_request_query(review_queue_url) def fetch_patches_from_commit_queue(self, reject_invalid_patches=False): patches_to_land = [] for bug_id in self.fetch_bug_ids_from_commit_queue(): patches = self.fetch_commit_queue_patches_from_bug(bug_id, reject_invalid_patches) patches_to_land += patches return patches_to_land def fetch_patches_from_pending_commit_list(self): patches_needing_commit = [] for bug_id in self.fetch_bug_ids_from_needs_commit_list(): patches = self.fetch_reviewed_patches_from_bug(bug_id) patches_needing_commit += patches return patches_needing_commit def fetch_patches_from_review_queue(self, limit=None): patches_to_review = [] for bug_id in self.fetch_bug_ids_from_review_queue(): if limit and len(patches_to_review) >= limit: break patches = self.fetch_unreviewed_patches_from_bug(bug_id) patches_to_review += patches return patches_to_review def authenticate(self): if self.authenticated: return if self.dryrun: log("Skipping log in for dry run...") self.authenticated = True return (username, password) = read_credentials() log("Logging in as %s..." 
% username) self.browser.open(self.bug_server_url + "index.cgi?GoAheadAndLogIn=1") self.browser.select_form(name="login") self.browser['Bugzilla_login'] = username self.browser['Bugzilla_password'] = password response = self.browser.submit() match = re.search("(.+?)", response.read()) # If the resulting page has a title, and it contains the word "invalid" assume it's the login failure page. if match and re.search("Invalid", match.group(1), re.IGNORECASE): # FIXME: We could add the ability to try again on failure. raise BugzillaError("Bugzilla login failed: %s" % match.group(1)) self.authenticated = True def _fill_attachment_form(self, description, patch_file_object, comment_text=None, mark_for_review=False, mark_for_commit_queue=False, bug_id=None): self.browser['description'] = description self.browser['ispatch'] = ("1",) self.browser['flag_type-1'] = ('?',) if mark_for_review else ('X',) self.browser['flag_type-3'] = ('?',) if mark_for_commit_queue else ('X',) if bug_id: patch_name = "bug-%s-%s.patch" % (bug_id, timestamp()) else: patch_name ="%s.patch" % timestamp() self.browser.add_file(patch_file_object, "text/plain", patch_name, 'data') def add_patch_to_bug(self, bug_id, patch_file_object, description, comment_text=None, mark_for_review=False, mark_for_commit_queue=False): self.authenticate() log('Adding patch "%s" to bug %s' % (description, bug_id)) if self.dryrun: log(comment_text) return self.browser.open("%sattachment.cgi?action=enter&bugid=%s" % (self.bug_server_url, bug_id)) self.browser.select_form(name="entryform") self._fill_attachment_form(description, patch_file_object, mark_for_review=mark_for_review, mark_for_commit_queue=mark_for_commit_queue, bug_id=bug_id) if comment_text: log(comment_text) self.browser['comment'] = comment_text self.browser.submit() def prompt_for_component(self, components): log("Please pick a component:") i = 0 for name in components: i += 1 log("%2d. 
%s" % (i, name)) result = int(raw_input("Enter a number: ")) - 1 return components[result] def _check_create_bug_response(self, response_html): match = re.search("Bug (?P<bug_id>\d+) Submitted", response_html) if match: return match.group('bug_id') match = re.search('
(?P.+)