author     Anthony King <anthonydking@slimroms.net>  2015-11-03 00:23:11 +0000
committer  Tom Powell <tpowell@cyngn.com>            2016-01-04 11:09:01 -0800
commit     c713d764c53e439352e79076eeecdbed9b2ccd6c (patch)
tree       ec2c6e1f7d912a03442405d8fa28cf23de9d889b /tools
parent     12b55e8654520aac7cb466e7c149a11a48cc3b0e (diff)
py3: update all the things
Change-Id: I5e11b46b7c2f7f8760d6c0e713ca99c1e88b7cd3
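Most of the churn below is mechanical: print statements become print() calls (with file= replacing ">>" redirection), "except X, e" clauses become "except X as e", and dict iteration moves off iteritems(). A minimal sketch of those recurring idioms, using a hypothetical props dict (illustrative only, not part of the patch):

from __future__ import print_function  # 'print' is a function on Python 2 as well

import sys

def iteritems(obj):
  """Iterate key/value pairs on both Python 2 and Python 3."""
  if hasattr(obj, 'iteritems'):
    return obj.iteritems()
  return obj.items()

def dump_props(props, out=sys.stdout):
  # Python 2 spelling:  print >> out, "%s=%s" % (key, value)
  try:
    for key, value in sorted(iteritems(props)):
      print("%s=%s" % (key, value), file=out)
  except (IOError, OSError) as e:  # Python 2 spelling: except (IOError, OSError), e:
    print("failed to write properties: %s" % e, file=sys.stderr)
    sys.exit(1)

dump_props({"ro.product.device": "example"})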
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/adbs | 2
-rwxr-xr-x  tools/check_radio_versions.py | 22
-rwxr-xr-x  tools/compare_fileslist.py | 6
-rwxr-xr-x  tools/diff_package_overlays.py | 6
-rw-r--r--  tools/event_log_tags.py | 10
-rwxr-xr-x  tools/fileslist.py | 2
-rwxr-xr-x  tools/findleaves.py | 6
-rwxr-xr-x  tools/generate-notice-files.py | 83
-rwxr-xr-x  tools/getb64key.py | 7
-rwxr-xr-x  tools/java-event-log-tags.py | 2
-rwxr-xr-x  tools/java-layers.py | 6
-rwxr-xr-x  tools/merge-event-log-tags.py | 2
-rwxr-xr-x  tools/parsedeps.py | 8
-rwxr-xr-x  tools/post_process_props.py | 4
-rwxr-xr-x  tools/product_debug.py | 11
-rwxr-xr-x  tools/releasetools/add_img_to_target_files.py | 38
-rw-r--r--  tools/releasetools/blockimgdiff.py | 2
-rwxr-xr-x  tools/releasetools/build_image.py | 51
-rwxr-xr-x  tools/releasetools/check_target_files_signatures.py | 79
-rw-r--r--  tools/releasetools/common.py | 87
-rw-r--r--  tools/releasetools/edify_generator.py | 4
-rwxr-xr-x  tools/releasetools/img_from_target_files.py | 20
-rwxr-xr-x  tools/releasetools/make_recovery_patch.py | 4
-rwxr-xr-x  tools/releasetools/ota_from_target_files.py | 104
-rw-r--r--  tools/releasetools/rangelib.py | 2
-rwxr-xr-x  tools/releasetools/sign_target_files_apks.py | 70
-rw-r--r--  tools/releasetools/sparse_img.py | 2
-rwxr-xr-x  tools/repopick.py | 7
-rwxr-xr-x  tools/roomservice.py | 5
-rwxr-xr-x  tools/warn.py | 6
30 files changed, 366 insertions, 292 deletions
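The remaining edits are small compatibility shims for modules and builtins that moved or changed in Python 3, plus octal and bytes literal fixes. The sketch below collects those patterns in one place (illustrative; not lifted verbatim from any single file in this diff):

try:
  from commands import getstatusoutput    # Python 2 only
except ImportError:
  from subprocess import getstatusoutput  # the same helper lives in subprocess on Python 3

try:
  from cStringIO import StringIO  # Python 2 only
except ImportError:
  from io import StringIO

try:
  raw_input  # renamed to input() in Python 3
except NameError:
  raw_input = input

SCRIPT_MODE = 0o755  # octal literals need the 0o prefix on Python 3

line = b"-----BEGIN CERTIFICATE-----\n"  # data read from a file opened with 'rb' is bytes,
in_marker = line.startswith(b"-")        # so compare against bytes literals, not str

status, output = getstatusoutput("echo py3")
print("exit status %d, output %r" % (status, output))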
diff --git a/tools/adbs b/tools/adbs
index a8f06c0..9bd5160 100755
--- a/tools/adbs
+++ b/tools/adbs
@@ -14,6 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import print_function
+
import os
import os.path
import re
diff --git a/tools/check_radio_versions.py b/tools/check_radio_versions.py
index ebe621f..2617424 100755
--- a/tools/check_radio_versions.py
+++ b/tools/check_radio_versions.py
@@ -14,8 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import print_function
+
import sys
-import os
try:
from hashlib import sha1
@@ -52,8 +53,9 @@ for item in sys.argv[2:]:
try:
f = open(fn + ".sha1")
except IOError:
- if not bad: print
- print "*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key)
+ if not bad:
+ print()
+ print("*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key))
bad = True
continue
for line in f:
@@ -63,17 +65,19 @@ for item in sys.argv[2:]:
versions[h] = v
if digest not in versions:
- if not bad: print
- print "*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn)
+ if not bad:
+ print()
+ print("*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn))
bad = True
continue
if versions[digest] not in values:
- if not bad: print
- print "*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
- fn, versions[digest], key, sys.argv[1])
+ if not bad:
+ print()
+ print("*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
+ fn, versions[digest], key, sys.argv[1]))
bad = True
if bad:
- print
+ print()
sys.exit(1)
diff --git a/tools/compare_fileslist.py b/tools/compare_fileslist.py
index 148d740..64ad3ae 100755
--- a/tools/compare_fileslist.py
+++ b/tools/compare_fileslist.py
@@ -17,7 +17,7 @@
from __future__ import print_function
-import cgi, os, string, sys
+import cgi, os, sys
def iteritems(obj):
@@ -42,10 +42,10 @@ def main(argv):
data = {}
index = 0
for input in inputs:
- f = file(input, "r")
+ f = open(input)
lines = f.readlines()
f.close()
- lines = map(string.split, lines)
+ lines = [l.strip() for l in lines]
lines = [(x_y[1],int(x_y[0])) for x_y in lines]
for fn,sz in lines:
if fn not in data:
diff --git a/tools/diff_package_overlays.py b/tools/diff_package_overlays.py
index 0e2c773..687e1d0 100755
--- a/tools/diff_package_overlays.py
+++ b/tools/diff_package_overlays.py
@@ -34,11 +34,13 @@ Format of current_overlays.txt and previous_overlays.txt:
...
"""
+from __future__ import print_function
+
import sys
def main(argv):
if len(argv) != 4:
- print >> sys.stderr, __doc__
+    print(__doc__, file=sys.stderr)
sys.exit(1)
f = open(argv[1])
@@ -85,7 +87,7 @@ def main(argv):
# Print out the package names that have overlay change.
for r in result:
- print r
+ print(r)
if __name__ == "__main__":
main(sys.argv)
diff --git a/tools/event_log_tags.py b/tools/event_log_tags.py
index 645839e..93244a4 100644
--- a/tools/event_log_tags.py
+++ b/tools/event_log_tags.py
@@ -14,6 +14,8 @@
"""A module for reading and parsing event-log-tags files."""
+from __future__ import print_function
+
import re
import sys
@@ -55,7 +57,7 @@ class TagFile(object):
if file_object is None:
try:
file_object = open(filename, "rb")
- except (IOError, OSError), e:
+ except (IOError, OSError) as e:
self.AddError(str(e))
return
@@ -100,7 +102,7 @@ class TagFile(object):
self.tags.append(Tag(tag, tagname, description,
self.filename, self.linenum))
- except (IOError, OSError), e:
+ except (IOError, OSError) as e:
self.AddError(str(e))
@@ -130,6 +132,6 @@ def WriteOutput(output_file, data):
out = open(output_file, "wb")
out.write(data)
out.close()
- except (IOError, OSError), e:
- print >> sys.stderr, "failed to write %s: %s" % (output_file, e)
+ except (IOError, OSError) as e:
+ print("failed to write %s: %s" % (output_file, e), file=sys.stderr)
sys.exit(1)
diff --git a/tools/fileslist.py b/tools/fileslist.py
index 1442bd3..1538a30 100755
--- a/tools/fileslist.py
+++ b/tools/fileslist.py
@@ -21,7 +21,7 @@ import operator, os, sys
def get_file_size(path):
st = os.lstat(path)
- return st.st_size;
+ return st.st_size
def main(argv):
output = []
diff --git a/tools/findleaves.py b/tools/findleaves.py
index 3a9e508..d97ed74 100755
--- a/tools/findleaves.py
+++ b/tools/findleaves.py
@@ -20,12 +20,14 @@
# the search in a given subdirectory when the file is found.
#
+from __future__ import print_function
+
import os
import sys
def perform_find(mindepth, prune, dirlist, filename):
result = []
- pruneleaves = set(map(lambda x: os.path.split(x)[1], prune))
+ pruneleaves = set([os.path.split(x)[1] for x in prune])
for rootdir in dirlist:
rootdepth = rootdir.count("/")
for root, dirs, files in os.walk(rootdir, followlinks=True):
@@ -92,7 +94,7 @@ def main(argv):
results = list(set(perform_find(mindepth, prune, dirlist, filename)))
results.sort()
for r in results:
- print r
+ print(r)
if __name__ == "__main__":
main(sys.argv)
diff --git a/tools/generate-notice-files.py b/tools/generate-notice-files.py
index 4571b70..36630db 100755
--- a/tools/generate-notice-files.py
+++ b/tools/generate-notice-files.py
@@ -20,6 +20,9 @@ Generate the Android notice files, including both text and html files.
-h to display this usage message and exit.
"""
+
+from __future__ import print_function
+
from collections import defaultdict
import getopt
import hashlib
@@ -40,22 +43,22 @@ HTML_ESCAPE_TABLE = {
try:
opts, args = getopt.getopt(sys.argv[1:], "h")
-except getopt.GetoptError, err:
- print str(err)
- print __doc__
+except getopt.GetoptError as err:
+ print(str(err))
+ print(__doc__)
sys.exit(2)
for o, a in opts:
if o == "-h":
- print __doc__
+ print(__doc__)
sys.exit(2)
else:
- print >> sys.stderr, "unhandled option %s" % (o,)
+    print("unhandled option %s" % o, file=sys.stderr)
if len(args) != 4:
- print """need exactly four arguments, the two output files, the file title
- and the directory containing notices, not %d""" % (len(args),)
- print __doc__
+ print("""need exactly four arguments, the two output files, the file title
+ and the directory containing notices, not %d""" % len(args))
+ print(__doc__)
sys.exit(1)
def hexify(s):
@@ -107,13 +110,13 @@ def combine_notice_files_html(file_hash, input_dir, output_filename):
# Open the output file, and output the header pieces
output_file = open(output_filename, "wb")
- print >> output_file, "<html><head>"
- print >> output_file, HTML_OUTPUT_CSS
- print >> output_file, '</head><body topmargin="0" leftmargin="0" rightmargin="0" bottommargin="0">'
+    print("<html><head>", file=output_file)
+ print(HTML_OUTPUT_CSS, file=output_file)
+ print('</head><body topmargin="0" leftmargin="0" rightmargin="0" bottommargin="0">', file=output_file)
# Output our table of contents
- print >> output_file, '<div class="toc">'
- print >> output_file, "<ul>"
+ print('<div class="toc">', file=output_file)
+ print("<ul>", file=output_file)
# Flatten the list of lists into a single list of filenames
sorted_filenames = sorted(itertools.chain.from_iterable(file_hash.values()))
@@ -121,31 +124,31 @@ def combine_notice_files_html(file_hash, input_dir, output_filename):
# Print out a nice table of contents
for filename in sorted_filenames:
stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
- print >> output_file, '<li><a href="#id%d">%s</a></li>' % (id_table.get(filename), stripped_filename)
+ print('<li><a href="#id%d">%s</a></li>' % (id_table.get(filename), stripped_filename), file=output_file)
- print >> output_file, "</ul>"
- print >> output_file, "</div><!-- table of contents -->"
+ print("</ul>", file=output_file)
+ print("</div><!-- table of contents -->", file=output_file)
# Output the individual notice file lists
- print >>output_file, '<table cellpadding="0" cellspacing="0" border="0">'
+ print('<table cellpadding="0" cellspacing="0" border="0">', file=output_file)
for value in file_hash.values():
- print >> output_file, '<tr id="id%d"><td class="same-license">' % id_table.get(value[0])
- print >> output_file, '<div class="label">Notices for file(s):</div>'
- print >> output_file, '<div class="file-list">'
+ print('<tr id="id%d"><td class="same-license">' % id_table.get(value[0]), file=output_file)
+ print('<div class="label">Notices for file(s):</div>', file=output_file)
+ print('<div class="file-list">', file=output_file)
for filename in sorted(value):
- print >> output_file, "%s <br/>" % (SRC_DIR_STRIP_RE.sub(r"\1", filename))
- print >> output_file, "</div><!-- file-list -->"
- print >> output_file
- print >> output_file, '<pre class="license-text">'
- print >> output_file, html_escape(open(value[0]).read())
- print >> output_file, "</pre><!-- license-text -->"
- print >> output_file, "</td></tr><!-- same-license -->"
- print >> output_file
- print >> output_file
- print >> output_file
+ print("%s <br/>" % (SRC_DIR_STRIP_RE.sub(r"\1", filename)), file=output_file)
+ print("</div><!-- file-list -->", file=output_file)
+ print(file=output_file)
+ print('<pre class="license-text">', file=output_file)
+ print(html_escape(open(value[0]).read()), file=output_file)
+ print("</pre><!-- license-text -->", file=output_file)
+ print("</td></tr><!-- same-license -->", file=output_file)
+ print(file=output_file)
+ print(file=output_file)
+ print(file=output_file)
# Finish off the file output
- print >> output_file, "</table>"
- print >> output_file, "</body></html>"
+    print("</table>", file=output_file)
+ print("</body></html>", file=output_file)
output_file.close()
def combine_notice_files_text(file_hash, input_dir, output_filename, file_title):
@@ -153,14 +156,14 @@ def combine_notice_files_text(file_hash, input_dir, output_filename, file_title)
SRC_DIR_STRIP_RE = re.compile(input_dir + "(/.*).txt")
output_file = open(output_filename, "wb")
- print >> output_file, file_title
+ print(file_title, file=output_file)
for value in file_hash.values():
- print >> output_file, "============================================================"
- print >> output_file, "Notices for file(s):"
+ print("============================================================", file=output_file)
+ print("Notices for file(s):", file=output_file)
for filename in sorted(value):
- print >> output_file, SRC_DIR_STRIP_RE.sub(r"\1", filename)
- print >> output_file, "------------------------------------------------------------"
- print >> output_file, open(value[0]).read()
+ print(SRC_DIR_STRIP_RE.sub(r"\1", filename), file=output_file)
+ print("------------------------------------------------------------", file=output_file)
+ print(open(value[0]).read(), file=output_file)
output_file.close()
def main(args):
@@ -179,9 +182,9 @@ def main(args):
files_with_same_hash[file_md5sum].append(filename)
- print "Combining NOTICE files into HTML"
+ print("Combining NOTICE files into HTML")
combine_notice_files_html(files_with_same_hash, input_dir, html_output_file)
- print "Combining NOTICE files into text"
+ print("Combining NOTICE files into text")
combine_notice_files_text(files_with_same_hash, input_dir, txt_output_file, file_title)
if __name__ == "__main__":
diff --git a/tools/getb64key.py b/tools/getb64key.py
index a0cd1c3..976a157 100755
--- a/tools/getb64key.py
+++ b/tools/getb64key.py
@@ -1,17 +1,18 @@
#!/usr/bin/env python
+from __future__ import print_function
+
import base64
import sys
-import os
pkFile = open(sys.argv[1], 'rb').readlines()
base64Key = ""
inCert = False
for line in pkFile:
- if line.startswith("-"):
+ if line.startswith(b"-"):
inCert = not inCert
continue
base64Key += line.strip()
-print base64.b16encode(base64.b64decode(base64Key)).lower()
+print(base64.b16encode(base64.b64decode(base64Key)).lower())
diff --git a/tools/java-event-log-tags.py b/tools/java-event-log-tags.py
index 6ac69bd..24bad3c 100755
--- a/tools/java-event-log-tags.py
+++ b/tools/java-event-log-tags.py
@@ -149,7 +149,7 @@ for t in tagfile.tags:
buffer.write("\n }\n")
-buffer.write("}\n");
+buffer.write("}\n")
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
diff --git a/tools/java-layers.py b/tools/java-layers.py
index d7c0c8e..3b9d802 100755
--- a/tools/java-layers.py
+++ b/tools/java-layers.py
@@ -126,7 +126,7 @@ class Dependencies:
def parse_dependency_file(filename):
global err
- f = file(filename)
+ f = open(filename)
lines = f.readlines()
f.close()
def lineno(s, i):
@@ -180,7 +180,7 @@ def find_java_files(srcs):
result = []
for d in srcs:
if d[0] == '@':
- f = file(d[1:])
+ f = open(d[1:])
result.extend([fn for fn in [s.strip() for s in f.readlines()]
if len(fn) != 0])
f.close()
@@ -197,7 +197,7 @@ IMPORT = re.compile("import\s+(.*)")
def examine_java_file(deps, filename):
global err
# Yes, this is a crappy java parser. Write a better one if you want to.
- f = file(filename)
+ f = open(filename)
text = f.read()
f.close()
text = COMMENTS.sub("", text)
diff --git a/tools/merge-event-log-tags.py b/tools/merge-event-log-tags.py
index 65580f6..7790048 100755
--- a/tools/merge-event-log-tags.py
+++ b/tools/merge-event-log-tags.py
@@ -42,7 +42,7 @@ except ImportError:
import event_log_tags
-def itermitems(obj):
+def iteritems(obj):
if hasattr(obj, 'iteritems'):
return obj.iteritems()
return obj.items()
diff --git a/tools/parsedeps.py b/tools/parsedeps.py
index 9a4fc8e..d36442b 100755
--- a/tools/parsedeps.py
+++ b/tools/parsedeps.py
@@ -4,9 +4,13 @@
from __future__ import print_function
import optparse
-import re
import sys
+try:
+ raw_input
+except NameError:
+ raw_input = input
+
class Dependency:
def __init__(self, tgt):
@@ -45,7 +49,7 @@ class Dependencies:
t.pos = pos
def get(self, tgt):
- if self.lines.has_key(tgt):
+ if tgt in self.lines:
return self.lines[tgt]
else:
return None
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index 6bb68c7..64af01d 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os, sys
+import sys
def iteritems(obj):
@@ -60,7 +60,7 @@ def mangle_default_prop(prop):
# default to "adb". That might not the right policy there, but it's better
# to be explicit.
if not prop.get("persist.sys.usb.config"):
- prop.put("persist.sys.usb.config", "none");
+ prop.put("persist.sys.usb.config", "none")
def validate(prop):
"""Validate the properties.
diff --git a/tools/product_debug.py b/tools/product_debug.py
index 891587f..1433a9a 100755
--- a/tools/product_debug.py
+++ b/tools/product_debug.py
@@ -16,20 +16,20 @@
from __future__ import print_function
-import os
+from operator import itemgetter
import re
import sys
def iteritems(obj):
- if hasattr('iteritems'):
+ if hasattr(obj, 'iteritems'):
return obj.iteritems()
return obj.items()
def break_lines(key, val):
# these don't get split
- if key in ("PRODUCT_MODEL"):
+ if key in ("PRODUCT_MODEL",):
return (key,val)
return (key, "\n".join(val.split()))
@@ -51,8 +51,7 @@ def parse_variables(lines):
def render_variables(variables):
variables = dict(variables)
del variables["FILE"]
- variables = list(variables.items())
- variables.sort(lambda a, b: cmp(a[0], b[0]))
+ variables = sorted(variables.items(), key=itemgetter(0))
return ("<table id='variables'>"
+ "\n".join([ "<tr><th>%(key)s</th><td>%(val)s</td></tr>" % { "key": key, "val": val }
for key,val in variables])
@@ -78,7 +77,7 @@ def render_original(variables, text):
return text
def read_file(fn):
- f = file(fn)
+ f = open(fn)
text = f.read()
f.close()
return text
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index b7ec726..899d265 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -22,10 +22,12 @@ add them to the zipfile.
Usage: add_img_to_target_files target_files
"""
+from __future__ import print_function
+
import sys
if sys.hexversion < 0x02070000:
- print >> sys.stderr, "Python 2.7 or newer is required."
+ print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
import datetime
@@ -51,7 +53,7 @@ def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "system.img")
if os.path.exists(prebuilt_path):
- print "system.img already exists in %s, no need to rebuild..." % (prefix,)
+ print("system.img already exists in %s, no need to rebuild..." % prefix)
return
def output_sink(fn, data):
@@ -60,7 +62,7 @@ def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
ofile.close()
if OPTIONS.rebuild_recovery:
- print "Building new recovery patch"
+ print("Building new recovery patch")
common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
boot_img, info_dict=OPTIONS.info_dict)
@@ -83,7 +85,7 @@ def AddVendor(output_zip, prefix="IMAGES/"):
prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "vendor.img")
if os.path.exists(prebuilt_path):
- print "vendor.img already exists in %s, no need to rebuild..." % (prefix,)
+ print("vendor.img already exists in %s, no need to rebuild..." % prefix)
return
block_list = common.MakeTempFile(prefix="vendor-blocklist-", suffix=".map")
@@ -100,7 +102,7 @@ def BuildVendor(input_dir, info_dict, block_list=None):
def CreateImage(input_dir, info_dict, what, block_list=None):
- print "creating " + what + ".img..."
+ print("creating " + what + ".img...")
img = common.MakeTempFile(prefix=what + "-", suffix=".img")
@@ -167,7 +169,7 @@ def AddUserdata(output_zip, prefix="IMAGES/"):
prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "userdata.img")
if os.path.exists(prebuilt_path):
- print "userdata.img already exists in %s, no need to rebuild..." % (prefix,)
+ print("userdata.img already exists in %s, no need to rebuild..." % prefix)
return
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
@@ -178,7 +180,7 @@ def AddUserdata(output_zip, prefix="IMAGES/"):
not image_props.get("partition_size")):
return
- print "creating userdata.img..."
+ print("creating userdata.img...")
# Use a fixed timestamp (01/01/2009) when packaging the image.
# Bug: 24377993
@@ -230,7 +232,7 @@ def AddUserdataExtra(output_zip, prefix="IMAGES/"):
not image_props.get("partition_size")):
return
- print "creating userdata_%s.img..." % extra_name
+ print("creating userdata_%s.img..." % extra_name)
# The name of the directory it is making an image out of matters to
# mkyaffs2image. So we create a temp dir, and within it we create an
@@ -259,7 +261,7 @@ def AddCache(output_zip, prefix="IMAGES/"):
prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "cache.img")
if os.path.exists(prebuilt_path):
- print "cache.img already exists in %s, no need to rebuild..." % (prefix,)
+ print("cache.img already exists in %s, no need to rebuild..." % prefix)
return
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
@@ -268,7 +270,7 @@ def AddCache(output_zip, prefix="IMAGES/"):
if "fs_type" not in image_props:
return
- print "creating cache.img..."
+ print("creating cache.img...")
# Use a fixed timestamp (01/01/2009) when packaging the image.
# Bug: 24377993
@@ -303,7 +305,7 @@ def AddImagesToTargetFiles(filename):
if not OPTIONS.add_missing:
for n in input_zip.namelist():
if n.startswith("IMAGES/"):
- print "target_files appears to already contain images."
+ print("target_files appears to already contain images.")
sys.exit(1)
try:
@@ -322,13 +324,13 @@ def AddImagesToTargetFiles(filename):
compression=zipfile.ZIP_DEFLATED)
def banner(s):
- print "\n\n++++ " + s + " ++++\n\n"
+ print("\n\n++++ " + s + " ++++\n\n")
banner("boot")
prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "boot.img")
boot_image = None
if os.path.exists(prebuilt_path):
- print "boot.img already exists in IMAGES/, no need to rebuild..."
+ print("boot.img already exists in IMAGES/, no need to rebuild...")
if OPTIONS.rebuild_recovery:
boot_image = common.GetBootableImage(
"IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
@@ -342,7 +344,7 @@ def AddImagesToTargetFiles(filename):
recovery_image = None
prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "recovery.img")
if os.path.exists(prebuilt_path):
- print "recovery.img already exists in IMAGES/, no need to rebuild..."
+ print("recovery.img already exists in IMAGES/, no need to rebuild...")
if OPTIONS.rebuild_recovery:
recovery_image = common.GetBootableImage(
"IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
@@ -396,16 +398,16 @@ def main(argv):
sys.exit(1)
AddImagesToTargetFiles(args[0])
- print "done."
+ print("done.")
if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError as e:
- print
- print " ERROR: %s" % (e,)
- print
+ print()
+ print(" ERROR: %s" % e)
+ print()
sys.exit(1)
finally:
common.Cleanup()
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index a6c6bd8..bb2f16d 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -379,7 +379,7 @@ class BlockImageDiff(object):
src_str.append("%s:%s" % (sh, sr.to_string_raw()))
stashes[sh] -= 1
if stashes[sh] == 0:
- free_string.append("free %s\n" % (sh))
+ free_string.append("free %s\n" % sh)
stashes.pop(sh)
heapq.heappush(free_stash_ids, sid)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index b83379c..efaf7eb 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -20,16 +20,23 @@ Build image output_image_file from input_directory and properties_file.
Usage: build_image input_directory properties_file output_image_file
"""
+
+from __future__ import print_function
+
import os
import os.path
import re
import subprocess
import sys
-import commands
import common
import shutil
import tempfile
+try:
+ from commands import getstatusoutput
+except ImportError:
+ from subprocess import getstatusoutput
+
OPTIONS = common.OPTIONS
FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
@@ -42,18 +49,18 @@ def RunCommand(cmd):
Returns:
A tuple of the output and the exit code.
"""
- print "Running: ", " ".join(cmd)
+ print("Running: %s" % " ".join(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output, _ = p.communicate()
- print "%s" % (output.rstrip(),)
+ print("%s" % output.rstrip())
return (output, p.returncode)
def GetVerityTreeSize(partition_size):
cmd = "build_verity_tree -s %d"
cmd %= partition_size
- status, output = commands.getstatusoutput(cmd)
+ status, output = getstatusoutput(cmd)
if status:
- print output
+ print(output)
return False, 0
return True, int(output)
@@ -61,9 +68,9 @@ def GetVerityMetadataSize(partition_size):
cmd = "system/extras/verity/build_verity_metadata.py -s %d"
cmd %= partition_size
- status, output = commands.getstatusoutput(cmd)
+ status, output = getstatusoutput(cmd)
if status:
- print output
+ print(output)
return False, 0
return True, int(output)
@@ -87,10 +94,10 @@ def AdjustPartitionSizeForVerity(partition_size):
def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
cmd = "build_verity_tree -A %s %s %s" % (
FIXED_SALT, sparse_image_path, verity_image_path)
- print cmd
- status, output = commands.getstatusoutput(cmd)
+ print(cmd)
+ status, output = getstatusoutput(cmd)
if status:
- print "Could not build verity tree! Error: %s" % output
+ print("Could not build verity tree! Error: %s" % output)
return False
root, salt = output.split()
prop_dict["verity_root_hash"] = root
@@ -103,10 +110,10 @@ def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
"system/extras/verity/build_verity_metadata.py %s %s %s %s %s %s %s")
cmd = cmd_template % (image_size, verity_metadata_path, root_hash, salt,
block_device, signer_path, key)
- print cmd
- status, output = commands.getstatusoutput(cmd)
+ print(cmd)
+ status, output = getstatusoutput(cmd)
if status:
- print "Could not build verity metadata! Error: %s" % output
+ print("Could not build verity metadata! Error: %s" % output)
return False
return True
@@ -121,10 +128,10 @@ def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
"""
cmd = "append2simg %s %s"
cmd %= (sparse_image_path, unsparse_image_path)
- print cmd
- status, output = commands.getstatusoutput(cmd)
+ print(cmd)
+ status, output = getstatusoutput(cmd)
if status:
- print "%s: %s" % (error_message, output)
+ print("%s: %s" % (error_message, output))
return False
return True
@@ -348,7 +355,7 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None):
ext4fs_stats = re.compile(
r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
r'(?P<total_blocks>[0-9]+) blocks')
- m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
+ m = ext4fs_stats.match(ext4fs_output.strip().split(b'\n')[-1])
used_blocks = int(m.groupdict().get('used_blocks'))
total_blocks = int(m.groupdict().get('total_blocks'))
reserved_blocks = min(4096, int(total_blocks * 0.02))
@@ -371,7 +378,7 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None):
return False
if verity_supported and is_verity_partition:
if 2 * image_size - AdjustPartitionSizeForVerity(image_size) > partition_size:
- print "Error: No more room on %s to fit verity data" % mount_point
+ print("Error: No more room on %s to fit verity data" % mount_point)
return False
prop_dict["original_partition_size"] = prop_dict["partition_size"]
prop_dict["partition_size"] = str(image_size)
@@ -483,7 +490,7 @@ def LoadGlobalDict(filename):
def main(argv):
if len(argv) != 4:
- print __doc__
+ print(__doc__)
sys.exit(1)
in_dir = argv[0]
@@ -510,14 +517,14 @@ def main(argv):
elif image_filename == "oem.img":
mount_point = "oem"
else:
- print >> sys.stderr, "error: unknown image file name ", image_filename
+ print("error: unknown image file name ", image_filename, file=sys.stderr)
exit(1)
image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
if not BuildImage(in_dir, image_properties, out_file, target_out):
- print >> sys.stderr, "error: failed to build %s from %s" % (out_file,
- in_dir)
+ print("error: failed to build %s from %s" % (out_file, in_dir),
+ file=sys.stderr)
exit(1)
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index 5c541ab..df3be2f 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -39,10 +39,12 @@ Usage: check_target_file_signatures [flags] target_files
"""
+from __future__ import print_function
+
import sys
if sys.hexversion < 0x02070000:
- print >> sys.stderr, "Python 2.7 or newer is required."
+ print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
import os
@@ -53,6 +55,13 @@ import zipfile
import common
+
+def iteritems(obj):
+ if hasattr(obj, 'iteritems'):
+ return obj.iteritems()
+ return obj.items()
+
+
# Work around a bug in python's zipfile module that prevents opening
# of zipfiles if any entry has an extra field of between 1 and 3 bytes
# (which is common with zipaligned APKs). This overrides the
@@ -81,9 +90,9 @@ def Pop():
def Banner(msg):
- print "-" * 70
- print " ", msg
- print "-" * 70
+ print("-" * 70)
+ print(" ", msg)
+ print("-" * 70)
def GetCertSubject(cert):
@@ -260,7 +269,7 @@ class TargetFiles(object):
"""Look for any instances where packages signed with different
certs request the same sharedUserId."""
apks_by_uid = {}
- for apk in self.apks.itervalues():
+ for apk in self.apks.values():
if apk.shared_uid:
apks_by_uid.setdefault(apk.shared_uid, []).append(apk)
@@ -275,15 +284,15 @@ class TargetFiles(object):
AddProblem("different cert sets for packages with uid %s" % (uid,))
- print "uid %s is shared by packages with different cert sets:" % (uid,)
+ print("uid %s is shared by packages with different cert sets:" % uid)
for apk in apks:
- print "%-*s [%s]" % (self.max_pkg_len, apk.package, apk.filename)
+ print("%-*s [%s]" % (self.max_pkg_len, apk.package, apk.filename))
for cert in apk.certs:
- print " ", ALL_CERTS.Get(cert)
- print
+ print(" ", ALL_CERTS.Get(cert))
+ print()
def CheckExternalSignatures(self):
- for apk_filename, certname in self.certmap.iteritems():
+ for apk_filename, certname in iteritems(self.certmap):
if certname == "EXTERNAL":
# Apps marked EXTERNAL should be signed with the test key
# during development, then manually re-signed after
@@ -299,26 +308,26 @@ class TargetFiles(object):
def PrintCerts(self):
"""Display a table of packages grouped by cert."""
by_cert = {}
- for apk in self.apks.itervalues():
+ for apk in self.apks.values():
for cert in apk.certs:
by_cert.setdefault(cert, []).append((apk.package, apk))
- order = [(-len(v), k) for (k, v) in by_cert.iteritems()]
+ order = [(-len(v), k) for (k, v) in iteritems(by_cert)]
order.sort()
for _, cert in order:
- print "%s:" % (ALL_CERTS.Get(cert),)
+ print("%s:" % ALL_CERTS.Get(cert))
apks = by_cert[cert]
apks.sort()
for _, apk in apks:
if apk.shared_uid:
- print " %-*s %-*s [%s]" % (self.max_fn_len, apk.filename,
+ print(" %-*s %-*s [%s]" % (self.max_fn_len, apk.filename,
self.max_pkg_len, apk.package,
- apk.shared_uid)
+ apk.shared_uid))
else:
- print " %-*s %-*s" % (self.max_fn_len, apk.filename,
- self.max_pkg_len, apk.package)
- print
+ print(" %-*s %-*s" % (self.max_fn_len, apk.filename,
+ self.max_pkg_len, apk.package))
+ print()
def CompareWith(self, other):
"""Look for instances where a given package that exists in both
@@ -339,12 +348,12 @@ class TargetFiles(object):
by_certpair.setdefault((other.apks[i].certs,
self.apks[i].certs), []).append(i)
else:
- print "%s [%s]: new APK (not in comparison target_files)" % (
- i, self.apks[i].filename)
+ print("%s [%s]: new APK (not in comparison target_files)" % (
+ i, self.apks[i].filename))
else:
if i in other.apks:
- print "%s [%s]: removed APK (only in comparison target_files)" % (
- i, other.apks[i].filename)
+ print("%s [%s]: removed APK (only in comparison target_files)" % (
+ i, other.apks[i].filename))
if by_certpair:
AddProblem("some APKs changed certs")
@@ -352,23 +361,23 @@ class TargetFiles(object):
for (old, new), packages in sorted(by_certpair.items()):
for i, o in enumerate(old):
if i == 0:
- print "was", ALL_CERTS.Get(o)
+ print("was", ALL_CERTS.Get(o))
else:
- print " ", ALL_CERTS.Get(o)
+ print(" ", ALL_CERTS.Get(o))
for i, n in enumerate(new):
if i == 0:
- print "now", ALL_CERTS.Get(n)
+ print("now", ALL_CERTS.Get(n))
else:
- print " ", ALL_CERTS.Get(n)
+ print(" ", ALL_CERTS.Get(n))
for i in sorted(packages):
old_fn = other.apks[i].filename
new_fn = self.apks[i].filename
if old_fn == new_fn:
- print " %-*s [%s]" % (max_pkg_len, i, old_fn)
+ print(" %-*s [%s]" % (max_pkg_len, i, old_fn))
else:
- print " %-*s [was: %s; now: %s]" % (max_pkg_len, i,
- old_fn, new_fn)
- print
+ print(" %-*s [was: %s; now: %s]" % (max_pkg_len, i,
+ old_fn, new_fn))
+ print()
def main(argv):
@@ -423,9 +432,9 @@ def main(argv):
target_files.CompareWith(compare_files)
if PROBLEMS:
- print "%d problem(s) found:\n" % (len(PROBLEMS),)
+ print("%d problem(s) found:\n" % len(PROBLEMS))
for p in PROBLEMS:
- print p
+ print(p)
return 1
return 0
@@ -436,7 +445,7 @@ if __name__ == '__main__':
r = main(sys.argv[1:])
sys.exit(r)
except common.ExternalError as e:
- print
- print " ERROR: %s" % (e,)
- print
+ print()
+ print(" ERROR: %s" % e)
+ print()
sys.exit(1)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 47cf759..de2660a 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import print_function
+
import copy
import errno
import getopt
@@ -34,6 +36,17 @@ import rangelib
from hashlib import sha1 as sha1
+try:
+ raw_input
+except NameError:
+ raw_input = input
+
+
+def iteritems(obj):
+ if hasattr(obj, 'iteritems'):
+ return obj.iteritems()
+ return obj.items()
+
class Options(object):
def __init__(self):
@@ -83,7 +96,7 @@ def Run(args, **kwargs):
"""Create and return a subprocess.Popen object, printing the command
line on the terminal if -v was specified."""
if OPTIONS.verbose:
- print " running: ", " ".join(args)
+ print(" running: ", " ".join(args))
return subprocess.Popen(args, **kwargs)
@@ -193,7 +206,7 @@ def LoadBuildProp(read_helper):
try:
data = read_helper("SYSTEM/build.prop")
except KeyError:
- print "Warning: could not find SYSTEM/build.prop in %s" % zip
+ print("Warning: could not find SYSTEM/build.prop in %s" % zip)
data = ""
return LoadDictionaryFromLines(data.split("\n"))
@@ -221,7 +234,7 @@ def LoadRecoveryFSTab(read_helper, fstab_version, type):
try:
data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
except KeyError:
- print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
+ print("Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab")
data = ""
if fstab_version == 1:
@@ -253,7 +266,7 @@ def LoadRecoveryFSTab(read_helper, fstab_version, type):
if i.startswith("length="):
length = int(i[7:])
else:
- print "%s: unknown option \"%s\"" % (mount_point, i)
+ print("%s: unknown option \"%s\"" % (mount_point, i))
d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
device=pieces[2], length=length,
@@ -305,7 +318,7 @@ def LoadRecoveryFSTab(read_helper, fstab_version, type):
def DumpInfoDict(d):
for k, v in sorted(d.items()):
- print "%-25s = (%s) %s" % (k, type(v).__name__, v)
+ print("%-25s = (%s) %s" % (k, type(v).__name__, v))
def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
@@ -459,15 +472,15 @@ def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
if os.path.exists(prebuilt_path):
- print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
+ print("using prebuilt %s from BOOTABLE_IMAGES..." % prebuilt_name)
return File.FromLocalFile(name, prebuilt_path)
prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
if os.path.exists(prebuilt_path):
- print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
+ print("using prebuilt %s from IMAGES..." % prebuilt_name)
return File.FromLocalFile(name, prebuilt_path)
- print "building image from target_files %s..." % (tree_subdir,)
+ print("building image from target_files %s..." % tree_subdir)
fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
os.path.join(unpack_dir, fs_config),
@@ -546,7 +559,7 @@ def GetKeyPasswords(keylist):
if p.returncode == 0:
# Encrypted key with empty string as password.
key_passwords[k] = ''
- elif stderr.startswith('Error decrypting key'):
+ elif stderr.startswith(b'Error decrypting key'):
# Definitely encrypted key.
# It would have said "Error reading key" if it didn't parse correctly.
need_passwords.append(k)
@@ -641,11 +654,11 @@ def CheckSize(data, target, info_dict):
if pct >= 99.0:
raise ExternalError(msg)
elif pct >= 95.0:
- print
- print " WARNING: ", msg
- print
+ print()
+ print(" WARNING: ", msg)
+ print()
elif OPTIONS.verbose:
- print " ", msg
+ print(" ", msg)
def ReadApkCerts(tf_zip):
@@ -694,8 +707,8 @@ COMMON_DOCSTRING = """
"""
def Usage(docstring):
- print docstring.rstrip("\n")
- print COMMON_DOCSTRING
+ print(docstring.rstrip("\n"))
+ print(COMMON_DOCSTRING)
def ParseOptions(argv,
@@ -719,7 +732,7 @@ def ParseOptions(argv,
list(extra_long_opts))
except getopt.GetoptError as err:
Usage(docstring)
- print "**", str(err), "**"
+ print("**", str(err), "**")
sys.exit(2)
for o, a in opts:
@@ -815,7 +828,7 @@ class PasswordManager(object):
current[i] = ""
if not first:
- print "key file %s still missing some passwords." % (self.pwfile,)
+ print("key file %s still missing some passwords." % self.pwfile)
answer = raw_input("try to edit again? [y]> ").strip()
if answer and answer[0] not in 'yY':
raise RuntimeError("key passwords unavailable")
@@ -829,7 +842,7 @@ class PasswordManager(object):
values.
"""
result = {}
- for k, v in sorted(current.iteritems()):
+ for k, v in sorted(iteritems(current)):
if v:
result[k] = v
else:
@@ -850,7 +863,7 @@ class PasswordManager(object):
f.write("# (Additional spaces are harmless.)\n\n")
first_line = None
- sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
+ sorted_list = sorted((not v, k, v) for (k, v) in current.items())
for i, (_, k, v) in enumerate(sorted_list):
f.write("[[[ %s ]]] %s\n" % (v, k))
if not v and first_line is None:
@@ -875,13 +888,13 @@ class PasswordManager(object):
continue
m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
if not m:
- print "failed to parse password file: ", line
+ print("failed to parse password file: ", line)
else:
result[m.group(2)] = m.group(1)
f.close()
except IOError as e:
if e.errno != errno.ENOENT:
- print "error reading password file: ", str(e)
+ print("error reading password file: ", str(e))
return result
@@ -982,7 +995,7 @@ class DeviceSpecificParams(object):
"""Keyword arguments to the constructor become attributes of this
object, which is passed to all functions in the device-specific
module."""
- for k, v in kwargs.iteritems():
+ for k, v in iteritems(kwargs):
setattr(self, k, v)
self.extras = OPTIONS.extras
@@ -999,10 +1012,10 @@ class DeviceSpecificParams(object):
if x == ".py":
f = b
info = imp.find_module(f, [d])
- print "loaded device-specific extensions from", path
+ print("loaded device-specific extensions from", path)
self.module = imp.load_module("device_specific", *info)
except ImportError:
- print "unable to load device-specific module; assuming none"
+ print("unable to load device-specific module; assuming none")
def _DoCall(self, function_name, *args, **kwargs):
"""Call the named function in the device-specific module, passing
@@ -1138,7 +1151,7 @@ class Difference(object):
th.start()
th.join(timeout=300) # 5 mins
if th.is_alive():
- print "WARNING: diff command timed out"
+ print("WARNING: diff command timed out")
p.terminate()
th.join(5)
if th.is_alive():
@@ -1146,8 +1159,8 @@ class Difference(object):
th.join()
if err or p.returncode != 0:
- print "WARNING: failure running %s:\n%s\n" % (
- diff_program, "".join(err))
+ print("WARNING: failure running %s:\n%s\n" % (
+ diff_program, "".join(err)))
self.patch = None
return None, None, None
diff = ptemp.read()
@@ -1169,7 +1182,7 @@ class Difference(object):
def ComputeDifferences(diffs):
"""Call ComputePatch on all the Difference objects in 'diffs'."""
- print len(diffs), "diffs to compute"
+ print(len(diffs), "diffs to compute")
# Do the largest files first, to try and reduce the long-pole effect.
by_size = [(i.tf.size, i) for i in diffs]
@@ -1195,13 +1208,13 @@ def ComputeDifferences(diffs):
else:
name = "%s (%s)" % (tf.name, sf.name)
if patch is None:
- print "patching failed! %s" % (name,)
+ print("patching failed! %s" % name)
else:
- print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
- dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
+ print("%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
+ dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name))
lock.release()
except Exception as e:
- print e
+ print(e)
raise
# start worker threads; wait for them all to finish.
@@ -1401,16 +1414,18 @@ def GetTypeAndDevice(mount_point, info):
def ParseCertificate(data):
"""Parse a PEM-format certificate."""
+ from codecs import decode
cert = []
save = False
for line in data.split("\n"):
if "--END CERTIFICATE--" in line:
break
if save:
- cert.append(line)
+ l = line.encode() if hasattr(line, 'encode') else line
+ cert.append(l)
if "--BEGIN CERTIFICATE--" in line:
save = True
- cert = "".join(cert).decode('base64')
+ cert = decode(b"".join(cert), 'base64')
return cert
def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
@@ -1481,9 +1496,9 @@ fi
m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
if m:
sh_location = m.group(1)
- print "putting script in", sh_location
+ print("putting script in", sh_location)
break
except (OSError, IOError) as e:
- print "failed to read init.rc: %s" % (e,)
+ print("failed to read init.rc: %s" % e)
output_sink(sh_location, sh)
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index e3425bf..80b8a44 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -145,7 +145,7 @@ class EdifyGenerator(object):
", ".join(["%s" % (b,) for b in basebands]) +
'; this device has baseband " + getprop("ro.baseband") + ".");' +
");")
- self.script.append(self._WordWrap(cmd))
+ self.script.append(self.WordWrap(cmd))
def RunBackup(self, command):
self.script.append(('run_program("/tmp/install/bin/backuptool.sh", "%s");' % command))
@@ -372,7 +372,7 @@ class EdifyGenerator(object):
for d, l in symlink_list:
by_dest.setdefault(d, []).append(l)
- for dest, links in sorted(by_dest.iteritems()):
+ for dest, links in sorted(by_dest.items()):
cmd = ('symlink("%s", ' % (dest,) +
",\0".join(['"' + i + '"' for i in sorted(links)]) + ");")
self.script.append(self.WordWrap(cmd))
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 09fc64d..d486a7a 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -26,10 +26,12 @@ Usage: img_from_target_files [flags] input_target_files output_image_zip
"""
+from __future__ import print_function
+
import sys
if sys.hexversion < 0x02070000:
- print >> sys.stderr, "Python 2.7 or newer is required."
+ print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
import errno
@@ -61,7 +63,7 @@ def AddRadio(output_zip):
# If a filesmap file exists, create a script to flash the radio images based on it
filesmap = os.path.join(OPTIONS.input_tmp, "RADIO/filesmap")
if os.path.isfile(filesmap):
- print "creating flash-radio.sh..."
+ print("creating flash-radio.sh...")
filesmap_data = open(filesmap, "r")
filesmap_regex = re.compile(r'^(\S+)\s\S+\/by-name\/(\S+).*')
tmp_flash_radio = tempfile.NamedTemporaryFile()
@@ -74,7 +76,7 @@ def AddRadio(output_zip):
if os.path.getsize(tmp_flash_radio.name) > 0:
output_zip.write(tmp_flash_radio.name, "flash-radio.sh")
else:
- print "flash-radio.sh is empty, skipping..."
+ print("flash-radio.sh is empty, skipping...")
tmp_flash_radio.close()
def main(argv):
@@ -146,7 +148,7 @@ def main(argv):
recovery_image.AddToZip(output_zip)
def banner(s):
- print "\n\n++++ " + s + " ++++\n\n"
+ print("\n\n++++ " + s + " ++++\n\n")
if not bootable_only:
banner("AddSystem")
@@ -165,11 +167,11 @@ def main(argv):
add_img_to_target_files.AddCache(output_zip, prefix="")
finally:
- print "cleaning up..."
+ print("cleaning up...")
common.ZipClose(output_zip)
shutil.rmtree(OPTIONS.input_tmp)
- print "done."
+ print("done.")
if __name__ == '__main__':
@@ -177,7 +179,7 @@ if __name__ == '__main__':
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError as e:
- print
- print " ERROR: %s" % (e,)
- print
+ print()
+ print(" ERROR: %s" % e)
+ print()
sys.exit(1)
diff --git a/tools/releasetools/make_recovery_patch.py b/tools/releasetools/make_recovery_patch.py
index 08d1450..7c6007e 100755
--- a/tools/releasetools/make_recovery_patch.py
+++ b/tools/releasetools/make_recovery_patch.py
@@ -14,10 +14,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import print_function
+
import sys
if sys.hexversion < 0x02070000:
- print >> sys.stderr, "Python 2.7 or newer is required."
+ print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
import os
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 3e72ede..6e5124a 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -106,10 +106,12 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
"""
+from __future__ import print_function
+
import sys
if sys.hexversion < 0x02070000:
- print >> sys.stderr, "Python 2.7 or newer is required."
+ print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
import multiprocessing
@@ -150,7 +152,7 @@ OPTIONS.override_prop = False
def MostPopularKey(d, default):
"""Given a dict, return the key corresponding to the largest
value. Returns 'default' if the dict is empty."""
- x = [(v, k) for (k, v) in d.iteritems()]
+    x = [(v, k) for (k, v) in d.items()]
if not x:
return default
x.sort()
@@ -269,14 +271,14 @@ class Item(object):
def Dump(self, indent=0):
if self.uid is not None:
- print "%s%s %d %d %o" % (
- " " * indent, self.name, self.uid, self.gid, self.mode)
+ print("%s%s %d %d %o" % (
+ " " * indent, self.name, self.uid, self.gid, self.mode))
else:
- print "%s%s %s %s %s" % (
- " " * indent, self.name, self.uid, self.gid, self.mode)
+ print("%s%s %s %s %s" % (
+ " " * indent, self.name, self.uid, self.gid, self.mode))
if self.is_dir:
- print "%s%s" % (" "*indent, self.descendants)
- print "%s%s" % (" "*indent, self.best_subtree)
+ print("%s%s" % (" "*indent, self.descendants))
+ print("%s%s" % (" "*indent, self.best_subtree))
for i in self.children:
i.Dump(indent=indent+1)
@@ -300,7 +302,7 @@ class Item(object):
d = self.descendants
for i in self.children:
if i.is_dir:
- for k, v in i.CountChildMetadata().iteritems():
+ for k, v in i.CountChildMetadata().items():
d[k] = d.get(k, 0) + v
else:
k = (i.uid, i.gid, None, i.mode, i.selabel, i.capabilities)
@@ -312,7 +314,7 @@ class Item(object):
# First, find the (uid, gid) pair that matches the most
# descendants.
ug = {}
- for (uid, gid, _, _, _, _), count in d.iteritems():
+ for (uid, gid, _, _, _, _), count in d.items():
ug[(uid, gid)] = ug.get((uid, gid), 0) + count
ug = MostPopularKey(ug, (0, 0))
@@ -322,7 +324,7 @@ class Item(object):
best_fmode = (0, 0o644)
best_selabel = (0, None)
best_capabilities = (0, None)
- for k, count in d.iteritems():
+ for k, count in d.items():
if k[:2] != ug:
continue
if k[2] is not None and count >= best_dmode[0]:
@@ -483,11 +485,11 @@ def GetImage(which, tmpdir, info_dict):
path = os.path.join(tmpdir, "IMAGES", which + ".img")
mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
if os.path.exists(path) and os.path.exists(mappath):
- print "using %s.img from target-files" % (which,)
+ print("using %s.img from target-files" % which)
# This is a 'new' target-files, which already has the image in it.
else:
- print "building %s.img from target-files" % (which,)
+ print("building %s.img from target-files" % which)
# This is an 'old' target-files, which does not contain images
# already built. Build them.
@@ -620,8 +622,8 @@ else if get_stage("%(bcb_dev)s") == "3/3" then
CopyInstallTools(output_zip)
script.UnpackPackageDir("install", "/tmp/install")
- script.SetPermissionsRecursive("/tmp/install", 0, 0, 0755, 0644, None, None)
- script.SetPermissionsRecursive("/tmp/install/bin", 0, 0, 0755, 0755, None, None)
+ script.SetPermissionsRecursive("/tmp/install", 0, 0, 0o755, 0o644, None, None)
+ script.SetPermissionsRecursive("/tmp/install/bin", 0, 0, 0o755, 0o755, None, None)
if OPTIONS.backuptool:
script.Mount("/system")
@@ -759,7 +761,7 @@ def WritePolicyConfig(file_name, output_zip):
def WriteMetadata(metadata, output_zip):
common.ZipWriteStr(output_zip, "META-INF/com/android/metadata",
"".join(["%s=%s\n" % kv
- for kv in sorted(metadata.iteritems())]))
+ for kv in sorted(metadata.items())]))
def LoadPartitionFiles(z, partition):
@@ -969,8 +971,8 @@ else if get_stage("%(bcb_dev)s") != "3/3" then
else:
include_full_boot = False
- print "boot target: %d source: %d diff: %d" % (
- target_boot.size, source_boot.size, len(d))
+ print("boot target: %d source: %d diff: %d" % (
+ target_boot.size, source_boot.size, len(d)))
common.ZipWriteStr(output_zip, "patch/boot.img.p", d)
@@ -1006,19 +1008,19 @@ else
if OPTIONS.two_step:
common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
script.WriteRawImage("/boot", "boot.img")
- print "writing full boot image (forced by two-step mode)"
+ print("writing full boot image (forced by two-step mode)")
if not OPTIONS.two_step:
if updating_boot:
if include_full_boot:
- print "boot image changed; including full."
+ print("boot image changed; including full.")
script.Print("Installing boot image...")
script.WriteRawImage("/boot", "boot.img")
else:
# Produce the boot image by applying a patch to the current
# contents of the boot partition, and write it back to the
# partition.
- print "boot image changed; including patch."
+ print("boot image changed; including patch.")
script.Print("Patching boot image...")
script.ShowProgress(0.1, 10)
script.ApplyPatch("%s:%s:%d:%s:%d:%s"
@@ -1029,7 +1031,7 @@ else
target_boot.size, target_boot.sha1,
source_boot.sha1, "patch/boot.img.p")
else:
- print "boot image unchanged; skipping."
+ print("boot image unchanged; skipping.")
# Do device-specific installation (eg, write radio image).
device_specific.IncrementalOTA_InstallEnd()
@@ -1056,9 +1058,9 @@ endif;
class FileDifference(object):
def __init__(self, partition, source_zip, target_zip, output_zip):
self.deferred_patch_list = None
- print "Loading target..."
+ print("Loading target...")
self.target_data = target_data = LoadPartitionFiles(target_zip, partition)
- print "Loading source..."
+ print("Loading source...")
self.source_data = source_data = LoadPartitionFiles(source_zip, partition)
self.verbatim_targets = verbatim_targets = []
@@ -1085,14 +1087,14 @@ class FileDifference(object):
assert fn == tf.name
sf = ClosestFileMatch(tf, matching_file_cache, renames)
if sf is not None and sf.name != tf.name:
- print "File has moved from " + sf.name + " to " + tf.name
+ print("File has moved from " + sf.name + " to " + tf.name)
renames[sf.name] = tf
if sf is None or fn in OPTIONS.require_verbatim:
# This file should be included verbatim
if fn in OPTIONS.prohibit_verbatim:
raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
- print "send", fn, "verbatim"
+ print("send", fn, "verbatim")
tf.AddToZip(output_zip)
verbatim_targets.append((fn, tf.size, tf.sha1))
if fn in target_data.keys():
@@ -1177,8 +1179,8 @@ class FileDifference(object):
def EmitRenames(self, script):
if len(self.renames) > 0:
script.Print("Renaming files...")
- for src, tgt in self.renames.iteritems():
- print "Renaming " + src + " to " + tgt.name
+ for src, tgt in self.renames.items():
+ print("Renaming " + src + " to " + tgt.name)
script.RenameFile(src, tgt.name)
@@ -1321,8 +1323,8 @@ else if get_stage("%(bcb_dev)s") != "3/3" then
""" % bcb_dev)
# Dump fingerprints
- script.Print("Source: %s" % (source_fp,))
- script.Print("Target: %s" % (target_fp,))
+ script.Print("Source: %s" % source_fp)
+ script.Print("Target: %s" % target_fp)
script.Print("Verifying current system...")
@@ -1336,8 +1338,8 @@ else if get_stage("%(bcb_dev)s") != "3/3" then
if updating_boot:
d = common.Difference(target_boot, source_boot)
_, _, d = d.ComputePatch()
- print "boot target: %d source: %d diff: %d" % (
- target_boot.size, source_boot.size, len(d))
+ print("boot target: %d source: %d diff: %d" % (
+ target_boot.size, source_boot.size, len(d)))
common.ZipWriteStr(output_zip, "patch/boot.img.p", d)
@@ -1376,7 +1378,7 @@ else
if OPTIONS.two_step:
common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
script.WriteRawImage("/boot", "boot.img")
- print "writing full boot image (forced by two-step mode)"
+ print("writing full boot image (forced by two-step mode)")
script.Print("Removing unneeded files...")
system_diff.RemoveUnneededFiles(script, ("/system/recovery.img",))
@@ -1411,9 +1413,9 @@ else
source_boot.sha1, "patch/boot.img.p")
so_far += target_boot.size
script.SetProgress(so_far / total_patch_size)
- print "boot image changed; including."
+ print("boot image changed; including.")
else:
- print "boot image unchanged; skipping."
+ print("boot image unchanged; skipping.")
system_items = ItemSet("system", "META/filesystem_config.txt")
if vendor_diff:
@@ -1438,9 +1440,9 @@ else
target_recovery, target_boot)
script.DeleteFiles(["/system/recovery-from-boot.p",
"/system/etc/install-recovery.sh"])
- print "recovery image changed; including as patch from boot."
+ print("recovery image changed; including as patch from boot.")
else:
- print "recovery image unchanged; skipping."
+ print("recovery image unchanged; skipping.")
script.ShowProgress(0.1, 10)
@@ -1610,7 +1612,7 @@ def main(argv):
OPTIONS.verify = True
elif o == "--block":
OPTIONS.block_based = True
- elif o in ("-b", "--binary"):
+ elif o in ("-b", "--binary",):
OPTIONS.updater_binary = a
elif o in ("--no_fallback_to_full",):
OPTIONS.fallback_to_full = False
@@ -1620,11 +1622,11 @@ def main(argv):
except ValueError:
raise ValueError("Cannot parse value %r for option %r - expecting "
"a float" % (a, o))
- elif o in ("--backup"):
+ elif o in ("--backup",):
OPTIONS.backuptool = bool(a.lower() == 'true')
- elif o in ("--override_device"):
+ elif o in ("--override_device",):
OPTIONS.override_device = a
- elif o in ("--override_prop"):
+ elif o in ("--override_prop",):
OPTIONS.override_prop = bool(a.lower() == 'true')
else:
return False
@@ -1663,7 +1665,7 @@ def main(argv):
if OPTIONS.extra_script is not None:
OPTIONS.extra_script = open(OPTIONS.extra_script).read()
- print "unzipping target target-files..."
+ print("unzipping target target-files...")
OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
OPTIONS.target_tmp = OPTIONS.input_tmp
@@ -1678,7 +1680,7 @@ def main(argv):
OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts")
if OPTIONS.verbose:
- print "--- target info ---"
+ print("--- target info ---")
common.DumpInfoDict(OPTIONS.info_dict)
# If the caller explicitly specified the device-specific extensions
@@ -1691,7 +1693,7 @@ def main(argv):
if OPTIONS.device_specific is None:
from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
if os.path.exists(from_input):
- print "(using device-specific extensions from target_files)"
+ print("(using device-specific extensions from target_files)")
OPTIONS.device_specific = from_input
else:
OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
@@ -1726,7 +1728,7 @@ def main(argv):
break
else:
- print "unzipping source target-files..."
+ print("unzipping source target-files...")
OPTIONS.source_tmp, source_zip = common.UnzipTemp(
OPTIONS.incremental_source)
OPTIONS.target_info_dict = OPTIONS.info_dict
@@ -1739,7 +1741,7 @@ def main(argv):
"default_system_dev_certificate",
"build/target/product/security/testkey")
if OPTIONS.verbose:
- print "--- source info ---"
+ print("--- source info ---")
common.DumpInfoDict(OPTIONS.source_info_dict)
try:
WriteIncrementalOTAPackage(input_zip, source_zip, output_zip)
@@ -1748,7 +1750,7 @@ def main(argv):
except ValueError:
if not OPTIONS.fallback_to_full:
raise
- print "--- failed to build incremental; falling back to full ---"
+ print("--- failed to build incremental; falling back to full ---")
OPTIONS.incremental_source = None
common.ZipClose(output_zip)
@@ -1756,7 +1758,7 @@ def main(argv):
SignOutput(temp_zip_file.name, args[1])
temp_zip_file.close()
- print "done."
+ print("done.")
if __name__ == '__main__':
@@ -1764,9 +1766,9 @@ if __name__ == '__main__':
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError as e:
- print
- print " ERROR: %s" % (e,)
- print
+ print()
+ print(" ERROR: %s" % e)
+ print()
sys.exit(1)
finally:
common.Cleanup()
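
The trailing commas added in the option-parsing hunks above are not cosmetic. Without the comma, ("--backup") is just a parenthesised string, and the expression o in ("--backup") performs a substring test instead of an exact match, so a shorter option name could silently take the wrong branch. A minimal sketch of the difference, using a hypothetical option name:

    o = "--override"                      # hypothetical option, for illustration only
    print(o in ("--override_device"))     # True  - substring match against a plain string
    print(o in ("--override_device",))    # False - membership test against a one-element tuple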
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index 373bbed..cbb34b9 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -43,6 +43,8 @@ class RangeSet(object):
return self.data != other.data
def __nonzero__(self):
return bool(self.data)
+ def __bool__(self):
+ return self.__nonzero__()
def __str__(self):
if not self.data:
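
Python 3 consults __bool__ rather than __nonzero__ when an object is truth-tested, so defining __bool__ in terms of the existing __nonzero__ keeps expressions like "if rangeset:" working under both interpreters. A toy sketch of the pattern, not the real RangeSet class:

    class Ranges(object):
        def __init__(self, data=()):
            self.data = tuple(data)

        def __nonzero__(self):        # consulted by truth tests on Python 2
            return bool(self.data)

        __bool__ = __nonzero__        # consulted by truth tests on Python 3

    print(bool(Ranges()))             # False
    print(bool(Ranges((0, 5))))       # True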
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 60d62c2..a95af16 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -65,14 +65,15 @@ Usage: sign_target_files_apks [flags] input_target_files output_target_files
"""
+from __future__ import print_function
+
import sys
if sys.hexversion < 0x02070000:
- print >> sys.stderr, "Python 2.7 or newer is required."
+ print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
import base64
-import cStringIO
import copy
import errno
import os
@@ -82,6 +83,11 @@ import subprocess
import tempfile
import zipfile
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
+
import add_img_to_target_files
import common
@@ -98,11 +104,11 @@ def GetApkCerts(tf_zip):
certmap = common.ReadApkCerts(tf_zip)
# apply the key remapping to the contents of the file
- for apk, cert in certmap.iteritems():
+ for apk, cert in certmap.items():
certmap[apk] = OPTIONS.key_map.get(cert, cert)
# apply all the -e options, overriding anything in the file
- for apk, cert in OPTIONS.extra_apks.iteritems():
+ for apk, cert in OPTIONS.extra_apks.items():
if not cert:
cert = "PRESIGNED"
certmap[apk] = OPTIONS.key_map.get(cert, cert)
@@ -120,10 +126,10 @@ def CheckAllApksSigned(input_tf_zip, apk_key_map):
if name not in apk_key_map:
unknown_apks.append(name)
if unknown_apks:
- print "ERROR: no key specified for:\n\n ",
- print "\n ".join(unknown_apks)
- print "\nUse '-e <apkname>=' to specify a key (which may be an"
- print "empty string to not sign this apk)."
+ print("ERROR: no key specified for:\n\n ", end=' ')
+ print("\n ".join(unknown_apks))
+ print("\nUse '-e <apkname>=' to specify a key (which may be an")
+ print("empty string to not sign this apk).")
sys.exit(1)
@@ -194,25 +200,25 @@ def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
name = os.path.basename(info.filename)
key = apk_key_map[name]
if key not in common.SPECIAL_CERT_STRINGS:
- print " signing: %-*s (%s)" % (maxsize, name, key)
+ print(" signing: %-*s (%s)" % (maxsize, name, key))
signed_data = SignApk(data, key, key_passwords[key])
common.ZipWriteStr(output_tf_zip, out_info, signed_data)
else:
# an APK we're not supposed to sign.
- print "NOT signing: %s" % (name,)
+ print("NOT signing: %s" % name)
common.ZipWriteStr(output_tf_zip, out_info, data)
elif info.filename in ("SYSTEM/build.prop",
"VENDOR/build.prop",
"BOOT/RAMDISK/default.prop",
"RECOVERY/RAMDISK/default.prop"):
- print "rewriting %s:" % (info.filename,)
+ print("rewriting %s:" % info.filename)
new_data = RewriteProps(data, misc_info)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
if info.filename in ("BOOT/RAMDISK/default.prop",
"RECOVERY/RAMDISK/default.prop"):
write_to_temp(info.filename, info.external_attr, new_data)
elif info.filename.endswith("mac_permissions.xml"):
- print "rewriting %s with new keys." % (info.filename,)
+ print("rewriting %s with new keys." % info.filename)
new_data = ReplaceCerts(data)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
elif info.filename in ("SYSTEM/recovery-from-boot.p",
@@ -257,10 +263,10 @@ def ReplaceCerts(data):
"""Given a string of data, replace all occurences of a set
of X509 certs with a newer set of X509 certs and return
the updated data string."""
- for old, new in OPTIONS.key_map.iteritems():
+ for old, new in OPTIONS.key_map.items():
try:
if OPTIONS.verbose:
- print " Replacing %s.x509.pem with %s.x509.pem" % (old, new)
+ print(" Replacing %s.x509.pem with %s.x509.pem" % (old, new))
f = open(old + ".x509.pem")
old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
f.close()
@@ -271,14 +277,14 @@ def ReplaceCerts(data):
pattern = "\\b"+old_cert16+"\\b"
(data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
if OPTIONS.verbose:
- print " Replaced %d occurence(s) of %s.x509.pem with " \
- "%s.x509.pem" % (num, old, new)
+ print(" Replaced %d occurence(s) of %s.x509.pem with "
+ "%s.x509.pem" % (num, old, new))
except IOError as e:
if e.errno == errno.ENOENT and not OPTIONS.verbose:
continue
- print " Error accessing %s. %s. Skip replacing %s.x509.pem " \
- "with %s.x509.pem." % (e.filename, e.strerror, old, new)
+ print(" Error accessing %s. %s. Skip replacing %s.x509.pem "
+ "with %s.x509.pem." % (e.filename, e.strerror, old, new))
return data
@@ -331,8 +337,8 @@ def RewriteProps(data, misc_info):
value = " ".join(value)
line = key + "=" + value
if line != original_line:
- print " replace: ", original_line
- print " with: ", line
+ print(" replace: ", original_line)
+ print(" with: ", line)
output.append(line)
return "\n".join(output) + "\n"
@@ -348,7 +354,7 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
extra_recovery_keys = [OPTIONS.key_map.get(k, k) + ".x509.pem"
for k in extra_recovery_keys.split()]
if extra_recovery_keys:
- print "extra recovery-only key(s): " + ", ".join(extra_recovery_keys)
+ print("extra recovery-only key(s): " + ", ".join(extra_recovery_keys))
else:
extra_recovery_keys = []
@@ -362,14 +368,14 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
if mapped_keys:
- print "using:\n ", "\n ".join(mapped_keys)
- print "for OTA package verification"
+ print("using:\n ", "\n ".join(mapped_keys))
+ print("for OTA package verification")
else:
devkey = misc_info.get("default_system_dev_certificate",
"build/target/product/security/testkey")
mapped_keys.append(
OPTIONS.key_map.get(devkey, devkey) + ".x509.pem")
- print "META/otakeys.txt has no keys; using", mapped_keys[0]
+ print("META/otakeys.txt has no keys; using", mapped_keys[0])
# recovery uses a version of the key that has been slightly
# predigested (by DumpPublicKey.java) and put in res/keys.
@@ -389,7 +395,7 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
# put into a zipfile system/etc/security/otacerts.zip.
# We DO NOT include the extra_recovery_keys (if any) here.
- temp_file = cStringIO.StringIO()
+ temp_file = StringIO()
certs_zip = zipfile.ZipFile(temp_file, "w")
for k in mapped_keys:
certs_zip.write(k)
@@ -400,7 +406,7 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
return new_recovery_keys
def ReplaceVerityPublicKey(targetfile_zip, key_path):
- print "Replacing verity public key with %s" % key_path
+ print("Replacing verity public key with %s" % key_path)
with open(key_path) as f:
data = f.read()
common.ZipWriteStr(targetfile_zip, "BOOT/RAMDISK/verity_key", data)
@@ -408,7 +414,7 @@ def ReplaceVerityPublicKey(targetfile_zip, key_path):
def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip,
misc_info, key_path):
- print "Replacing verity private key with %s" % key_path
+ print("Replacing verity private key with %s" % key_path)
current_key = misc_info["verity_key"]
original_misc_info = targetfile_input_zip.read("META/misc_info.txt")
new_misc_info = original_misc_info.replace(current_key, key_path)
@@ -499,14 +505,14 @@ def main(argv):
add_img_to_target_files.AddImagesToTargetFiles(args[1])
- print "done."
+ print("done.")
if __name__ == '__main__':
try:
main(sys.argv[1:])
- except common.ExternalError, e:
- print
- print " ERROR: %s" % (e,)
- print
+ except common.ExternalError as e:
+ print()
+ print(" ERROR: %s" % e)
+ print()
sys.exit(1)
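
The try/except import above is the usual way to prefer the faster cStringIO on Python 2 while falling back to io.StringIO on Python 3, and the iteritems()-to-items() changes work because dict.items() exists on both interpreters (a list on 2, a view on 3). A minimal sketch of both idioms together, with an illustrative key mapping; note that under Python 3 zipfile.ZipFile writes bytes, so an in-memory archive such as the otacerts.zip buffer generally wants io.BytesIO rather than io.StringIO, which may need a follow-up change:

    from __future__ import print_function

    try:
        from cStringIO import StringIO   # Python 2
    except ImportError:
        from io import StringIO          # Python 3

    key_map = {"releasekey": "devkey"}   # illustrative mapping only
    for old, new in key_map.items():     # items() works on Python 2 and 3
        buf = StringIO()
        buf.write("replacing %s.x509.pem with %s.x509.pem\n" % (old, new))
        print(buf.getvalue(), end="")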
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 07f3c1c..10022d0 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import print_function
+
import bisect
import os
import struct
diff --git a/tools/repopick.py b/tools/repopick.py
index abc70a9..64db7bf 100755
--- a/tools/repopick.py
+++ b/tools/repopick.py
@@ -66,7 +66,8 @@ def fetch_query_via_ssh(remote_url, query):
out = subprocess.check_output(['ssh', '-x', '-p{0}'.format(port), userhost, 'gerrit', 'query', '--format=JSON --patch-sets --current-patch-set', query])
-
+ if not hasattr(out, 'encode'):
+ out = out.decode()
reviews = []
for line in out.split('\n'):
try:
@@ -184,8 +185,10 @@ if __name__ == '__main__':
if args.abandon_first:
# Determine if the branch already exists; skip the abandon if it does not
plist = subprocess.check_output(['repo', 'info'])
+ if not hasattr(plist, 'encode'):
+ plist = plist.decode()
needs_abandon = False
- for pline in plist:
+ for pline in plist.splitlines():
matchObj = re.match(r'Local Branches.*\[(.*)\]', pline)
if matchObj:
local_branches = re.split('\s*,\s*', matchObj.group(1))
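
Under Python 3, subprocess.check_output returns bytes, and iterating the old plist value walked the string character by character rather than line by line, so the branch-name matches could never fire. The hasattr(out, 'encode') test is a duck-typing check that distinguishes str from bytes on both interpreters. A small sketch with a placeholder command standing in for the real 'repo info' call:

    import subprocess

    out = subprocess.check_output(['echo', 'Local Branches: [example-branch]'])
    if not hasattr(out, 'encode'):    # bytes has no encode() on Python 3
        out = out.decode()
    for line in out.splitlines():     # iterate lines, not characters
        print(line)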
diff --git a/tools/roomservice.py b/tools/roomservice.py
index 01a7154..ee8e5d8 100755
--- a/tools/roomservice.py
+++ b/tools/roomservice.py
@@ -38,7 +38,7 @@ except ImportError:
from xml.etree import ElementTree
-product = sys.argv[1];
+product = sys.argv[1]
if len(sys.argv) > 2:
depsonly = sys.argv[2]
@@ -59,7 +59,8 @@ try:
authtuple = netrc.netrc().authenticators("api.github.com")
if authtuple:
- githubauth = base64.encodestring('%s:%s' % (authtuple[0], authtuple[2])).replace('\n', '')
+ auth_string = ('%s:%s' % (authtuple[0], authtuple[2])).encode()
+ githubauth = base64.encodestring(auth_string).decode().replace('\n', '')
else:
githubauth = None
except:
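
The encode()/decode() pair above is needed because the base64 routines in Python 3 accept and return bytes, not text. base64.encodestring is also deprecated (and removed in Python 3.9), so here is a sketch of the same idea with b64encode, which additionally avoids the trailing newline that the patch strips with replace('\n', ''); the credentials are placeholders:

    import base64

    user, token = "example-user", "example-token"          # placeholder credentials
    auth_string = ('%s:%s' % (user, token)).encode()        # bytes in, as base64 requires
    githubauth = base64.b64encode(auth_string).decode()     # str out, ready for a header value
    print(githubauth)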
diff --git a/tools/warn.py b/tools/warn.py
index 8097123..b5a49f6 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -1,11 +1,13 @@
#!/usr/bin/env python
# This file uses the following encoding: utf-8
+from __future__ import print_function
+
import sys
import re
if len(sys.argv) == 1:
- print 'usage: ' + sys.argv[0] + ' <build.log>'
+ print('usage: ' + sys.argv[0] + ' <build.log>')
sys.exit()
# if you add another level, don't forget to give it a color below
@@ -399,7 +401,7 @@ cur_row_color = 0
row_colors = [ 'e0e0e0', 'd0d0d0' ]
def output(text):
- print text,
+ print(text, end=' ')
def htmlbig(param):
return '<font size="+2">' + param + '</font>'
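
The __future__ import added at the top of warn.py must precede every other statement in the module (only the docstring, comments, and blank lines may come first) and makes print a function on Python 2 as well, which is what lets the keyword forms used across this patch run unchanged on both interpreters: end=' ' reproduces the old trailing-comma behaviour, and file=sys.stderr replaces the old "print >> sys.stderr" form. A brief sketch with hypothetical helper names:

    from __future__ import print_function   # must come before any other statement

    import sys

    def output(text):
        print(text, end=' ')                 # no newline, single trailing space

    def report_error(msg):
        print(msg, file=sys.stderr)          # replaces "print >> sys.stderr, msg"

    output("two words")
    output("on one line")
    print()
    report_error("written to stderr")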