author     Doug Zongker <dougz@google.com>  2014-10-16 05:52:53 +0000
committer  Android Git Automerger <android-git-automerger@android.com>  2014-10-16 05:52:53 +0000
commit     641bfeddf32e902842bcbcfa76d9a9c68b822659 (patch)
tree       a86004101ed4357c595cc297626100541204d61d
parent     21f527bed81ecfe7b90098edb7076983d118099d (diff)
parent     e18eb50850201e573084a0cd8e1199d877170b64 (diff)
am e18eb508: add support for the 'fill' chunk type to sparse_img
* commit 'e18eb50850201e573084a0cd8e1199d877170b64': add support for the 'fill' chunk type to sparse_img
-rw-r--r--  tools/releasetools/sparse_img.py  43
1 file changed, 29 insertions, 14 deletions
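
The Android sparse image format stores a partition image as a sequence of chunks: 0xCAC1 marks raw data, 0xCAC2 marks a "fill" chunk whose payload is a single 4-byte pattern repeated across every block in the chunk, and 0xCAC3 marks a "don't care" chunk. The sketch below is illustrative only (it is not part of this patch) and assumes the standard 12-byte chunk header layout that sparse_img.py unpacks with struct:

import struct

# Illustrative sketch, not part of this change: decode one 12-byte chunk
# header from an Android sparse image.  Assumed layout: chunk type (u16),
# reserved (u16), chunk size in blocks (u32), total size in bytes (u32).
CHUNK_HEADER = struct.Struct("<2H2I")
RAW_CHUNK, FILL_CHUNK, DONT_CARE_CHUNK = 0xCAC1, 0xCAC2, 0xCAC3

def read_chunk_header(f):
  chunk_type, _reserved, chunk_sz, total_sz = CHUNK_HEADER.unpack(f.read(12))
  data_sz = total_sz - 12   # payload bytes that follow the header
  return chunk_type, chunk_sz, data_sz
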
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 6b70fe1..7574747 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -77,12 +77,16 @@ class SparseImage(object):
         else:
           care_data.append(pos)
           care_data.append(pos + chunk_sz)
-          offset_map.append((pos, chunk_sz, f.tell()))
+          offset_map.append((pos, chunk_sz, f.tell(), None))
           pos += chunk_sz
           f.seek(data_sz, os.SEEK_CUR)
 
       elif chunk_type == 0xCAC2:
-        raise ValueError("Fill chunks are not supported")
+        fill_data = f.read(4)
+        care_data.append(pos)
+        care_data.append(pos + chunk_sz)
+        offset_map.append((pos, chunk_sz, None, fill_data))
+        pos += chunk_sz
 
       elif chunk_type == 0xCAC3:
         if data_sz != 0:
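
The first hunk widens each offset_map entry from a 3-tuple to a 4-tuple: raw chunks record their file offset and carry None for the fill pattern, while fill chunks record the 4-byte pattern and carry None for the file offset. A small illustration of the two shapes (the concrete values below are made up):

# Made-up entries mirroring the two shapes stored in offset_map after this
# hunk: (block position, length in blocks, file offset or None, fill pattern
# or None).
raw_entry  = (0,   8, 28,   None)         # blocks 0-7 live in the file at offset 28
fill_entry = (8, 120, None, b"\0\0\0\0")  # blocks 8-127 repeat a 4-byte pattern

def is_fill(entry):
  # A fill chunk is recognised by its missing file offset.
  _pos, _chunk_sz, filepos, _fill_data = entry
  return filepos is None

assert not is_fill(raw_entry)
assert is_fill(fill_entry)
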
@@ -130,24 +134,29 @@ class SparseImage(object):
     for s, e in ranges:
       to_read = e-s
       idx = bisect.bisect_right(self.offset_index, s) - 1
-      chunk_start, chunk_len, filepos = self.offset_map[idx]
+      chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
 
       # for the first chunk we may be starting partway through it.
-      p = filepos + ((s - chunk_start) * self.blocksize)
       remain = chunk_len - (s - chunk_start)
-
-      f.seek(p, os.SEEK_SET)
       this_read = min(remain, to_read)
-      yield f.read(this_read * self.blocksize)
+      if filepos is not None:
+        p = filepos + ((s - chunk_start) * self.blocksize)
+        f.seek(p, os.SEEK_SET)
+        yield f.read(this_read * self.blocksize)
+      else:
+        yield fill_data * (this_read * (self.blocksize >> 2))
       to_read -= this_read
 
       while to_read > 0:
         # continue with following chunks if this range spans multiple chunks.
         idx += 1
-        chunk_start, chunk_len, filepos = self.offset_map[idx]
-        f.seek(filepos, os.SEEK_SET)
+        chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
         this_read = min(chunk_len, to_read)
-        yield f.read(this_read * self.blocksize)
+        if filepos is not None:
+          f.seek(filepos, os.SEEK_SET)
+          yield f.read(this_read * self.blocksize)
+        else:
+          yield fill_data * (this_read * (self.blocksize >> 2))
         to_read -= this_read
 
   def LoadFileBlockMap(self, fn):
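
When a range lands in a fill chunk, the reader reconstructs the data instead of seeking: fill_data is 4 bytes long, so self.blocksize >> 2 is the number of copies needed per block, and multiplying the bytes string replicates the pattern. A worked example under assumed values (4096-byte blocks, two blocks to read):

# Assumed values for illustration only.
blocksize = 4096
fill_data = b"\xde\xad\xbe\xef"   # 4-byte pattern read from the fill chunk
this_read = 2                     # number of blocks to produce

# blocksize >> 2 is the number of 4-byte words per block, so this yields
# exactly this_read blocks of the repeated pattern.
out = fill_data * (this_read * (blocksize >> 2))
assert len(out) == this_read * blocksize
assert out[:8] == fill_data * 2
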
@@ -177,10 +186,16 @@ class SparseImage(object):
     for s, e in remaining:
       for b in range(s, e):
         idx = bisect.bisect_right(self.offset_index, b) - 1
-        chunk_start, chunk_len, filepos = self.offset_map[idx]
-        filepos += (b-chunk_start) * self.blocksize
-        f.seek(filepos, os.SEEK_SET)
-        data = f.read(self.blocksize)
+        chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
+        if filepos is not None:
+          filepos += (b-chunk_start) * self.blocksize
+          f.seek(filepos, os.SEEK_SET)
+          data = f.read(self.blocksize)
+        else:
+          if fill_data == reference[:4]:  # fill with all zeros
+            data = reference
+          else:
+            data = None
 
         if data == reference:
           zero_blocks.append(b)
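
The last hunk is the zero-block split in LoadFileBlockMap: for raw chunks the block is read from the file and compared against a block of zeros (reference), while for fill chunks the comparison can be done on the 4-byte pattern alone; a non-zero pattern sets data to None so the block is never counted as zero. A sketch of that shortcut, assuming a 4096-byte block size:

# Sketch of the fill-chunk shortcut above; the block size is an assumption.
blocksize = 4096
reference = b"\0" * blocksize   # the all-zero block used for comparison

def block_from_fill(fill_data):
  # Only an all-zero pattern can expand to the all-zero reference block;
  # anything else cannot match, so report None instead of expanding it.
  return reference if fill_data == reference[:4] else None

assert block_from_fill(b"\0\0\0\0") == reference
assert block_from_fill(b"\xff\xff\xff\xff") is None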