from collections import deque, OrderedDict
from hashlib import sha1
+import common
import heapq
import itertools
import multiprocessing
if free_string:
out.append("".join(free_string))
- # sanity check: abort if we're going to need more than 512 MB if
- # stash space
- assert max_stashed_blocks * self.tgt.blocksize < (512 << 20)
+ if self.version >= 2:
+      # Sanity check: abort if we're going to need more stash space than
+      # the allowed size (cache_size * threshold). The threshold serves two
+      # purposes: a) part of the cache may already be occupied by recovery
+      # logs; b) it leaves some headroom for dealing with oversize issues.
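+      # For example (illustrative numbers only): with a 512 MiB cache
+      # partition and the default 0.8 threshold, the allowed stash size
+      # would be roughly 410 MiB.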
+ cache_size = common.OPTIONS.cache_size
+ stash_threshold = common.OPTIONS.stash_threshold
+ max_allowed = cache_size * stash_threshold
+ assert max_stashed_blocks * self.tgt.blocksize < max_allowed, \
+ 'Stash size %d (%d * %d) exceeds the limit %d (%d * %.2f)' % (
+ max_stashed_blocks * self.tgt.blocksize, max_stashed_blocks,
+ self.tgt.blocksize, max_allowed, cache_size,
+ stash_threshold)
# Zero out extended blocks as a workaround for bug 20881595.
if self.tgt.extended:
f.write(i)
if self.version >= 2:
- print("max stashed blocks: %d (%d bytes)\n" % (
- max_stashed_blocks, max_stashed_blocks * self.tgt.blocksize))
+ max_stashed_size = max_stashed_blocks * self.tgt.blocksize
+ max_allowed = common.OPTIONS.cache_size * common.OPTIONS.stash_threshold
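+      # Report the peak stash usage against the allowed limit, in bytes and
+      # as a percentage, so it is easy to see how close this package comes
+      # to the cap.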
+ print("max stashed blocks: %d (%d bytes), limit: %d bytes (%.2f%%)\n" % (
+ max_stashed_blocks, max_stashed_size, max_allowed,
+ max_stashed_size * 100.0 / max_allowed))
def ComputePatches(self, prefix):
print("Reticulating splines...")
Specifies the number of worker-threads that will be used when
generating patches for incremental updates (defaults to 3).
+ --stash_threshold <float>
+      Specifies the threshold used to compute the maximum allowed stash
+      size, as a fraction of the cache partition size (defaults to 0.8).
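+      For example, "--stash_threshold 0.6" caps the stash at 60% of the
+      cache partition size.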
"""
import sys
OPTIONS.oem_source = None
OPTIONS.fallback_to_full = True
OPTIONS.full_radio = False
+# Stash size cannot exceed cache_size * threshold.
+OPTIONS.cache_size = None
+OPTIONS.stash_threshold = 0.8
+
def MostPopularKey(d, default):
"""Given a dict, return the key corresponding to the largest
OPTIONS.updater_binary = a
elif o in ("--no_fallback_to_full",):
OPTIONS.fallback_to_full = False
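+    # Optional override for the default stash threshold (0.8); the value
+    # must parse as a float.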
+ elif o == "--stash_threshold":
+ try:
+ OPTIONS.stash_threshold = float(a)
+ except ValueError:
+ raise ValueError("Cannot parse value %r for option %r - expecting "
+ "a float" % (a, o))
else:
return False
return True
"oem_settings=",
"verify",
"no_fallback_to_full",
+ "stash_threshold=",
], extra_option_handler=option_handler)
if len(args) != 2:
output_zip = zipfile.ZipFile(temp_zip_file, "w",
compression=zipfile.ZIP_DEFLATED)
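+  # The stash limit check needs the cache partition size; read it from the
+  # target-files info dict and fail early if it is missing.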
+ cache_size = OPTIONS.info_dict.get("cache_size", None)
+ if cache_size is None:
+ raise RuntimeError("can't determine the cache partition size")
+ OPTIONS.cache_size = cache_size
+
if OPTIONS.incremental_source is None:
WriteFullOTAPackage(input_zip, output_zip)
if OPTIONS.package_key is None: