Merge "Fix lint errors in ota_from_target_files script"

Kelvin Zhang, 2020-06-24 19:17:35 +00:00 (committed by Gerrit Code Review)
commit c8b4784cb9
3 changed files with 81 additions and 74 deletions

tools/releasetools/common.py

@ -17,6 +17,7 @@ from __future__ import print_function
import base64
import collections
import copy
import datetime
import errno
import fnmatch
import getopt
@ -53,16 +54,17 @@ class Options(object):
# running this function, user-supplied search path (`--path`) hasn't been
# available. So the value set here is the default, which might be overridden
# by commandline flag later.
exec_path = sys.argv[0]
exec_path = os.path.realpath(sys.argv[0])
if exec_path.endswith('.py'):
script_name = os.path.basename(exec_path)
# logger hasn't been initialized yet at this point. Use print to output
# warnings.
print(
'Warning: releasetools script should be invoked as hermetic Python '
'executable -- build and run `{}` directly.'.format(script_name[:-3]),
'executable -- build and run `{}` directly.'.format(
script_name[:-3]),
file=sys.stderr)
self.search_path = os.path.realpath(os.path.join(os.path.dirname(exec_path), '..'))
self.search_path = os.path.dirname(os.path.dirname(exec_path))
self.signapk_path = "framework/signapk.jar" # Relative to search_path
self.signapk_shared_library_path = "lib64" # Relative to search_path
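
With `sys.argv[0]` resolved through `realpath` up front, the default search path reduces to two `dirname` calls instead of joining with `..`. A minimal sketch of the derivation (the executable path below is illustrative, not taken from the commit):

import os

# Stand-in for os.path.realpath(sys.argv[0]); the real value depends on how
# the hermetic Python executable was invoked.
exec_path = "/work/out/host/linux-x86/bin/ota_from_target_files"
search_path = os.path.dirname(os.path.dirname(exec_path))
print(search_path)  # /work/out/host/linux-x86 -- framework/signapk.jar is resolved against this
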
@ -191,11 +193,11 @@ def InitLogging():
if OPTIONS.logfile:
config = copy.deepcopy(config)
config['handlers']['logfile'] = {
'class': 'logging.FileHandler',
'formatter': 'standard',
'level': 'INFO',
'mode': 'w',
'filename': OPTIONS.logfile,
'class': 'logging.FileHandler',
'formatter': 'standard',
'level': 'INFO',
'mode': 'w',
'filename': OPTIONS.logfile,
}
config['loggers']['']['handlers'].append('logfile')
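
The re-indented handler entry is in `logging.config.dictConfig` form. A minimal, self-contained sketch of attaching an optional logfile handler in the same shape (the base config, formatter string, and levels are assumptions for illustration, not values from the script):

import copy
import logging
import logging.config

config = {
    'version': 1,
    'formatters': {'standard': {'format': '%(asctime)s %(levelname)-8s %(message)s'}},
    'handlers': {'default': {'class': 'logging.StreamHandler',
                             'formatter': 'standard',
                             'level': 'WARNING'}},
    'loggers': {'': {'handlers': ['default'], 'level': 'INFO'}},
}

logfile = 'build.log'  # stands in for OPTIONS.logfile
if logfile:
    config = copy.deepcopy(config)
    config['handlers']['logfile'] = {
        'class': 'logging.FileHandler',
        'formatter': 'standard',
        'level': 'INFO',
        'mode': 'w',
        'filename': logfile,
    }
    config['loggers']['']['handlers'].append('logfile')

logging.config.dictConfig(config)
logging.getLogger(__name__).info('verbose logs go to %s', logfile)
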
@ -224,7 +226,7 @@ def Run(args, verbose=None, **kwargs):
if 'universal_newlines' not in kwargs:
kwargs['universal_newlines'] = True
# Don't log any if caller explicitly says so.
if verbose != False:
if verbose:
logger.info(" Running: \"%s\"", " ".join(args))
return subprocess.Popen(args, **kwargs)
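
Note that replacing `verbose != False` with a plain truthiness test is not purely cosmetic: the default `verbose=None` used to log the command line and now stays silent. A standalone sketch of the difference:

def old_should_log(verbose=None):
    # Old condition: log unless the caller passed verbose=False explicitly.
    return verbose != False  # noqa: E712 -- the exact comparison is the point

def new_should_log(verbose=None):
    # New condition: log only for a truthy value, so None is now silent.
    return bool(verbose)

for value in (None, True, False):
    print(value, old_should_log(value), new_should_log(value))
# None  True  False   <- behaviour changes for the default argument
# True  True  True
# False False False
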
@ -274,7 +276,7 @@ def RunAndCheckOutput(args, verbose=None, **kwargs):
if output is None:
output = ""
# Don't log any if caller explicitly says so.
if verbose != False:
if verbose:
logger.info("%s", output.rstrip())
if proc.returncode != 0:
raise ExternalError(
@ -375,7 +377,6 @@ class BuildInfo(object):
'Invalid build fingerprint: "{}". See the requirement in Android CDD '
"3.2.2. Build Parameters.".format(fingerprint))
self._partition_fingerprints = {}
for partition in PARTITIONS_WITH_CARE_MAP:
try:
@ -522,7 +523,8 @@ class BuildInfo(object):
self.GetPartitionBuildProp("ro.product.device", partition),
self.GetPartitionBuildProp("ro.build.version.release", partition),
self.GetPartitionBuildProp("ro.build.id", partition),
self.GetPartitionBuildProp("ro.build.version.incremental", partition),
self.GetPartitionBuildProp(
"ro.build.version.incremental", partition),
self.GetPartitionBuildProp("ro.build.type", partition),
self.GetPartitionBuildProp("ro.build.tags", partition))
@ -683,7 +685,7 @@ def LoadInfoDict(input_file, repacking=False):
if "boot_images" in d:
boot_images = d["boot_images"]
for b in boot_images.split():
makeint(b.replace(".img","_size"))
makeint(b.replace(".img", "_size"))
# Load recovery fstab if applicable.
d["fstab"] = _FindAndLoadRecoveryFstab(d, input_file, read_helper)
@ -703,7 +705,7 @@ def LoadInfoDict(input_file, repacking=False):
for partition in PARTITIONS_WITH_CARE_MAP:
fingerprint = build_info.GetPartitionFingerprint(partition)
if fingerprint:
d["avb_{}_salt".format(partition)] = sha256(fingerprint).hexdigest()
d["avb_{}_salt".format(partition)] = sha256(fingerprint.encode()).hexdigest()
return d
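
Encoding the fingerprint before hashing is what Python 3's `hashlib` requires, since it accepts only bytes-like input. A standalone sketch (the fingerprint value is illustrative):

from hashlib import sha256

fingerprint = "google/flame/flame:11/RP1A.200720.009/6720564:user/release-keys"
salt = sha256(fingerprint.encode()).hexdigest()
print(salt)
# Passing the str directly raises TypeError under Python 3
# (strings must be encoded before hashing).
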
@ -749,6 +751,7 @@ class PartitionBuildProps(object):
placeholders in the build.prop file. We expect exactly one value for
each of the variables.
"""
def __init__(self, input_file, name, placeholder_values=None):
self.input_file = input_file
self.partition = name
@ -808,7 +811,7 @@ class PartitionBuildProps(object):
"""Parses the build prop in a given import statement."""
tokens = line.split()
if tokens[0] != 'import' or (len(tokens) != 2 and len(tokens) != 3) :
if tokens[0] != 'import' or (len(tokens) != 2 and len(tokens) != 3):
raise ValueError('Unrecognized import statement {}'.format(line))
if len(tokens) == 3:
@ -998,9 +1001,9 @@ def MergeDynamicPartitionInfoDicts(framework_dict, vendor_dict):
# Pick virtual ab related flags from vendor dict, if defined.
if "virtual_ab" in vendor_dict.keys():
merged_dict["virtual_ab"] = vendor_dict["virtual_ab"]
merged_dict["virtual_ab"] = vendor_dict["virtual_ab"]
if "virtual_ab_retrofit" in vendor_dict.keys():
merged_dict["virtual_ab_retrofit"] = vendor_dict["virtual_ab_retrofit"]
merged_dict["virtual_ab_retrofit"] = vendor_dict["virtual_ab_retrofit"]
return merged_dict
@ -1234,7 +1237,7 @@ def _BuildBootableImage(image_name, sourcedir, fs_config_file, info_dict=None,
kernel = "kernel"
else:
kernel = image_name.replace("boot", "kernel")
kernel = kernel.replace(".img","")
kernel = kernel.replace(".img", "")
if not os.access(os.path.join(sourcedir, kernel), os.F_OK):
return None
@ -1358,7 +1361,7 @@ def _BuildBootableImage(image_name, sourcedir, fs_config_file, info_dict=None,
if partition_name == "recovery":
part_size = info_dict["recovery_size"]
else:
part_size = info_dict[image_name.replace(".img","_size")]
part_size = info_dict[image_name.replace(".img", "_size")]
cmd = [avbtool, "add_hash_footer", "--image", img.name,
"--partition_size", str(part_size), "--partition_name",
partition_name]
@ -1511,7 +1514,8 @@ def GetVendorBootImage(name, prebuilt_name, unpack_dir, tree_subdir,
if info_dict is None:
info_dict = OPTIONS.info_dict
data = _BuildVendorBootImage(os.path.join(unpack_dir, tree_subdir), info_dict)
data = _BuildVendorBootImage(
os.path.join(unpack_dir, tree_subdir), info_dict)
if data:
return File(name, data)
return None
@ -1520,7 +1524,7 @@ def GetVendorBootImage(name, prebuilt_name, unpack_dir, tree_subdir,
def Gunzip(in_filename, out_filename):
"""Gunzips the given gzip compressed file to a given output file."""
with gzip.open(in_filename, "rb") as in_file, \
open(out_filename, "wb") as out_file:
open(out_filename, "wb") as out_file:
shutil.copyfileobj(in_file, out_file)
@ -1622,8 +1626,7 @@ def GetUserImage(which, tmpdir, input_zip,
if reset_file_map:
img.ResetFileMap()
return img
else:
return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None):
@ -1822,10 +1825,9 @@ def GetMinSdkVersionInt(apk_name, codename_to_api_level_map):
# Not a decimal number. Codename?
if version in codename_to_api_level_map:
return codename_to_api_level_map[version]
else:
raise ExternalError(
"Unknown minSdkVersion: '{}'. Known codenames: {}".format(
version, codename_to_api_level_map))
raise ExternalError(
"Unknown minSdkVersion: '{}'. Known codenames: {}".format(
version, codename_to_api_level_map))
def SignFile(input_name, output_name, key, password, min_api_level=None,
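
The unconditional `raise` after the codename lookup is equivalent to the old `else` branch, because a successful lookup returns first. A simplified stand-in for the lookup order (the version string is passed in directly here, the codename map entry is illustrative, and `ValueError` stands in for `ExternalError`):

def min_sdk_version_int(version, codename_to_api_level_map):
    try:
        return int(version)
    except ValueError:
        pass
    # Not a decimal number -- maybe a codename.
    if version in codename_to_api_level_map:
        return codename_to_api_level_map[version]
    raise ValueError(
        "Unknown minSdkVersion: '{}'. Known codenames: {}".format(
            version, codename_to_api_level_map))

print(min_sdk_version_int("29", {"R": 30}))  # 29
print(min_sdk_version_int("R", {"R": 30}))   # 30
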
@ -1930,7 +1932,8 @@ def CheckSize(data, target, info_dict):
msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
if pct >= 99.0:
raise ExternalError(msg)
elif pct >= 95.0:
if pct >= 95.0:
logger.warning("\n WARNING: %s\n", msg)
else:
logger.info(" %s", msg)
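
Since the 99% branch raises, turning the `elif` into a plain `if` does not change which message is emitted. A sketch of the threshold ladder (the target name and limit are illustrative, and `RuntimeError` stands in for `ExternalError`):

def check_size(size, limit, target="system.img"):
    pct = float(size) * 100.0 / limit
    msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
    if pct >= 99.0:
        raise RuntimeError(msg)   # hard failure, nothing below runs
    if pct >= 95.0:               # only reached when pct < 99.0
        print("WARNING:", msg)
    else:
        print(msg)

check_size(96, 100)  # warns
check_size(50, 100)  # informational only
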
@ -2040,6 +2043,7 @@ Global options
Put verbose logs to specified file (regardless of --verbose option.)
"""
def Usage(docstring):
print(docstring.rstrip("\n"))
print(COMMON_DOCSTRING)
@ -2202,7 +2206,7 @@ class PasswordManager(object):
current = self.UpdateAndReadFile(current)
def PromptResult(self, current): # pylint: disable=no-self-use
def PromptResult(self, current): # pylint: disable=no-self-use
"""Prompt the user to enter a value (password) for each key in
'current' whose value is false. Returns a new dict with all the
values.
@ -2265,7 +2269,6 @@ class PasswordManager(object):
def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
compress_type=None):
import datetime
# http://b/18015246
# Python 2.7's zipfile implementation wrongly thinks that zip64 is required
@ -2391,6 +2394,7 @@ def ZipClose(zip_file):
class DeviceSpecificParams(object):
module = None
def __init__(self, **kwargs):
"""Keyword arguments to the constructor become attributes of this
object, which is passed to all functions in the device-specific
@ -2519,12 +2523,12 @@ class File(object):
DIFF_PROGRAM_BY_EXT = {
".gz" : "imgdiff",
".zip" : ["imgdiff", "-z"],
".jar" : ["imgdiff", "-z"],
".apk" : ["imgdiff", "-z"],
".img" : "imgdiff",
}
".gz": "imgdiff",
".zip": ["imgdiff", "-z"],
".jar": ["imgdiff", "-z"],
".apk": ["imgdiff", "-z"],
".img": "imgdiff",
}
class Difference(object):
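
The reformatted table maps file extensions to an imgdiff invocation. A hedged usage sketch (the `bsdiff` fallback and the helper name are assumptions for illustration, not code from this diff):

import os

DIFF_PROGRAM_BY_EXT = {
    ".gz": "imgdiff",
    ".zip": ["imgdiff", "-z"],
    ".jar": ["imgdiff", "-z"],
    ".apk": ["imgdiff", "-z"],
    ".img": "imgdiff",
}

def diff_program_for(path, default="bsdiff"):
    # Assumed fallback: anything without a special-cased extension gets a
    # plain binary diff tool.
    ext = os.path.splitext(path)[1]
    prog = DIFF_PROGRAM_BY_EXT.get(ext, default)
    return prog if isinstance(prog, list) else [prog]

print(diff_program_for("framework.jar"))  # ['imgdiff', '-z']
print(diff_program_for("build.prop"))     # ['bsdiff']
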
@ -2563,6 +2567,7 @@ class Difference(object):
cmd.append(ptemp.name)
p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
err = []
def run():
_, e = p.communicate()
if e:
@ -2591,7 +2596,6 @@ class Difference(object):
self.patch = diff
return self.tf, self.sf, self.patch
def GetPatch(self):
"""Returns a tuple of (target_file, source_file, patch_data).
@ -2902,7 +2906,7 @@ class BlockDifference(object):
new_data_name=new_data_name, code=code))
script.AppendExtra(script.WordWrap(call))
def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
data = source.ReadRangeSet(ranges)
ctx = sha1()
@ -2911,7 +2915,7 @@ class BlockDifference(object):
return ctx.hexdigest()
def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
"""Return the hash value for all zero blocks."""
zero_block = '\x00' * 4096
ctx = sha1()
@ -2934,6 +2938,7 @@ PARTITION_TYPES = {
"squashfs": "EMMC"
}
def GetTypeAndDevice(mount_point, info, check_no_slot=True):
"""
Use GetTypeAndDeviceExpr whenever possible. This function is kept for
@ -2944,11 +2949,10 @@ def GetTypeAndDevice(mount_point, info, check_no_slot=True):
if fstab:
if check_no_slot:
assert not fstab[mount_point].slotselect, \
"Use GetTypeAndDeviceExpr instead"
"Use GetTypeAndDeviceExpr instead"
return (PARTITION_TYPES[fstab[mount_point].fs_type],
fstab[mount_point].device)
else:
raise KeyError
raise KeyError
def GetTypeAndDeviceExpr(mount_point, info):
@ -2963,8 +2967,7 @@ def GetTypeAndDeviceExpr(mount_point, info):
if p.slotselect:
device_expr = 'add_slot_suffix(%s)' % device_expr
return (PARTITION_TYPES[fstab[mount_point].fs_type], device_expr)
else:
raise KeyError
raise KeyError
def GetEntryForDevice(fstab, device):
@ -2979,6 +2982,7 @@ def GetEntryForDevice(fstab, device):
return fstab[mount_point]
return None
def ParseCertificate(data):
"""Parses and converts a PEM-encoded certificate into DER-encoded.
@ -3305,7 +3309,7 @@ class DynamicPartitionsDifference(object):
for p, u in self._partition_updates.items():
if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
u.block_difference.WritePostInstallVerifyScript(script)
script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
for p, u in self._partition_updates.items():
if u.tgt_size and u.src_size <= u.tgt_size:
@ -3313,7 +3317,7 @@ class DynamicPartitionsDifference(object):
u.block_difference.WriteScript(script, output_zip, progress=u.progress,
write_verify_script=write_verify_script)
if write_verify_script:
script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
script.Comment('--- End patching dynamic partitions ---')
@ -3370,7 +3374,8 @@ class DynamicPartitionsDifference(object):
for p, u in self._partition_updates.items():
if u.tgt_size and u.src_size < u.tgt_size:
comment('Grow partition %s from %d to %d' % (p, u.src_size, u.tgt_size))
comment('Grow partition %s from %d to %d' %
(p, u.src_size, u.tgt_size))
append('resize %s %d' % (p, u.tgt_size))
for p, u in self._partition_updates.items():

tools/releasetools/ota_from_target_files.py

@ -221,8 +221,10 @@ import zipfile
import check_target_files_vintf
import common
import edify_generator
import target_files_diff
import verity_utils
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
@ -545,10 +547,10 @@ def HasRecoveryPatch(target_files_zip, info_dict):
target_files_dir = "SYSTEM/vendor"
patch = "%s/recovery-from-boot.p" % target_files_dir
img = "%s/etc/recovery.img" %target_files_dir
img = "%s/etc/recovery.img" % target_files_dir
namelist = [name for name in target_files_zip.namelist()]
return (patch in namelist or img in namelist)
namelist = target_files_zip.namelist()
return patch in namelist or img in namelist
def HasPartition(target_files_zip, partition):
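
`ZipFile.namelist()` already returns a list, so dropping the comprehension in HasRecoveryPatch removes a needless copy without changing the membership test. A runnable sketch against an in-memory archive (the archive contents are illustrative):

import io
import zipfile

# Build a tiny in-memory target-files archive so the check is self-contained.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("SYSTEM/vendor/etc/recovery.img", b"")

with zipfile.ZipFile(buf) as target_files_zip:
    namelist = target_files_zip.namelist()
    patch = "SYSTEM/vendor/recovery-from-boot.p"
    img = "SYSTEM/vendor/etc/recovery.img"
    print(patch in namelist or img in namelist)  # True
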
@ -626,7 +628,8 @@ def GetBlockDifferences(target_zip, source_zip, target_info, source_info,
def GetIncrementalBlockDifferenceForPartition(name):
if not HasPartition(source_zip, name):
raise RuntimeError("can't generate incremental that adds {}".format(name))
raise RuntimeError(
"can't generate incremental that adds {}".format(name))
partition_src = common.GetUserImage(name, OPTIONS.source_tmp, source_zip,
info_dict=source_info,
@ -637,8 +640,7 @@ def GetBlockDifferences(target_zip, source_zip, target_info, source_info,
partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip,
info_dict=target_info,
allow_shared_blocks=allow_shared_blocks,
hashtree_info_generator=
hashtree_info_generator)
hashtree_info_generator=hashtree_info_generator)
# Check the first block of the source system partition for remount R/W only
# if the filesystem is ext4.
@ -1450,7 +1452,7 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
fs = source_info["fstab"]["/misc"]
assert fs.fs_type.upper() == "EMMC", \
"two-step packages only supported on devices with EMMC /misc partitions"
bcb_dev = {"bcb_dev" : fs.device}
bcb_dev = {"bcb_dev": fs.device}
common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
script.AppendExtra("""
if get_stage("%(bcb_dev)s") == "2/3" then
@ -1668,7 +1670,7 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
partitions = [partition for partition in partitions if partition
not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
output_list.append('{}={}'.format(key, ' '.join(partitions)))
elif key == 'virtual_ab' or key == "virtual_ab_retrofit":
elif key in ['virtual_ab', "virtual_ab_retrofit"]:
# Remove virtual_ab flag from secondary payload so that OTA client
# doesn't use snapshots for secondary update
pass
@ -1712,7 +1714,8 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
partition_list = f.read().splitlines()
partition_list = [partition for partition in partition_list if partition
and partition not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
common.ZipWriteStr(target_zip, info.filename, '\n'.join(partition_list))
common.ZipWriteStr(target_zip, info.filename,
'\n'.join(partition_list))
# Remove the unnecessary partitions from the dynamic partitions list.
elif (info.filename == 'META/misc_info.txt' or
info.filename == DYNAMIC_PARTITION_INFO):
@ -1795,7 +1798,8 @@ def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
"{} is in super_block_devices but not in {}".format(
super_device_not_updated, AB_PARTITIONS)
# ab_partitions -= (dynamic_partition_list - super_block_devices)
new_ab_partitions = common.MakeTempFile(prefix="ab_partitions", suffix=".txt")
new_ab_partitions = common.MakeTempFile(
prefix="ab_partitions", suffix=".txt")
with open(new_ab_partitions, 'w') as f:
for partition in ab_partitions:
if (partition in dynamic_partition_list and
@ -1985,7 +1989,7 @@ def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
OPTIONS.source_tmp = common.UnzipTemp(
OPTIONS.incremental_source, UNZIP_PATTERN)
with zipfile.ZipFile(target_file) as input_zip, \
zipfile.ZipFile(source_file) as source_zip:
zipfile.ZipFile(source_file) as source_zip:
WriteBlockIncrementalOTAPackage(
input_zip,
source_zip,
@ -2245,7 +2249,6 @@ def main(argv):
OPTIONS.incremental_source, TARGET_DIFFING_UNZIP_PATTERN)
with open(OPTIONS.log_diff, 'w') as out_file:
import target_files_diff
target_files_diff.recursiveDiff(
'', source_dir, target_dir, out_file)

tools/releasetools/sign_target_files_apks.py

@ -608,22 +608,22 @@ def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
elif (OPTIONS.remove_avb_public_keys and
(filename.startswith("BOOT/RAMDISK/avb/") or
filename.startswith("BOOT/RAMDISK/first_stage_ramdisk/avb/"))):
matched_removal = False
for key_to_remove in OPTIONS.remove_avb_public_keys:
if filename.endswith(key_to_remove):
matched_removal = True
print("Removing AVB public key from ramdisk: %s" % filename)
break
if not matched_removal:
# Copy it verbatim if we don't want to remove it.
common.ZipWriteStr(output_tf_zip, out_info, data)
matched_removal = False
for key_to_remove in OPTIONS.remove_avb_public_keys:
if filename.endswith(key_to_remove):
matched_removal = True
print("Removing AVB public key from ramdisk: %s" % filename)
break
if not matched_removal:
# Copy it verbatim if we don't want to remove it.
common.ZipWriteStr(output_tf_zip, out_info, data)
# Skip verity keyid (for system_root_image use) if we will replace it.
elif OPTIONS.replace_verity_keyid and filename == "BOOT/cmdline":
pass
# Skip the care_map as we will regenerate the system/vendor images.
elif filename == "META/care_map.pb" or filename == "META/care_map.txt":
elif filename in ["META/care_map.pb", "META/care_map.txt"]:
pass
# Updates system_other.avbpubkey in /product/etc/.
@ -967,11 +967,10 @@ def ReplaceAvbSigningKeys(misc_info):
if extra_args:
print('Setting extra AVB signing args for %s to "%s"' % (
partition, extra_args))
if partition in AVB_FOOTER_ARGS_BY_PARTITION:
args_key = AVB_FOOTER_ARGS_BY_PARTITION[partition]
else:
# custom partition
args_key = "avb_{}_add_hashtree_footer_args".format(partition)
args_key = AVB_FOOTER_ARGS_BY_PARTITION.get(
partition,
# custom partition
"avb_{}_add_hashtree_footer_args".format(partition))
misc_info[args_key] = (misc_info.get(args_key, '') + ' ' + extra_args)
for partition in AVB_FOOTER_ARGS_BY_PARTITION:
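
Collapsing the if/else into `dict.get()` with a computed default keeps the custom-partition fallback; the only behavioural nuance is that the fallback string is now built even for known partitions, which is harmless. A sketch with an illustrative subset of the table:

# Illustrative subset; the full AVB_FOOTER_ARGS_BY_PARTITION lives in
# sign_target_files_apks.py.
AVB_FOOTER_ARGS_BY_PARTITION = {
    "boot": "avb_boot_add_hash_footer_args",
    "system": "avb_system_add_hashtree_footer_args",
}

def args_key_for(partition):
    # Known partitions use their fixed misc_info key; anything else is treated
    # as a custom partition with a hashtree footer.
    return AVB_FOOTER_ARGS_BY_PARTITION.get(
        partition, "avb_{}_add_hashtree_footer_args".format(partition))

print(args_key_for("boot"))      # avb_boot_add_hash_footer_args
print(args_key_for("oem_dlkm"))  # avb_oem_dlkm_add_hashtree_footer_args
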