forked from openkylin/platform_build
Merge "releasetools: Clean up additional modules for Python 3 compatibility." am: 90f638d842
am: c82d8ceffc
Change-Id: I84e7866b06ff693240272b39119d0ab5cae7e72a
commit c515aa03b8
|
tools/releasetools/check_ota_package_signature.py

@@ -38,8 +38,7 @@ def CertUsesSha256(cert):
   """Check if the cert uses SHA-256 hashing algorithm."""
   cmd = ['openssl', 'x509', '-text', '-noout', '-in', cert]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  cert_dump, _ = p1.communicate()
+  cert_dump = common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
   algorithm = re.search(r'Signature Algorithm: ([a-zA-Z0-9]+)', cert_dump)
   assert algorithm, "Failed to identify the signature algorithm."
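Note: every hunk in this file replaces the common.Run() / communicate() / returncode triple with a single common.RunAndCheckOutput() call. A minimal standalone sketch of that pattern (run_and_check_output below is a hypothetical stand-in for illustration, not the actual releasetools helper):

    import subprocess

    def run_and_check_output(cmd):
      # universal_newlines=True decodes stdout to str on Python 3, matching
      # Python 2's behavior, so callers can keep using re.search()/split('\n').
      proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              universal_newlines=True)
      output, _ = proc.communicate()
      # Folding the returncode check into the helper removes the repeated
      # "assert p1.returncode == 0" boilerplate at every call site.
      if proc.returncode != 0:
        raise RuntimeError('Failed to run {}:\n{}'.format(cmd, output))
      return output

    print(run_and_check_output(['echo', 'ok']))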
@@ -69,13 +68,13 @@ def VerifyPackage(cert, package):
   print('Certificate: %s' % (cert,))
 
   # Read in the package.
-  with open(package) as package_file:
+  with open(package, 'rb') as package_file:
     package_bytes = package_file.read()
 
   length = len(package_bytes)
   assert length >= 6, "Not big enough to contain footer."
 
-  footer = [ord(x) for x in package_bytes[-6:]]
+  footer = bytearray(package_bytes[-6:])
   assert footer[2] == 0xff and footer[3] == 0xff, "Footer is wrong."
 
   signature_start_from_end = (footer[1] << 8) + footer[0]
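Note on the footer change: on Python 2 the file is read as str and iterating it yields one-character strings, hence the ord() calls; on Python 3 a binary read() returns bytes, whose elements are already ints, and ord() would raise TypeError. bytearray() indexes to ints under both interpreters. A minimal illustration:

    data = b'\x00\x0c\xff\xff\x08\x00'  # made-up 6-byte footer for illustration

    footer = bytearray(data[-6:])       # int elements on Python 2 and 3 alike
    assert footer[2] == 0xff and footer[3] == 0xff, "Footer is wrong."

    signature_start_from_end = (footer[1] << 8) + footer[0]
    print(signature_start_from_end)     # 3072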
@@ -111,31 +110,25 @@ def VerifyPackage(cert, package):
 
   # Parse the signature and get the hash.
   cmd = ['openssl', 'asn1parse', '-inform', 'DER', '-in', sig_file]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  sig, _ = p1.communicate()
-  assert p1.returncode == 0, "Failed to parse the signature."
+  sig = common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
-  digest_line = sig.strip().split('\n')[-1]
+  digest_line = sig.rstrip().split('\n')[-1]
   digest_string = digest_line.split(':')[3]
   digest_file = common.MakeTempFile(prefix='digest-')
   with open(digest_file, 'wb') as f:
-    f.write(digest_string.decode('hex'))
+    f.write(bytearray.fromhex(digest_string))
 
   # Verify the digest by outputing the decrypted result in ASN.1 structure.
   decrypted_file = common.MakeTempFile(prefix='decrypted-')
   cmd = ['openssl', 'rsautl', '-verify', '-certin', '-inkey', cert,
          '-in', digest_file, '-out', decrypted_file]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  p1.communicate()
-  assert p1.returncode == 0, "Failed to run openssl rsautl -verify."
+  common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
   # Parse the output ASN.1 structure.
   cmd = ['openssl', 'asn1parse', '-inform', 'DER', '-in', decrypted_file]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  decrypted_output, _ = p1.communicate()
-  assert p1.returncode == 0, "Failed to parse the output."
+  decrypted_output = common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
-  digest_line = decrypted_output.strip().split('\n')[-1]
+  digest_line = decrypted_output.rstrip().split('\n')[-1]
   digest_string = digest_line.split(':')[3].lower()
 
   # Verify that the two digest strings match.
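The strip() to rstrip() swap is a small tightening: only trailing whitespace needs removing before split('\n')[-1] picks the last line. The hex change is the substantive one: str.decode('hex') exists only on Python 2, while bytearray.fromhex() works on both and yields the same raw bytes. Sketch:

    digest_string = '1f8b08'            # made-up hex digest for illustration

    # Python 2 only: digest_string.decode('hex')
    digest = bytearray.fromhex(digest_string)
    assert bytes(digest) == b'\x1f\x8b\x08'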
@@ -156,7 +149,7 @@ def VerifyAbOtaPayload(cert, package):
 
   # Dump pubkey from the certificate.
   pubkey = common.MakeTempFile(prefix="key-", suffix=".pem")
-  with open(pubkey, 'wb') as pubkey_fp:
+  with open(pubkey, 'w') as pubkey_fp:
     pubkey_fp.write(common.ExtractPublicKey(cert))
 
   package_dir = common.MakeTempDir(prefix='package-')
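The mode flip from 'wb' to 'w' matters on Python 3, where a binary-mode file only accepts bytes; writing the str that the key extraction hands back to a 'wb' file would raise TypeError. The same idea in isolation, assuming a PEM string:

    import tempfile

    pem_text = '-----BEGIN PUBLIC KEY-----\n(base64 body elided)\n-----END PUBLIC KEY-----\n'

    # 'w' accepts str on both Python 2 and 3; 'wb' would demand bytes on 3.
    with tempfile.NamedTemporaryFile(mode='w', suffix='.pem', delete=False) as fp:
      fp.write(pem_text)
    print(fp.name)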
@@ -166,11 +159,7 @@ def VerifyAbOtaPayload(cert, package):
   cmd = ['delta_generator',
          '--in_file=' + payload_file,
          '--public_key=' + pubkey]
-  proc = common.Run(cmd)
-  stdoutdata, _ = proc.communicate()
-  assert proc.returncode == 0, \
-      'Failed to verify payload with delta_generator: {}\n{}'.format(
-          package, stdoutdata)
+  common.RunAndCheckOutput(cmd)
   common.ZipClose(package_zip)
 
   # Verified successfully upon reaching here.
tools/releasetools/ota_from_target_files.py

@@ -38,8 +38,8 @@ Common options that apply to both of non-A/B and A/B OTAs
   -k (--package_key) <key>
       Key to use to sign the package (default is the value of
       default_system_dev_certificate from the input target-files's
-      META/misc_info.txt, or "build/make/target/product/security/testkey" if that
-      value is not specified).
+      META/misc_info.txt, or "build/make/target/product/security/testkey" if
+      that value is not specified).
 
       For incremental OTAs, the default value is based on the source
       target-file, not the target build.
@@ -371,7 +371,7 @@ class BuildInfo(object):
       return prop_val
 
     source_order_val = self.info_dict.get("build.prop", {}).get(
-      "ro.product.property_source_order")
+        "ro.product.property_source_order")
     if source_order_val:
       source_order = source_order_val.split(",")
     else:
@@ -381,13 +381,13 @@ class BuildInfo(object):
     if any([x not in BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
             for x in source_order]):
       raise common.ExternalError(
-        "Invalid ro.product.property_source_order '{}'".format(source_order))
+          "Invalid ro.product.property_source_order '{}'".format(source_order))
 
     for source in source_order:
-      source_prop = prop.replace("ro.product", "ro.product.{}".format(source),
-                                 1)
-      prop_val = self.info_dict.get("{}.build.prop".format(source), {}).get(
-        source_prop)
+      source_prop = prop.replace(
+          "ro.product", "ro.product.{}".format(source), 1)
+      prop_val = self.info_dict.get(
+          "{}.build.prop".format(source), {}).get(source_prop)
       if prop_val:
         return prop_val
 
@@ -412,14 +412,14 @@ class BuildInfo(object):
       return self.GetBuildProp("ro.build.fingerprint")
     except common.ExternalError:
       return "{}/{}/{}:{}/{}/{}:{}/{}".format(
-        self.GetBuildProp("ro.product.brand"),
-        self.GetBuildProp("ro.product.name"),
-        self.GetBuildProp("ro.product.device"),
-        self.GetBuildProp("ro.build.version.release"),
-        self.GetBuildProp("ro.build.id"),
-        self.GetBuildProp("ro.build.version.incremental"),
-        self.GetBuildProp("ro.build.type"),
-        self.GetBuildProp("ro.build.tags"))
+          self.GetBuildProp("ro.product.brand"),
+          self.GetBuildProp("ro.product.name"),
+          self.GetBuildProp("ro.product.device"),
+          self.GetBuildProp("ro.build.version.release"),
+          self.GetBuildProp("ro.build.id"),
+          self.GetBuildProp("ro.build.version.incremental"),
+          self.GetBuildProp("ro.build.type"),
+          self.GetBuildProp("ro.build.tags"))
     return "%s/%s/%s:%s" % (
         self.GetOemProperty("ro.product.brand"),
         self.GetOemProperty("ro.product.name"),
@@ -508,7 +508,7 @@ class PayloadSigner(object):
     MODULUS_PREFIX = "Modulus="
     assert modulus_string.startswith(MODULUS_PREFIX)
     modulus_string = modulus_string[len(MODULUS_PREFIX):]
-    key_size = len(modulus_string) / 2
+    key_size = len(modulus_string) // 2
     assert key_size == 256 or key_size == 512, \
         "Unsupported key size {}".format(key_size)
     return key_size
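In Python 2 a single slash between ints is integer division; in Python 3 it yields a float. The // operator floors on both, keeping key_size an int. Sketch:

    modulus_string = 'AB' * 256          # 512 hex characters, i.e. a 2048-bit key

    key_size = len(modulus_string) // 2  # 256, as an int, on Python 2 and 3

    # With a single '/', Python 3 gives 256.0; the equality check against 256
    # would still pass, but the value would silently become a float.
    assert key_size == 256 or key_size == 512, \
        "Unsupported key size {}".format(key_size)
    print(type(key_size).__name__)       # int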
@@ -1051,7 +1051,7 @@ def WriteMetadata(metadata, output):
     output: A ZipFile object or a string of the output file path.
   """
 
-  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.iteritems())])
+  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
   if isinstance(output, zipfile.ZipFile):
     common.ZipWriteStr(output, METADATA_NAME, value,
                        compress_type=zipfile.ZIP_STORED)
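dict.iteritems() was removed in Python 3; items() exists on both versions (a list on 2, a view on 3), and sorted() accepts either. For example:

    metadata = {'post-build': 'fingerprint/b', 'pre-device': 'device'}  # sample values

    value = ''.join(['%s=%s\n' % kv for kv in sorted(metadata.items())])
    print(value)
    # post-build=fingerprint/b
    # pre-device=device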
@@ -1067,7 +1067,7 @@ def HandleDowngradeMetadata(metadata, target_info, source_info):
 
   post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
   pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
-  is_downgrade = long(post_timestamp) < long(pre_timestamp)
+  is_downgrade = int(post_timestamp) < int(pre_timestamp)
 
   if OPTIONS.downgrade:
     if not is_downgrade:
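Python 3 dropped the long type; its int is arbitrary-precision, so int() is the portable spelling for parsing timestamps on both versions:

    post_timestamp, pre_timestamp = '1556677200', '1556590800'  # sample UTC seconds

    is_downgrade = int(post_timestamp) < int(pre_timestamp)
    print(is_downgrade)  # False: the target build is newer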
@@ -1392,7 +1392,7 @@ class AbOtaPropertyFiles(StreamingPropertyFiles):
     payload_offset += len(payload_info.extra) + len(payload_info.filename)
     payload_size = payload_info.file_size
 
-    with input_zip.open('payload.bin', 'r') as payload_fp:
+    with input_zip.open('payload.bin') as payload_fp:
       header_bin = payload_fp.read(24)
 
     # network byte order (big-endian)
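The explicit 'r' was redundant, since it is ZipFile.open()'s default mode. To show the surrounding big-endian header parse end to end, here is a self-contained sketch that fabricates a 24-byte payload header; the 'CrAU' magic and field layout follow the A/B update_engine payload format as I recall it, so treat them as an assumption:

    import io
    import struct
    import zipfile

    # Fabricate a zip holding a fake payload header so the snippet runs standalone.
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as z:
      z.writestr('payload.bin', b'CrAU' + struct.pack('>QQ', 2, 0) + b'\x00' * 4)

    with zipfile.ZipFile(buf) as z:
      with z.open('payload.bin') as payload_fp:  # mode defaults to 'r'
        header_bin = payload_fp.read(24)

    # network byte order (big-endian)
    magic = header_bin[:4]
    major_version, manifest_size = struct.unpack('>QQ', header_bin[4:20])
    print(magic, major_version, manifest_size)   # b'CrAU' 2 0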
@@ -1864,7 +1864,6 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
 
   with zipfile.ZipFile(input_file, 'r') as input_zip:
     infolist = input_zip.infolist()
-    namelist = input_zip.namelist()
 
   input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
   for info in infolist:
@@ -1963,8 +1962,8 @@ def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
       for partition in ab_partitions:
         if (partition in dynamic_partition_list and
             partition not in super_block_devices):
-            logger.info("Dropping %s from ab_partitions.txt", partition)
-            continue
+          logger.info("Dropping %s from ab_partitions.txt", partition)
+          continue
         f.write(partition + "\n")
     to_delete = [AB_PARTITIONS]
 
@@ -1976,7 +1975,7 @@ def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
     to_delete += [DYNAMIC_PARTITION_INFO]
 
   # Remove the existing partition images as well as the map files.
-  to_delete += replace.values()
+  to_delete += list(replace.values())
   to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
 
   common.ZipDelete(target_file, to_delete)
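On Python 3, replace.values() is a view rather than a list. list += view happens to work because += accepts any iterable, but wrapping it in list() keeps to_delete a plain list and stays correct if the code is ever refactored to use + concatenation, which rejects a view:

    replace = {'IMAGES/super_system.img': 'IMAGES/system.img'}   # sample mapping
    to_delete = ['META/ab_partitions.txt']

    to_delete += list(replace.values())
    print(to_delete)  # ['META/ab_partitions.txt', 'IMAGES/system.img']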
@@ -1986,7 +1985,7 @@ def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
   # Write super_{foo}.img as {foo}.img.
   for src, dst in replace.items():
     assert src in namelist, \
-      'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
+        'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
     unzipped_file = os.path.join(input_tmp, *src.split('/'))
     common.ZipWrite(target_zip, unzipped_file, arcname=dst)
 
@@ -2291,7 +2290,8 @@ def main(argv):
   OPTIONS.cache_size = cache_size
 
   if OPTIONS.extra_script is not None:
-    OPTIONS.extra_script = open(OPTIONS.extra_script).read()
+    with open(OPTIONS.extra_script) as fp:
+      OPTIONS.extra_script = fp.read()
 
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
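The last hunk swaps a bare open().read() for a with block; the one-liner leaves closing the file to the garbage collector and triggers a ResourceWarning on Python 3. The same pattern in isolation:

    import os
    import tempfile

    fd, path = tempfile.mkstemp()        # throwaway script file for illustration
    os.close(fd)
    with open(path, 'w') as fp:
      fp.write('echo extra commands\n')

    # The with block closes the file deterministically.
    with open(path) as fp:
      extra_script = fp.read()
    print(extra_script)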