Merge "Define the protobuf for OTA metadata"

Tianjie Xu 2020-08-27 18:21:29 +00:00 committed by Gerrit Code Review
commit 0068973f73
8 changed files with 369 additions and 98 deletions

View File

@ -89,16 +89,35 @@ python_defaults {
],
}
python_library_host {
name: "ota_metadata_proto",
version: {
py2: {
enabled: true,
},
py3: {
enabled: true,
},
},
srcs: [
"ota_metadata.proto",
],
proto: {
canonical_path_from_root: false,
},
}
python_defaults {
name: "releasetools_ota_from_target_files_defaults",
srcs: [
"edify_generator.py",
"ota_from_target_files.py",
"non_ab_ota.py",
"target_files_diff.py",
"ota_from_target_files.py",
"ota_utils.py",
"target_files_diff.py",
],
libs: [
"ota_metadata_proto",
"releasetools_check_target_files_vintf",
"releasetools_common",
"releasetools_verity_utils",

View File

@ -276,7 +276,7 @@ endif;
script.SetProgress(1)
script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
metadata["ota-required-cache"] = str(script.required_cache)
metadata.required_cache = script.required_cache
# We haven't written the metadata entry, which will be done in
# FinalizeMetadata.
@ -530,7 +530,7 @@ endif;
script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
else:
script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
metadata["ota-required-cache"] = str(script.required_cache)
metadata.required_cache = script.required_cache
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().

View File

@ -848,7 +848,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
if OPTIONS.downgrade:
max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
else:
max_timestamp = metadata["post-timestamp"]
max_timestamp = str(metadata.postcondition.timestamp)
additional_args = ["--max_timestamp", max_timestamp]
payload.Generate(target_file, source_file, additional_args)

View File

@ -0,0 +1,88 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
syntax = "proto3";
package build.tools.releasetools;
option optimize_for = LITE_RUNTIME;
// The build information of a particular partition on the device.
message PartitionState {
string partition_name = 1;
repeated string device = 2;
repeated string build = 3;
// The version string of the partition. It's usually a timestamp if present.
// One known exception is the boot image, which uses the kmi version, e.g.
// 5.4.42-android12-0
string version = 4;
// TODO(xunchang), revisit other necessary fields, e.g. security_patch_level.
}
// The build information on the device; the state of the running images can
// thus be inferred from it. For more information on the meaning of each
// subfield, see
// https://source.android.com/compatibility/android-cdd#3_2_2_build_parameters
message DeviceState {
// Device name, i.e. ro.product.device. If the field has multiple values, the
// OTA package supports multiple devices; this usually happens when the same
// image is used to support multiple SKUs.
repeated string device = 1;
// Device fingerprint. Up to the R build, the value is read from
// ro.build.fingerprint.
repeated string build = 2;
// A value that specifies the version of the Android build.
string build_incremental = 3;
// The timestamp when the build is generated.
int64 timestamp = 4;
// The version of the currently-executing Android system.
string sdk_level = 5;
// A value indicating the security patch level of a build.
string security_patch_level = 6;
// The detailed state of each partition. For partial updates or devices with a
// mixed build of partitions, some of the above fields may be left empty, and
// the client will rely on the per-partition information to target the update.
repeated PartitionState partition_state = 7;
}
// The metadata of an OTA package. It contains the package information and the
// prerequisites for installing the update correctly.
message OtaMetadata {
enum OtaType {
AB = 0;
BLOCK = 1;
};
OtaType type = 1;
// True if we need to wipe after the update.
bool wipe = 2;
// True if the timestamp of the post build is older than that of the pre build.
bool downgrade = 3;
// A map of name:content of property files, e.g. ota-property-files.
map<string, string> property_files = 4;
// The required device state in order to install the package.
DeviceState precondition = 5;
// The expected device state after the update.
DeviceState postcondition = 6;
// True if the OTA updates the device to support dynamic partitions while the
// source build doesn't support them.
bool retrofit_dynamic_partitions = 7;
// The required size of the cache partition, only valid for non-A/B update.
int64 required_cache = 8;
}
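
As a quick sanity check of the schema above, here is a minimal sketch of how a script might populate and round-trip the message. It assumes the Soong-generated Python module is importable as ota_metadata_pb2 (as the releasetools diffs below do); all field values are made up for illustration.

import ota_metadata_pb2

metadata = ota_metadata_pb2.OtaMetadata()
metadata.type = ota_metadata_pb2.OtaMetadata.AB
metadata.required_cache = 0
# Repeated fields model multi-SKU packages: one entry per supported device.
metadata.postcondition.device.extend(['device-std', 'device-pro'])
metadata.postcondition.build.append(
    'brand/name/device-std:11/RP1A/12345:user/release-keys')
metadata.postcondition.timestamp = 1598550000
metadata.property_files['ota-property-files'] = 'payload.bin:123:456'

blob = metadata.SerializeToString()
parsed = ota_metadata_pb2.OtaMetadata.FromString(blob)
assert parsed.postcondition.timestamp == 1598550000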

View File

@ -17,6 +17,7 @@ import itertools
import os
import zipfile
import ota_metadata_pb2
from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps)
@ -34,6 +35,7 @@ OPTIONS.output_metadata_path = None
OPTIONS.boot_variable_file = None
METADATA_NAME = 'META-INF/com/android/metadata'
METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
@ -62,11 +64,12 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
# Write the current metadata entry with placeholders.
with zipfile.ZipFile(input_file) as input_zip:
for property_files in needed_property_files:
metadata[property_files.name] = property_files.Compute(input_zip)
metadata.property_files[property_files.name] = property_files.Compute(
input_zip)
namelist = input_zip.namelist()
if METADATA_NAME in namelist:
ZipDelete(input_file, METADATA_NAME)
if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
output_zip = zipfile.ZipFile(input_file, 'a')
WriteMetadata(metadata, output_zip)
ZipClose(output_zip)
@ -81,8 +84,9 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
for property_files in needed_property_files:
metadata[property_files.name] = property_files.Finalize(
prelim_signing_zip, len(metadata[property_files.name]))
metadata.property_files[property_files.name] = property_files.Finalize(
prelim_signing_zip,
len(metadata.property_files[property_files.name]))
# SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP
# entries, as well as padding the entry headers. We do a preliminary signing
@ -103,7 +107,7 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
# Replace the METADATA entry.
ZipDelete(prelim_signing, METADATA_NAME)
ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
output_zip = zipfile.ZipFile(prelim_signing, 'a')
WriteMetadata(metadata, output_zip)
ZipClose(output_zip)
@ -117,7 +121,8 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
# Reopen the final signed zip to double check the streaming metadata.
with zipfile.ZipFile(output_file) as output_zip:
for property_files in needed_property_files:
property_files.Verify(output_zip, metadata[property_files.name].strip())
property_files.Verify(
output_zip, metadata.property_files[property_files.name].strip())
# If requested, dump the metadata to a separate file.
output_metadata_path = OPTIONS.output_metadata_path
@ -125,30 +130,60 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
WriteMetadata(metadata, output_metadata_path)
def WriteMetadata(metadata, output):
def WriteMetadata(metadata_proto, output):
"""Writes the metadata to the zip archive or a file.
Args:
metadata: The metadata dict for the package.
output: A ZipFile object or a string of the output file path.
metadata_proto: The metadata protobuf for the package.
output: A ZipFile object or a string of the output file path. If a string
path is given, the metadata in the protobuf format will be written to
{output}.pb, e.g. ota_metadata.pb
"""
value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
legacy_metadata = "".join(["%s=%s\n" % kv for kv in
sorted(metadata_dict.items())])
if isinstance(output, zipfile.ZipFile):
ZipWriteStr(output, METADATA_NAME, value,
ZipWriteStr(output, METADATA_PROTO_NAME, metadata_proto.SerializeToString(),
compress_type=zipfile.ZIP_STORED)
ZipWriteStr(output, METADATA_NAME, legacy_metadata,
compress_type=zipfile.ZIP_STORED)
return
# SerializeToString() returns bytes; open the .pb output in binary mode.
with open('{}.pb'.format(output), 'wb') as f:
f.write(metadata_proto.SerializeToString())
with open(output, 'w') as f:
f.write(value)
f.write(legacy_metadata)
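
For completeness, a client (or a debugging script) can read the new entry straight back out of a generated package; a minimal sketch, with a hypothetical package path:

import zipfile

import ota_metadata_pb2

# Hypothetical package path, for illustration only.
with zipfile.ZipFile('/tmp/ota-package.zip') as package:
  metadata = ota_metadata_pb2.OtaMetadata.FromString(
      package.read('META-INF/com/android/metadata.pb'))
print(metadata.postcondition.build)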
def UpdateDeviceState(device_state, build_info, boot_variable_values,
is_post_build):
"""Update the fields of the DeviceState proto with build info."""
build_devices, build_fingerprints = \
CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values)
device_state.device.extend(sorted(build_devices))
device_state.build.extend(sorted(build_fingerprints))
device_state.build_incremental = build_info.GetBuildProp(
'ro.build.version.incremental')
# TODO(xunchang) update the partition state
if is_post_build:
device_state.sdk_level = build_info.GetBuildProp(
'ro.build.version.sdk')
device_state.security_patch_level = build_info.GetBuildProp(
'ro.build.version.security_patch')
# Use the actual post-timestamp, even for a downgrade case.
device_state.timestamp = int(build_info.GetBuildProp('ro.build.date.utc'))
def GetPackageMetadata(target_info, source_info=None):
"""Generates and returns the metadata dict.
"""Generates and returns the metadata proto.
It generates a dict() that contains the info to be written into an OTA
package (META-INF/com/android/metadata). It also handles the detection of
downgrade / data wipe based on the global options.
It generates an ota_metadata protobuf that contains the info to be written
into an OTA package (META-INF/com/android/metadata.pb). It also handles the
detection of downgrade / data wipe based on the global options.
Args:
target_info: The BuildInfo instance that holds the target build info.
@ -156,66 +191,96 @@ def GetPackageMetadata(target_info, source_info=None):
None if generating full OTA.
Returns:
A dict to be written into package metadata entry.
A protobuf to be written into package metadata entry.
"""
assert isinstance(target_info, BuildInfo)
assert source_info is None or isinstance(source_info, BuildInfo)
separator = '|'
boot_variable_values = {}
if OPTIONS.boot_variable_file:
d = LoadDictionaryFromFile(OPTIONS.boot_variable_file)
for key, values in d.items():
boot_variable_values[key] = [val.strip() for val in values.split(',')]
post_build_devices, post_build_fingerprints = \
CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values)
metadata = {
'post-build': separator.join(sorted(post_build_fingerprints)),
'post-build-incremental': target_info.GetBuildProp(
'ro.build.version.incremental'),
'post-sdk-level': target_info.GetBuildProp(
'ro.build.version.sdk'),
'post-security-patch-level': target_info.GetBuildProp(
'ro.build.version.security_patch'),
}
metadata_proto = ota_metadata_pb2.OtaMetadata()
# TODO(xunchang) Some fields, e.g. post-device, aren't necessary. We can
# consider skipping them if they aren't used by clients.
UpdateDeviceState(metadata_proto.postcondition, target_info,
boot_variable_values, True)
if target_info.is_ab and not OPTIONS.force_non_ab:
metadata['ota-type'] = 'AB'
metadata['ota-required-cache'] = '0'
metadata_proto.type = ota_metadata_pb2.OtaMetadata.AB
metadata_proto.required_cache = 0
else:
metadata['ota-type'] = 'BLOCK'
metadata_proto.type = ota_metadata_pb2.OtaMetadata.BLOCK
# The cache requirement will be updated by the non-A/B code.
if OPTIONS.wipe_user_data:
metadata['ota-wipe'] = 'yes'
metadata_proto.wipe = True
if OPTIONS.retrofit_dynamic_partitions:
metadata['ota-retrofit-dynamic-partitions'] = 'yes'
metadata_proto.retrofit_dynamic_partitions = True
is_incremental = source_info is not None
if is_incremental:
pre_build_devices, pre_build_fingerprints = \
CalculateRuntimeDevicesAndFingerprints(source_info,
boot_variable_values)
metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints))
metadata['pre-build-incremental'] = source_info.GetBuildProp(
'ro.build.version.incremental')
metadata['pre-device'] = separator.join(sorted(pre_build_devices))
UpdateDeviceState(metadata_proto.precondition, source_info,
boot_variable_values, False)
else:
metadata['pre-device'] = separator.join(sorted(post_build_devices))
# Use the actual post-timestamp, even for a downgrade case.
metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
metadata_proto.precondition.device.extend(
metadata_proto.postcondition.device)
# Detect downgrades and set up downgrade flags accordingly.
if is_incremental:
HandleDowngradeMetadata(metadata, target_info, source_info)
HandleDowngradeMetadata(metadata_proto, target_info, source_info)
return metadata
return metadata_proto
def HandleDowngradeMetadata(metadata, target_info, source_info):
def BuildLegacyOtaMetadata(metadata_proto):
"""Converts the metadata proto to a legacy metadata dict.
This metadata dict is used to build the legacy metadata text file for
backward compatibility. We won't add new keys to the legacy metadata format.
If new information is needed, we should add it as a new field in the
OtaMetadata proto definition.
"""
separator = '|'
metadata_dict = {}
if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
metadata_dict['ota-type'] = 'AB'
elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
metadata_dict['ota-type'] = 'BLOCK'
if metadata_proto.wipe:
metadata_dict['ota-wipe'] = 'yes'
if metadata_proto.retrofit_dynamic_partitions:
metadata_dict['ota-retrofit-dynamic-partitions'] = 'yes'
if metadata_proto.downgrade:
metadata_dict['ota-downgrade'] = 'yes'
metadata_dict['ota-required-cache'] = str(metadata_proto.required_cache)
post_build = metadata_proto.postcondition
metadata_dict['post-build'] = separator.join(post_build.build)
metadata_dict['post-build-incremental'] = post_build.build_incremental
metadata_dict['post-sdk-level'] = post_build.sdk_level
metadata_dict['post-security-patch-level'] = post_build.security_patch_level
metadata_dict['post-timestamp'] = str(post_build.timestamp)
pre_build = metadata_proto.precondition
metadata_dict['pre-device'] = separator.join(pre_build.device)
# incremental updates
if len(pre_build.build) != 0:
metadata_dict['pre-build'] = separator.join(pre_build.build)
metadata_dict['pre-build-incremental'] = pre_build.build_incremental
metadata_dict.update(metadata_proto.property_files)
return metadata_dict
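
Since the legacy dict is derived entirely from the proto, the two views are easy to cross-check. A small sketch (run inside ota_utils, or after importing BuildLegacyOtaMetadata from it), with made-up fingerprints:

proto = ota_metadata_pb2.OtaMetadata()
proto.type = ota_metadata_pb2.OtaMetadata.BLOCK
proto.required_cache = 0
proto.postcondition.build.extend(['fingerprint-a', 'fingerprint-b'])

legacy = BuildLegacyOtaMetadata(proto)
assert legacy['ota-type'] == 'BLOCK'
assert legacy['ota-required-cache'] == '0'
assert legacy['post-build'] == 'fingerprint-a|fingerprint-b'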
def HandleDowngradeMetadata(metadata_proto, target_info, source_info):
# Only incremental OTAs are allowed to reach here.
assert OPTIONS.incremental_source is not None
@ -228,7 +293,7 @@ def HandleDowngradeMetadata(metadata, target_info, source_info):
raise RuntimeError(
"--downgrade or --override_timestamp specified but no downgrade "
"detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
metadata["ota-downgrade"] = "yes"
metadata_proto.downgrade = True
else:
if is_downgrade:
raise RuntimeError(
@ -415,8 +480,10 @@ class PropertyFiles(object):
# reserved space serves the metadata entry only.
if reserve_space:
tokens.append('metadata:' + ' ' * 15)
tokens.append('metadata.pb:' + ' ' * 15)
else:
tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
return ','.join(tokens)
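
To make the reserved-space logic concrete: the computed property-files string is a comma-separated list of name:offset:size tokens, and with this change it carries one token per metadata entry (metadata and metadata.pb), which is why the token counts asserted in the tests below all grow by one. An illustrative, made-up value for a streaming A/B package might look like:

payload.bin:679:343,payload_properties.txt:1022:45,metadata:69:379,metadata.pb:1067:190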

View File

@ -42,12 +42,13 @@ class NonAbOtaPropertyFilesTest(PropertyFilesTestCase):
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
self.assertEqual(1, len(tokens))
self.assertEqual(2, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Finalize(self):
entries = [
'META-INF/com/android/metadata',
'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()
@ -57,14 +58,16 @@ class NonAbOtaPropertyFilesTest(PropertyFilesTestCase):
property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
self.assertEqual(1, len(tokens))
self.assertEqual(2, len(tokens))
# 'META-INF/com/android/metadata' will be keyed as 'metadata'.
entries[0] = 'metadata'
entries[1] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
def test_Verify(self):
entries = (
'META-INF/com/android/metadata',
'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()

View File

@ -20,17 +20,20 @@ import os.path
import zipfile
import common
import ota_metadata_pb2
import test_utils
from ota_utils import CalculateRuntimeDevicesAndFingerprints
from ota_utils import (
BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
FinalizeMetadata, GetPackageMetadata, PropertyFiles)
from ota_from_target_files import (
_LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata,
GetPackageMetadata, GetTargetFilesZipForSecondaryImages,
_LoadOemDicts, AbOtaPropertyFiles,
GetTargetFilesZipForSecondaryImages,
GetTargetFilesZipWithoutPostinstallConfig,
Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
Payload, PayloadSigner, POSTINSTALL_CONFIG,
StreamingPropertyFiles)
from non_ab_ota import NonAbOtaPropertyFiles
from test_utils import PropertyFilesTestCase
def construct_target_files(secondary=False):
"""Returns a target-files.zip file for generating OTA packages."""
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@ -150,7 +153,6 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
}
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
self.assertTrue(os.path.exists(self.testdata_dir))
@ -169,11 +171,16 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
common.OPTIONS.search_path = test_utils.get_search_path()
@staticmethod
def GetLegacyOtaMetadata(target_info, source_info=None):
metadata_proto = GetPackageMetadata(target_info, source_info)
return BuildLegacyOtaMetadata(metadata_proto)
def test_GetPackageMetadata_abOta_full(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
target_info = common.BuildInfo(target_info_dict, None)
metadata = GetPackageMetadata(target_info)
metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
'ota-type' : 'AB',
@ -193,7 +200,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
target_info = common.BuildInfo(target_info_dict, None)
source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
metadata = GetPackageMetadata(target_info, source_info)
metadata = self.GetLegacyOtaMetadata(target_info, source_info)
self.assertDictEqual(
{
'ota-type' : 'AB',
@ -211,10 +218,11 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
def test_GetPackageMetadata_nonAbOta_full(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
metadata = GetPackageMetadata(target_info)
metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
'ota-type' : 'BLOCK',
'ota-required-cache' : '0',
'post-build' : 'build-fingerprint-target',
'post-build-incremental' : 'build-version-incremental-target',
'post-sdk-level' : '27',
@ -228,10 +236,11 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
metadata = GetPackageMetadata(target_info, source_info)
metadata = self.GetLegacyOtaMetadata(target_info, source_info)
self.assertDictEqual(
{
'ota-type' : 'BLOCK',
'ota-required-cache' : '0',
'post-build' : 'build-fingerprint-target',
'post-build-incremental' : 'build-version-incremental-target',
'post-sdk-level' : '27',
@ -246,10 +255,11 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
def test_GetPackageMetadata_wipe(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.wipe_user_data = True
metadata = GetPackageMetadata(target_info)
metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
'ota-type' : 'BLOCK',
'ota-required-cache' : '0',
'ota-wipe' : 'yes',
'post-build' : 'build-fingerprint-target',
'post-build-incremental' : 'build-version-incremental-target',
@ -263,11 +273,12 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
def test_GetPackageMetadata_retrofitDynamicPartitions(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.retrofit_dynamic_partitions = True
metadata = GetPackageMetadata(target_info)
metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
'ota-retrofit-dynamic-partitions' : 'yes',
'ota-type' : 'BLOCK',
'ota-required-cache' : '0',
'post-build' : 'build-fingerprint-target',
'post-build-incremental' : 'build-version-incremental-target',
'post-sdk-level' : '27',
@ -293,7 +304,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
target_info = common.BuildInfo(target_info_dict, None)
source_info = common.BuildInfo(source_info_dict, None)
common.OPTIONS.incremental_source = ''
self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
self.assertRaises(RuntimeError, self.GetLegacyOtaMetadata, target_info,
source_info)
def test_GetPackageMetadata_downgrade(self):
@ -307,11 +318,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
common.OPTIONS.incremental_source = ''
common.OPTIONS.downgrade = True
common.OPTIONS.wipe_user_data = True
metadata = GetPackageMetadata(target_info, source_info)
metadata = self.GetLegacyOtaMetadata(target_info, source_info)
self.assertDictEqual(
{
'ota-downgrade' : 'yes',
'ota-type' : 'BLOCK',
'ota-required-cache' : '0',
'ota-wipe' : 'yes',
'post-build' : 'build-fingerprint-target',
'post-build-incremental' : 'build-version-incremental-target',
@ -464,13 +477,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
'A' * 1024 * 1024 * 1024,
zipfile.ZIP_STORED)
metadata = {}
metadata = ota_metadata_pb2.OtaMetadata()
output_file = common.MakeTempFile(suffix='.zip')
needed_property_files = (
TestPropertyFiles(),
)
FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
self.assertIn('ota-test-property-files', metadata)
self.assertIn('ota-test-property-files', metadata.property_files)
@test_utils.SkipIfExternalToolsUnavailable()
def test_FinalizeMetadata(self):
@ -508,13 +521,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
'A' * 1024 * 1024,
zipfile.ZIP_STORED)
metadata = {}
metadata = ota_metadata_pb2.OtaMetadata()
needed_property_files = (
TestPropertyFiles(),
)
output_file = common.MakeTempFile(suffix='.zip')
FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
self.assertIn('ota-test-property-files', metadata)
self.assertIn('ota-test-property-files', metadata.property_files)
class TestPropertyFiles(PropertyFiles):
@ -532,8 +545,8 @@ class TestPropertyFiles(PropertyFiles):
'optional-entry2',
)
class PropertyFilesTest(PropertyFilesTestCase):
class PropertyFilesTest(PropertyFilesTestCase):
@test_utils.SkipIfExternalToolsUnavailable()
def test_Compute(self):
@ -547,7 +560,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
self.assertEqual(3, len(tokens))
self.assertEqual(4, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Compute_withOptionalEntries(self):
@ -563,7 +576,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
self.assertEqual(5, len(tokens))
self.assertEqual(6, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Compute_missingRequiredEntry(self):
@ -581,6 +594,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
'required-entry1',
'required-entry2',
'META-INF/com/android/metadata',
'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
@ -590,10 +604,11 @@ class PropertyFilesTest(PropertyFilesTestCase):
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
self.assertEqual(3, len(tokens))
self.assertEqual(4, len(tokens))
# 'META-INF/com/android/metadata' will be keyed as 'metadata' in the
# streaming metadata.
entries[2] = 'metadata'
entries[3] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
@test_utils.SkipIfExternalToolsUnavailable()
@ -604,6 +619,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
'optional-entry1',
'optional-entry2',
'META-INF/com/android/metadata',
'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
@ -638,6 +654,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
'optional-entry1',
'optional-entry2',
'META-INF/com/android/metadata',
'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
@ -687,7 +704,7 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
self.assertEqual(5, len(tokens))
self.assertEqual(6, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Finalize(self):
@ -697,6 +714,7 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
'care_map.txt',
'compatibility.zip',
'META-INF/com/android/metadata',
'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
@ -706,10 +724,11 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
self.assertEqual(5, len(tokens))
self.assertEqual(6, len(tokens))
# 'META-INF/com/android/metadata' will be keyed as 'metadata' in the
# streaming metadata.
entries[4] = 'metadata'
entries[5] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
def test_Verify(self):
@ -719,6 +738,7 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
'care_map.txt',
'compatibility.zip',
'META-INF/com/android/metadata',
'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
@ -855,6 +875,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTestCase):
# Put META-INF/com/android/metadata if needed.
if with_metadata:
entries.append('META-INF/com/android/metadata')
entries.append('META-INF/com/android/metadata.pb')
for entry in entries:
zip_fp.writestr(
@ -870,9 +891,9 @@ class AbOtaPropertyFilesTest(PropertyFilesTestCase):
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
# "6" indcludes the four entries above, one metadata entry, and one entry
# "7" indcludes the four entries above, two metadata entries, and one entry
# for payload-metadata.bin.
self.assertEqual(6, len(tokens))
self.assertEqual(7, len(tokens))
self._verify_entries(
zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
@ -886,9 +907,9 @@ class AbOtaPropertyFilesTest(PropertyFilesTestCase):
property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
# "6" indcludes the four entries above, one metadata entry, and one entry
# "7" includes the four entries above, two metadata entries, and one entry
# for payload-metadata.bin.
self.assertEqual(6, len(tokens))
self.assertEqual(7, len(tokens))
self._verify_entries(
zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
@ -1187,10 +1208,29 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
'ro.build.tags=build-tags',
'ro.build.version.sdk=30',
'ro.build.version.security_patch=2020',
'ro.build.date.utc=12345678'
'ro.build.date.utc=12345678',
'ro.system.build.version.release=version-release',
'ro.system.build.id=build-id',
'ro.system.build.version.incremental=version-incremental',
'ro.system.build.type=build-type',
'ro.system.build.tags=build-tags',
'ro.system.build.version.sdk=30',
'ro.system.build.version.security_patch=2020',
'ro.system.build.date.utc=12345678',
'ro.product.system.brand=generic',
'ro.product.system.name=generic',
'ro.product.system.device=generic',
]
VENDOR_BUILD_PROP = [
'ro.vendor.build.version.release=version-release',
'ro.vendor.build.id=build-id',
'ro.vendor.build.version.incremental=version-incremental',
'ro.vendor.build.type=build-type',
'ro.vendor.build.tags=build-tags',
'ro.vendor.build.version.sdk=30',
'ro.vendor.build.version.security_patch=2020',
'ro.vendor.build.date.utc=12345678',
'ro.product.vendor.brand=vendor-product-brand',
'ro.product.vendor.name=vendor-product-name',
'ro.product.vendor.device=vendor-product-device'
@ -1326,8 +1366,8 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
f.write('ro.boot.sku_name=std,pro')
build_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
metadata = GetPackageMetadata(build_info)
self.assertEqual('vendor-product-device', metadata['pre-device'])
metadata_dict = BuildLegacyOtaMetadata(GetPackageMetadata(build_info))
self.assertEqual('vendor-product-device', metadata_dict['pre-device'])
fingerprints = [
self.constructFingerprint(
'vendor-product-brand/vendor-product-name/vendor-product-device'),
@ -1336,7 +1376,33 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
self.constructFingerprint(
'vendor-product-brand/vendor-product-std/vendor-product-device'),
]
self.assertEqual('|'.join(fingerprints), metadata['post-build'])
self.assertEqual('|'.join(fingerprints), metadata_dict['post-build'])
def CheckMetadataEqual(self, metadata_dict, metadata_proto):
post_build = metadata_proto.postcondition
self.assertEqual('|'.join(post_build.build),
metadata_dict['post-build'])
self.assertEqual(post_build.build_incremental,
metadata_dict['post-build-incremental'])
self.assertEqual(post_build.sdk_level,
metadata_dict['post-sdk-level'])
self.assertEqual(post_build.security_patch_level,
metadata_dict['post-security-patch-level'])
if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
ota_type = 'AB'
elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
ota_type = 'BLOCK'
else:
ota_type = ''
self.assertEqual(ota_type, metadata_dict['ota-type'])
self.assertEqual(metadata_proto.wipe,
metadata_dict.get('ota-wipe') == 'yes')
self.assertEqual(metadata_proto.required_cache,
int(metadata_dict.get('ota-required-cache', 0)))
self.assertEqual(metadata_proto.retrofit_dynamic_partitions,
metadata_dict.get(
'ota-retrofit-dynamic-partitions') == 'yes')
def test_GetPackageMetadata_incremental_package(self):
vendor_build_prop = copy.deepcopy(self.VENDOR_BUILD_PROP)
@ -1365,7 +1431,18 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
'ro.build.tags=build-tags',
'ro.build.version.sdk=29',
'ro.build.version.security_patch=2020',
'ro.build.date.utc=12340000'
'ro.build.date.utc=12340000',
'ro.system.build.version.release=source-version-release',
'ro.system.build.id=source-build-id',
'ro.system.build.version.incremental=source-version-incremental',
'ro.system.build.type=build-type',
'ro.system.build.tags=build-tags',
'ro.system.build.version.sdk=29',
'ro.system.build.version.security_patch=2020',
'ro.system.build.date.utc=12340000',
'ro.product.system.brand=generic',
'ro.product.system.name=generic',
'ro.product.system.device=generic',
]
self.writeFiles({
'META/misc_info.txt': '\n'.join(self.MISC_INFO),
@ -1381,10 +1458,11 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
target_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
source_info = common.BuildInfo(common.LoadInfoDict(source_dir))
metadata = GetPackageMetadata(target_info, source_info)
metadata_proto = GetPackageMetadata(target_info, source_info)
metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
self.assertEqual(
'vendor-device-pro|vendor-device-std|vendor-product-device',
metadata['pre-device'])
metadata_dict['pre-device'])
suffix = ':source-version-release/source-build-id/' \
'source-version-incremental:build-type/build-tags'
pre_fingerprints = [
@ -1395,7 +1473,7 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
'vendor-product-brand/vendor-product-name/vendor-product-device'
'{}'.format(suffix),
]
self.assertEqual('|'.join(pre_fingerprints), metadata['pre-build'])
self.assertEqual('|'.join(pre_fingerprints), metadata_dict['pre-build'])
post_fingerprints = [
self.constructFingerprint(
@ -1405,4 +1483,6 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
self.constructFingerprint(
'vendor-product-brand/vendor-product-name/vendor-product-device'),
]
self.assertEqual('|'.join(post_fingerprints), metadata['post-build'])
self.assertEqual('|'.join(post_fingerprints), metadata_dict['post-build'])
self.CheckMetadataEqual(metadata_dict, metadata_proto)

View File

@ -22,6 +22,7 @@ Utils for running unittests.
import logging
import os
import os.path
import re
import struct
import sys
import unittest
@ -224,13 +225,26 @@ class PropertyFilesTestCase(ReleaseToolsTestCase):
input_fp.seek(offset)
if entry == 'metadata':
expected = b'META-INF/COM/ANDROID/METADATA'
elif entry == 'metadata.pb':
expected = b'META-INF/COM/ANDROID/METADATA-PB'
else:
expected = entry.replace('.', '-').upper().encode()
self.assertEqual(expected, input_fp.read(size))
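
For context on the expected payloads above: construct_zip_package (defined on PropertyFilesTestCase, not shown in this diff) evidently writes each entry's own name, upper-cased with '.' mapped to '-', as its placeholder content, which is why the two metadata entries resolve to the byte strings checked here. A hypothetical reconstruction of that convention:

import tempfile
import zipfile

def construct_zip_package_sketch(entries):
  # Hypothetical reconstruction for illustration only; the real helper lives
  # on PropertyFilesTestCase.
  zip_path = tempfile.NamedTemporaryFile(suffix='.zip', delete=False).name
  with zipfile.ZipFile(zip_path, 'w') as zip_fp:
    for entry in entries:
      zip_fp.writestr(entry, entry.replace('.', '-').upper())
  return zip_path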
if __name__ == '__main__':
testsuite = unittest.TestLoader().discover(
os.path.dirname(os.path.realpath(__file__)))
# We only want to run tests from the top level directory. Unfortunately the
# pattern option of unittest.discover, which internally uses fnmatch, doesn't
# provide a good way to filter the test files by directory, so we walk the
# tree with os.walk and load the matching modules manually.
test_modules = []
base_path = os.path.dirname(os.path.realpath(__file__))
for dirpath, _, files in os.walk(base_path):
for fn in files:
if dirpath == base_path and re.match('test_.*\\.py$', fn):
test_modules.append(fn[:-3])
test_suite = unittest.TestLoader().loadTestsFromNames(test_modules)
# atest needs a verbosity level of >= 2 to correctly parse the result.
unittest.TextTestRunner(verbosity=2).run(testsuite)
unittest.TextTestRunner(verbosity=2).run(test_suite)