Merge "Check the completeness of original blocks during target files validation" am: ef7737f921
am: 1f80b12aa0
Change-Id: Ieaa5d86aa24b220df42ac639408bcc659e2359fd
commit 9312c2acc0
tools/releasetools/test_validate_target_files.py
@@ -19,11 +19,13 @@
 import os
 import os.path
 import shutil
 import zipfile
 
 import common
 import test_utils
 import verity_utils
-from validate_target_files import ValidateVerifiedBootImages
+from rangelib import RangeSet
+from validate_target_files import (ValidateVerifiedBootImages,
+                                   ValidateFileConsistency)
 from verity_utils import CreateVerityImageBuilder
 
@@ -107,7 +109,8 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase):
         AssertionError, ValidateVerifiedBootImages, input_tmp, info_dict,
         options)
 
-  def _generate_system_image(self, output_file):
+  def _generate_system_image(self, output_file, system_root=None,
+                             file_map=None):
     prop_dict = {
         'partition_size': str(1024 * 1024),
         'verity': 'true',
@@ -120,9 +123,12 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase):
     image_size = verity_image_builder.CalculateMaxImageSize()
 
     # Use an empty root directory.
-    system_root = common.MakeTempDir()
+    if not system_root:
+      system_root = common.MakeTempDir()
     cmd = ['mkuserimg_mke2fs', '-s', system_root, output_file, 'ext4',
            '/system', str(image_size), '-j', '0']
+    if file_map:
+      cmd.extend(['-B', file_map])
     proc = common.Run(cmd)
     stdoutdata, _ = proc.communicate()
     self.assertEqual(
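The two new keyword arguments keep existing callers unchanged: when system_root is omitted an empty temp directory is still used, and when file_map is given, '-B <file_map>' is appended so that mkuserimg_mke2fs also writes out the block map of the generated image. A minimal, hypothetical caller is sketched below; the method name, temp paths, and assertion are invented for the example and are not part of this commit.

  # Hypothetical usage on the same test class; not part of this commit.
  def test_generateSystemImage_withFileMap(self):
    input_tmp = common.MakeTempDir()
    system_root = common.MakeTempDir()  # an empty root is enough for the sketch
    system_image = os.path.join(input_tmp, 'system.img')
    raw_file_map = os.path.join(input_tmp, 'raw_system.map')
    self._generate_system_image(system_image, system_root=system_root,
                                file_map=raw_file_map)
    # With '-B', the block map should have been written next to the image.
    self.assertTrue(os.path.exists(raw_file_map))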
@@ -155,3 +161,56 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase):
         'verity_key_mincrypt' : verity_key_mincrypt,
     }
     ValidateVerifiedBootImages(input_tmp, info_dict, options)
+
+  def test_ValidateFileConsistency_incompleteRange(self):
+    input_tmp = common.MakeTempDir()
+    os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+    system_image = os.path.join(input_tmp, 'IMAGES', 'system.img')
+    system_root = os.path.join(input_tmp, "SYSTEM")
+    os.mkdir(system_root)
+
+    # Write test files that contain multiple blocks of zeros; these zero
+    # blocks will be omitted by the kernel, and each test file will occupy
+    # one block range in the final system image.
+    with open(os.path.join(system_root, 'a'), 'w') as f:
+      f.write("aaa")
+      f.write('\0' * 4096 * 3)
+    with open(os.path.join(system_root, 'b'), 'w') as f:
+      f.write("bbb")
+      f.write('\0' * 4096 * 3)
+
+    raw_file_map = os.path.join(input_tmp, 'IMAGES', 'raw_system.map')
+    self._generate_system_image(system_image, system_root, raw_file_map)
+
+    # Parse the generated file map and update the block ranges for each file.
+    file_map_list = {}
+    image_ranges = RangeSet()
+    with open(raw_file_map, 'r') as f:
+      for line in f.readlines():
+        info = line.split()
+        self.assertEqual(2, len(info))
+        image_ranges = image_ranges.union(RangeSet(info[1]))
+        file_map_list[info[0]] = RangeSet(info[1])
+
+    # Add one unoccupied block as the shared block for all test files.
+    mock_shared_block = RangeSet("10-20").subtract(image_ranges).first(1)
+    with open(os.path.join(input_tmp, 'IMAGES', 'system.map'), 'w') as f:
+      for key in sorted(file_map_list.keys()):
+        line = "{} {}\n".format(
+            key, file_map_list[key].union(mock_shared_block))
+        f.write(line)
+
+    # Prepare the target zip file.
+    input_file = common.MakeTempFile()
+    all_entries = ['SYSTEM/', 'SYSTEM/b', 'SYSTEM/a', 'IMAGES/',
+                   'IMAGES/system.map', 'IMAGES/system.img']
+    with zipfile.ZipFile(input_file, 'w') as input_zip:
+      for name in all_entries:
+        input_zip.write(os.path.join(input_tmp, name), arcname=name)
+
+    input_zip = zipfile.ZipFile(input_file, 'r')
+    info_dict = {'extfs_sparse_flag': '-s'}
+
+    # Expect the validation to pass and both files to be skipped due to
+    # the 'incomplete' block range.
+    ValidateFileConsistency(input_zip, input_tmp, info_dict)
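The doctored IMAGES/system.map above is what later triggers the 'incomplete' path: every file claims one extra block that no file actually owns. The following self-contained snippet illustrates the rangelib.RangeSet calls involved; the block numbers are invented for the illustration, only the API usage mirrors the test, and it assumes tools/releasetools from an AOSP checkout is importable.

from rangelib import RangeSet  # AOSP tools/releasetools module

# A raw block-map line has the form "<entry> <ranges>", e.g. "/a 4-5".
occupied = RangeSet("4-5").union(RangeSet("6-7"))  # blocks used by /a and /b

# Pick one block in 10-20 (ranges are inclusive) that no file occupies.
shared = RangeSet("10-20").subtract(occupied).first(1)

# Claim that block for /a as well, producing the doctored map line.
print("/a {}".format(RangeSet("4-5").union(shared)))  # expected: "/a 4-5 10"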
tools/releasetools/validate_target_files.py
@@ -84,11 +84,6 @@ def ValidateFileConsistency(input_zip, input_tmp, info_dict):
     # bytes past the file length, which is expected to be padded with '\0's.
     ranges = image.file_map[entry]
 
-    incomplete = ranges.extra.get('incomplete', False)
-    if incomplete:
-      logging.warning('Skipping %s that has incomplete block list', entry)
-      continue
-
     # Use the original RangeSet if applicable, which includes the shared
     # blocks. And this needs to happen before checking the monotonicity flag.
     if ranges.extra.get('uses_shared_blocks'):
@@ -96,6 +91,11 @@ def ValidateFileConsistency(input_zip, input_tmp, info_dict):
     else:
       file_ranges = ranges
 
+    incomplete = file_ranges.extra.get('incomplete', False)
+    if incomplete:
+      logging.warning('Skipping %s that has incomplete block list', entry)
+      continue
+
     # TODO(b/79951650): Handle files with non-monotonic ranges.
     if not file_ranges.monotonic:
       logging.warning(
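Taken together, the two hunks above move the 'incomplete' check below the shared-block handling, so the flag is consulted on the resolved ranges. A condensed sketch of the resulting per-entry check order follows; it is not the verbatim AOSP implementation, and restore_original_ranges is a placeholder for whatever the surrounding code in ValidateFileConsistency actually does to recover the original RangeSet.

import logging

def _should_validate(image, entry, restore_original_ranges):
  """Sketch of the check order only; not the real ValidateFileConsistency."""
  ranges = image.file_map[entry]

  # 1. Resolve the original RangeSet first for entries that use shared
  #    blocks (restore_original_ranges is a placeholder callback).
  if ranges.extra.get('uses_shared_blocks'):
    file_ranges = restore_original_ranges(ranges)
  else:
    file_ranges = ranges

  # 2. Only then consult the 'incomplete' flag, as moved by this change.
  if file_ranges.extra.get('incomplete', False):
    logging.warning('Skipping %s that has incomplete block list', entry)
    return False

  # 3. Non-monotonic ranges are still skipped before any blocks are hashed.
  if not file_ranges.monotonic:
    logging.warning('Skipping %s with non-monotonic ranges', entry)
    return False

  return True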