extra-files: Allow configuring used checksums
Instead of adding a new config option, we can just reuse the existing `media_checksums` value. If the value is good for image checksums, it should work for extra files as well. Relates: #591 Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
This commit is contained in:
parent
532b6b1fbc
commit
c293a1e147
@ -764,7 +764,8 @@ Example
|
|||||||
Extra Files Metadata
|
Extra Files Metadata
|
||||||
--------------------
|
--------------------
|
||||||
If extra files are specified a metadata file, ``extra_files.json``, is placed
|
If extra files are specified a metadata file, ``extra_files.json``, is placed
|
||||||
in the os/ directory and media. This metadata file is in the format:
|
in the ``os/`` directory and media. The checksums generated are determined by
|
||||||
|
the ``media_checksums`` option. This metadata file is in the format:
|
||||||
|
|
||||||
::
|
::
|
||||||
|
|
||||||
|
@ -47,7 +47,8 @@ class ExtraFilesPhase(ConfigGuardedPhase):
|
|||||||
% (arch, variant.uid))
|
% (arch, variant.uid))
|
||||||
|
|
||||||
|
|
||||||
def copy_extra_files(compose, cfg, arch, variant, package_sets, checksum_type='sha256'):
|
def copy_extra_files(compose, cfg, arch, variant, package_sets, checksum_type=None):
|
||||||
|
checksum_type = checksum_type or compose.conf['media_checksums']
|
||||||
var_dict = {
|
var_dict = {
|
||||||
"arch": arch,
|
"arch": arch,
|
||||||
"variant_id": variant.id,
|
"variant_id": variant.id,
|
||||||
|
@ -200,6 +200,53 @@ class TestWriteExtraFiles(helpers.PungiTestCase):
|
|||||||
self.assertEqual(expected_metadata['header'], actual_metadata['header'])
|
self.assertEqual(expected_metadata['header'], actual_metadata['header'])
|
||||||
self.assertEqual(expected_metadata['data'], actual_metadata['data'])
|
self.assertEqual(expected_metadata['data'], actual_metadata['data'])
|
||||||
|
|
||||||
|
def test_write_extra_files_multiple_checksums(self):
    """Assert metadata is written to the proper location with valid data"""
    self.maxDiff = None
    log = mock.Mock()
    relative_paths = ['file1', 'file2', 'subdir/file3']

    # Every file is created with its relative path plus a newline as
    # content, so the sizes and digests below are fixed and reproducible.
    expected_metadata = {
        u'header': {u'version': u'1.0'},
        u'data': [
            {
                u'file': u'file1',
                u'checksums': {
                    u'md5': u'5149d403009a139c7e085405ef762e1a',
                    u'sha256': u'ecdc5536f73bdae8816f0ea40726ef5e9b810d914493075903bb90623d97b1d8',
                },
                u'size': 6,
            },
            {
                u'file': u'file2',
                u'checksums': {
                    u'md5': u'3d709e89c8ce201e3c928eb917989aef',
                    u'sha256': u'67ee5478eaadb034ba59944eb977797b49ca6aa8d3574587f36ebcbeeb65f70e',
                },
                u'size': 6,
            },
            {
                u'file': u'subdir/file3',
                u'checksums': {
                    u'md5': u'1ed02b5cf7fd8626f854e9ef3fee8694',
                    u'sha256': u'52f9f0e467e33da811330cad085fdb4eaa7abcb9ebfe6001e0f5910da678be51',
                },
                u'size': 13,
            },
        ],
    }

    tree_dir = os.path.join(self.topdir, 'compose', 'Server', 'x86_64', 'os')
    for rel_path in relative_paths:
        helpers.touch(os.path.join(tree_dir, rel_path), rel_path + '\n')

    # Request both digest algorithms; the metadata must carry one entry
    # per algorithm for every file.
    metadata_file = metadata.write_extra_files(
        tree_dir, relative_paths, checksum_type=['md5', 'sha256'], logger=log)
    with open(metadata_file) as metadata_fd:
        actual_metadata = json.load(metadata_fd)

    self.assertEqual(expected_metadata['header'], actual_metadata['header'])
    self.assertEqual(expected_metadata['data'], actual_metadata['data'])
||||||
def test_write_extra_files_missing_file(self):
|
def test_write_extra_files_missing_file(self):
|
||||||
"""Assert metadata is written to the proper location with valid data"""
|
"""Assert metadata is written to the proper location with valid data"""
|
||||||
mock_logger = mock.Mock()
|
mock_logger = mock.Mock()
|
||||||
|
Loading…
Reference in New Issue
Block a user