2015-12-08 13:29:18 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2018-05-30 14:09:31 +00:00
|
|
|
import argparse
|
2015-12-08 13:29:18 +00:00
|
|
|
import mock
|
|
|
|
import os
|
2020-01-22 10:02:22 +00:00
|
|
|
|
2016-05-25 11:39:02 +00:00
|
|
|
try:
|
|
|
|
import unittest2 as unittest
|
|
|
|
except ImportError:
|
|
|
|
import unittest
|
2016-01-06 11:24:01 +00:00
|
|
|
import tempfile
|
|
|
|
import shutil
|
2017-03-08 12:16:34 +00:00
|
|
|
import subprocess
|
2019-10-04 12:45:03 +00:00
|
|
|
import six
|
2015-12-08 13:29:18 +00:00
|
|
|
|
2016-01-06 11:24:01 +00:00
|
|
|
from pungi import compose
|
2015-12-08 13:29:18 +00:00
|
|
|
from pungi import util
|
|
|
|
|
2018-11-21 09:56:22 +00:00
|
|
|
from tests.helpers import touch, PungiTestCase, mk_boom
|
2016-02-29 12:35:55 +00:00
|
|
|
|
2015-12-08 13:29:18 +00:00
|
|
|
|
|
|
|
class TestGitRefResolver(unittest.TestCase):
    """Tests for resolving git URLs/refs to commit hashes via ``git ls-remote``.

    ``pungi.util.run`` is mocked throughout, so no real git command is
    executed; the tests assert both the resolved value and the exact
    command line that would have been run.
    """

    @mock.patch("pungi.util.run")
    def test_successful_resolve(self, run):
        """A ``#HEAD`` fragment is replaced by the hash ls-remote reports."""
        run.return_value = (0, "CAFEBABE\tHEAD\n")

        url = util.resolve_git_url("https://git.example.com/repo.git?somedir#HEAD")

        self.assertEqual(url, "https://git.example.com/repo.git?somedir#CAFEBABE")
        run.assert_called_once_with(
            ["git", "ls-remote", "https://git.example.com/repo.git", "HEAD"],
            universal_newlines=True,
        )

    @mock.patch("pungi.util.run")
    def test_successful_resolve_branch(self, run):
        """An ``origin/<branch>`` fragment is looked up as ``refs/heads/<branch>``."""
        run.return_value = (0, "CAFEBABE\trefs/heads/f24\n")

        url = util.resolve_git_url(
            "https://git.example.com/repo.git?somedir#origin/f24"
        )

        self.assertEqual(url, "https://git.example.com/repo.git?somedir#CAFEBABE")
        run.assert_called_once_with(
            ["git", "ls-remote", "https://git.example.com/repo.git", "refs/heads/f24"],
            universal_newlines=True,
        )

    def test_resolve_ref_with_commit_id(self):
        """A 40-char hex ref is already a commit hash and is returned as-is."""
        ref = util.resolve_git_ref("https://git.example.com/repo.git", "a" * 40)
        self.assertEqual(ref, "a" * 40)

    @mock.patch("pungi.util.run")
    def test_resolve_ref_multiple_matches(self, run):
        """When ls-remote prints several matching lines, the first one wins."""
        run.return_value = (
            0,
            "CAFEBABE\trefs/heads/master\nBABECAFE\trefs/remotes/origin/master",
        )

        ref = util.resolve_git_ref("https://git.example.com/repo.git", "master")

        self.assertEqual(ref, "CAFEBABE")
        run.assert_called_once_with(
            ["git", "ls-remote", "https://git.example.com/repo.git", "master"],
            universal_newlines=True,
        )

    @mock.patch("pungi.util.run")
    def test_resolve_ref_with_remote_head(self, run):
        """HEAD resolution ignores the extra ``refs/remotes/origin/HEAD`` line."""
        run.return_value = (0, "CAFEBABE\tHEAD\nBABECAFE\trefs/remotes/origin/HEAD")

        ref = util.resolve_git_ref("https://git.example.com/repo.git", "HEAD")

        self.assertEqual(ref, "CAFEBABE")
        run.assert_called_once_with(
            ["git", "ls-remote", "https://git.example.com/repo.git", "HEAD"],
            universal_newlines=True,
        )

    @mock.patch("pungi.util.run")
    def test_resolve_missing_spec(self, run):
        """A URL without a fragment is returned unchanged, with no git call."""
        url = util.resolve_git_url("https://git.example.com/repo.git")

        self.assertEqual(url, "https://git.example.com/repo.git")
        self.assertEqual(run.mock_calls, [])

    @mock.patch("pungi.util.run")
    def test_resolve_non_head_spec(self, run):
        """Fragments that are not HEAD or origin/* (e.g. tags) are kept as-is."""
        url = util.resolve_git_url("https://git.example.com/repo.git#some-tag")

        self.assertEqual(url, "https://git.example.com/repo.git#some-tag")
        self.assertEqual(run.mock_calls, [])

    @mock.patch("pungi.util.run")
    def test_resolve_ambiguous(self, run):
        """ls-remote output with no matching HEAD line raises RuntimeError."""
        run.return_value = (0, "CAFEBABE\tF11\nDEADBEEF\tF10\n")

        with self.assertRaises(RuntimeError):
            util.resolve_git_url("https://git.example.com/repo.git?somedir#HEAD")

        run.assert_called_once_with(
            ["git", "ls-remote", "https://git.example.com/repo.git", "HEAD"],
            universal_newlines=True,
        )

    @mock.patch("pungi.util.run")
    def test_resolve_keep_empty_query_string(self, run):
        """An empty ``?`` query string survives the rewrite untouched."""
        run.return_value = (0, "CAFEBABE\tHEAD\n")

        url = util.resolve_git_url("https://git.example.com/repo.git?#HEAD")

        run.assert_called_once_with(
            ["git", "ls-remote", "https://git.example.com/repo.git", "HEAD"],
            universal_newlines=True,
        )
        self.assertEqual(url, "https://git.example.com/repo.git?#CAFEBABE")

    @mock.patch("pungi.util.run")
    def test_resolve_strip_git_plus_prefix(self, run):
        """``git+`` scheme prefix is stripped for ls-remote but kept in result."""
        run.return_value = (0, "CAFEBABE\tHEAD\n")

        url = util.resolve_git_url("git+https://git.example.com/repo.git#HEAD")

        run.assert_called_once_with(
            ["git", "ls-remote", "https://git.example.com/repo.git", "HEAD"],
            universal_newlines=True,
        )
        self.assertEqual(url, "git+https://git.example.com/repo.git#CAFEBABE")

    @mock.patch("pungi.util.run")
    def test_resolve_no_branch_in_remote(self, run):
        """Empty ls-remote output means the branch does not exist -> error."""
        run.return_value = (0, "")

        with self.assertRaises(RuntimeError) as ctx:
            util.resolve_git_url(
                "https://git.example.com/repo.git?somedir#origin/my-branch"
            )

        run.assert_called_once_with(
            [
                "git",
                "ls-remote",
                "https://git.example.com/repo.git",
                "refs/heads/my-branch",
            ],
            universal_newlines=True,
        )
        self.assertIn("ref does not exist in remote repo", str(ctx.exception))

    @mock.patch("time.sleep")
    @mock.patch("pungi.util.run")
    def test_retry(self, run, sleep):
        """A failed ls-remote is retried after a 30 second sleep."""
        # First call blows up, second call succeeds.
        run.side_effect = [RuntimeError("Boom"), (0, "CAFEBABE\tHEAD\n")]

        url = util.resolve_git_url("https://git.example.com/repo.git?somedir#HEAD")

        self.assertEqual(url, "https://git.example.com/repo.git?somedir#CAFEBABE")
        self.assertEqual(sleep.call_args_list, [mock.call(30)])
        self.assertEqual(
            run.call_args_list,
            [
                mock.call(
                    ["git", "ls-remote", "https://git.example.com/repo.git", "HEAD"],
                    universal_newlines=True,
                )
            ]
            * 2,
        )

    @mock.patch("pungi.util.resolve_git_ref")
    @mock.patch("pungi.util.resolve_git_url")
    def test_resolver_offline(self, mock_resolve_url, mock_resolve_ref):
        """Offline resolver returns the URL unchanged and never hits git."""
        resolver = util.GitUrlResolver(offline=True)
        self.assertEqual(
            resolver("http://example.com/repo.git#HEAD"),
            "http://example.com/repo.git#HEAD",
        )
        self.assertEqual(mock_resolve_url.call_args_list, [])
        self.assertEqual(mock_resolve_ref.call_args_list, [])

    @mock.patch("pungi.util.resolve_git_ref")
    @mock.patch("pungi.util.resolve_git_url")
    def test_resolver_offline_branch(self, mock_resolve_url, mock_resolve_ref):
        """Offline resolver returns an explicit branch name unchanged."""
        resolver = util.GitUrlResolver(offline=True)
        self.assertEqual(
            resolver("http://example.com/repo.git", "master"), "master",
        )
        self.assertEqual(mock_resolve_url.call_args_list, [])
        self.assertEqual(mock_resolve_ref.call_args_list, [])

    @mock.patch("pungi.util.resolve_git_ref")
    @mock.patch("pungi.util.resolve_git_url")
    def test_resolver_caches_calls(self, mock_resolve_url, mock_resolve_ref):
        """Repeated lookups of the same URL/ref hit the cache, not git."""
        url1 = "http://example.com/repo.git#HEAD"
        url2 = "http://example.com/repo.git#master"
        url3 = "http://example.com/repo.git"
        ref1 = "foo"
        ref2 = "bar"
        # Each underlying resolver may only be consulted once per distinct input.
        mock_resolve_url.side_effect = ["1", "2"]
        mock_resolve_ref.side_effect = ["cafe", "beef"]
        resolver = util.GitUrlResolver()
        self.assertEqual(resolver(url1), "1")
        self.assertEqual(resolver(url1), "1")
        self.assertEqual(resolver(url3, ref1), "cafe")
        self.assertEqual(resolver(url3, ref2), "beef")
        self.assertEqual(resolver(url2), "2")
        self.assertEqual(resolver(url3, ref1), "cafe")
        self.assertEqual(resolver(url1), "1")
        self.assertEqual(resolver(url3, ref2), "beef")
        self.assertEqual(resolver(url2), "2")
        self.assertEqual(resolver(url3, ref2), "beef")
        self.assertEqual(
            mock_resolve_url.call_args_list, [mock.call(url1), mock.call(url2)]
        )
        self.assertEqual(
            mock_resolve_ref.call_args_list,
            [mock.call(url3, ref1), mock.call(url3, ref2)],
        )

    @mock.patch("pungi.util.resolve_git_url")
    def test_resolver_caches_failure(self, mock_resolve):
        """A resolution failure is cached too: the error repeats, git is asked once."""
        url = "http://example.com/repo.git#HEAD"
        mock_resolve.side_effect = mk_boom(util.GitUrlResolveError, "failed")
        resolver = util.GitUrlResolver()
        with self.assertRaises(util.GitUrlResolveError):
            resolver(url)
        with self.assertRaises(util.GitUrlResolveError):
            resolver(url)
        self.assertEqual(mock_resolve.call_args_list, [mock.call(url)])
|
|
|
|
|
2015-12-08 13:29:18 +00:00
|
|
|
|
2016-01-05 08:27:20 +00:00
|
|
|
class TestGetVariantData(unittest.TestCase):
    """Tests for util.get_variant_data matching config patterns to a variant UID."""

    def _lookup(self, config, variant_uid, **kwargs):
        # Run the lookup under key "foo" against a mocked variant with this UID.
        return util.get_variant_data(
            config, "foo", mock.Mock(uid=variant_uid), **kwargs
        )

    def test_get_simple(self):
        """A scalar value for an exactly matching pattern is wrapped in a list."""
        result = self._lookup({"foo": {"^Client$": 1}}, "Client")
        self.assertEqual(result, [1])

    def test_get_make_list(self):
        """Values from all matching patterns are merged into one flat list."""
        result = self._lookup({"foo": {"^Client$": [1, 2], "^.*$": 3}}, "Client")
        six.assertCountEqual(self, result, [1, 2, 3])

    def test_not_matching_arch(self):
        """No pattern matches the variant -> empty result."""
        result = self._lookup({"foo": {"^Client$": [1, 2]}}, "Server")
        self.assertEqual(result, [])

    def test_handle_missing_config(self):
        """A completely absent config key yields an empty result."""
        result = self._lookup({}, "Client")
        self.assertEqual(result, [])

    def test_get_save_pattern(self):
        """The ``keys`` set collects exactly the patterns that matched."""
        matched_patterns = set()
        result = self._lookup(
            {"foo": {"^Client$": 1, "^NotClient$": 2}}, "Client", keys=matched_patterns
        )
        self.assertEqual(result, [1])
        self.assertEqual(matched_patterns, set(["^Client$"]))
|
2017-08-18 07:33:51 +00:00
|
|
|
|
2016-01-05 08:27:20 +00:00
|
|
|
|
2016-01-06 11:24:01 +00:00
|
|
|
class TestVolumeIdGenerator(unittest.TestCase):
    """Tests for volume id generation (util.get_volid and friends)."""

    def setUp(self):
        self.tmp_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)

    @mock.patch("pungi.compose.ComposeInfo")
    def test_get_volid(self, ci):
        """Each supported substitution key expands to the expected value."""
        # Pairs of (format keys, expected expansion) — covers every key.
        all_keys = [
            (
                ["arch", "compose_id", "date", "disc_type"],
                "x86_64-compose_id-20160107-",
            ),
            (
                ["label", "label_major_version", "release_short", "respin"],
                "RC-1.0-1-rel_short2-2",
            ),
            (["type", "type_suffix", "variant", "version"], "nightly-.n-Server-6.0"),
        ]
        for keys, expected in all_keys:
            # NOTE: ``format`` shadows the builtin; kept as-is from the original.
            format = "-".join(["%(" + k + ")s" for k in keys])
            conf = {
                "release_short": "rel_short2",
                "release_version": "6.0",
                "image_volid_formats": [format],
                "image_volid_layered_product_formats": [],
                "volume_id_substitutions": {},
                "restricted_volid": False,
            }
            variant = mock.Mock(uid="Server", type="variant")
            ci.return_value.compose.respin = 2
            ci.return_value.compose.id = "compose_id"
            ci.return_value.compose.date = "20160107"
            ci.return_value.compose.type = "nightly"
            ci.return_value.compose.type_suffix = ".n"
            ci.return_value.compose.label = "RC-1.0"
            ci.return_value.compose.label_major_version = "1"

            ci.return_value.release.version = "3.0"
            ci.return_value.release.short = "rel_short"

            c = compose.Compose(conf, self.tmp_dir)

            volid = util.get_volid(c, "x86_64", variant, disc_type=False)

            self.assertEqual(volid, expected)

    @mock.patch("pungi.compose.ComposeInfo")
    def test_get_restricted_volid(self, ci):
        """With restricted_volid=True, dots become dashes in the result."""
        all_keys = [
            (
                ["arch", "compose_id", "date", "disc_type"],
                "x86_64-compose_id-20160107-",
            ),
            (
                ["label", "label_major_version", "release_short", "respin"],
                "RC-1-0-1-rel_short2-2",
            ),
            (["type", "type_suffix", "variant", "version"], "nightly--n-Server-6-0"),
        ]
        for keys, expected in all_keys:
            # NOTE: ``format`` shadows the builtin; kept as-is from the original.
            format = "-".join(["%(" + k + ")s" for k in keys])
            conf = {
                "release_short": "rel_short2",
                "release_version": "6.0",
                "image_volid_formats": [format],
                "image_volid_layered_product_formats": [],
                "volume_id_substitutions": {},
                "restricted_volid": True,
            }
            variant = mock.Mock(uid="Server", type="variant")
            ci.return_value.compose.respin = 2
            ci.return_value.compose.id = "compose_id"
            ci.return_value.compose.date = "20160107"
            ci.return_value.compose.type = "nightly"
            ci.return_value.compose.type_suffix = ".n"
            ci.return_value.compose.label = "RC-1.0"
            ci.return_value.compose.label_major_version = "1"

            ci.return_value.release.version = "3.0"
            ci.return_value.release.short = "rel_short"

            c = compose.Compose(conf, self.tmp_dir)

            volid = util.get_volid(c, "x86_64", variant, disc_type=False)

            self.assertEqual(volid, expected)

    @mock.patch("pungi.compose.ComposeInfo")
    def test_get_volid_too_long(self, ci):
        """If every candidate volid exceeds the limit, all are reported."""
        conf = {
            "release_short": "rel_short2",
            "release_version": "6.0",
            "image_volid_formats": [
                "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",  # 34 chars
                "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",  # 33 chars
            ],
            "image_volid_layered_product_formats": [],
            "volume_id_substitutions": {},
        }
        variant = mock.Mock(uid="Server", type="variant")
        c = compose.Compose(conf, self.tmp_dir)

        with self.assertRaises(ValueError) as ctx:
            util.get_volid(c, "x86_64", variant, disc_type=False)

        # The error message should list both rejected candidates.
        self.assertIn("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", str(ctx.exception))
        self.assertIn("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", str(ctx.exception))

    @mock.patch("pungi.compose.ComposeInfo")
    def test_apply_substitutions(self, ci):
        """Configured substitutions are applied longest-first to the volid."""
        # Pairs of (input volid, expected volid after substitution).
        all_keys = [
            (
                "Fedora-WorkstationOstree-ostree-x86_64-rawhide",
                "Fedora-WS-ostree-x86_64-rawhide",
            ),
            (
                "Fedora-WorkstationOstree-ostree-x86_64-Rawhide",
                "Fedora-WS-ostree-x86_64-rawh",
            ),
            ("x86_64-compose_id-20160107", "x86_64-compose_id-20160107"),
            ("x86_64-compose_id-20160107-Alpha", "x86_64-compose_id-20160107-A"),
            # These test the case where one substitution is a subset
            # of the other, but sorts alphabetically ahead of it, to
            # make sure we're correctly sorting by length
            ("Fedora-zzzaaaaaazzz-Rawhide", "Fedora-zzz-rawh"),
            ("Fedora-aaaaaa-Rawhide", "Fedora-aaa-rawh"),
        ]
        for volid, expected in all_keys:
            conf = {
                "volume_id_substitutions": {
                    "Rawhide": "rawh",
                    "WorkstationOstree": "WS",
                    "Workstation": "WS",
                    "Alpha": "A",
                    "zzzaaaaaazzz": "zzz",
                    "aaaaaa": "aaa",
                }
            }
            c = compose.Compose(conf, self.tmp_dir)
            transformed_volid = util._apply_substitutions(c, volid)
            self.assertEqual(transformed_volid, expected)
|
|
|
|
|
2016-01-06 11:24:01 +00:00
|
|
|
|
2016-02-29 12:35:55 +00:00
|
|
|
class TestFindOldCompose(unittest.TestCase):
    """Tests for util.find_old_compose locating the newest usable old compose."""

    def setUp(self):
        self.tmp_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)

    def _path(self, name):
        # Absolute path of a (potential) compose directory in the playground.
        return os.path.join(self.tmp_dir, name)

    def _make_compose(self, name, status):
        # A compose is recognised by a STATUS file in its top-level directory.
        touch(os.path.join(self._path(name), "STATUS"), status)

    def test_finds_single(self):
        """A lone finished compose is found."""
        self._make_compose("Fedora-Rawhide-20160229.0", "FINISHED")
        found = util.find_old_compose(self.tmp_dir, "Fedora", "Rawhide", "")
        self.assertEqual(found, self._path("Fedora-Rawhide-20160229.0"))

    def test_ignores_in_progress(self):
        """A compose that is still STARTED must not be picked up."""
        self._make_compose("Fedora-Rawhide-20160229.0", "STARTED")
        found = util.find_old_compose(self.tmp_dir, "Fedora", "Rawhide", "")
        self.assertIsNone(found)

    def test_only_considers_allowed_status(self):
        """An explicit allowed_statuses list overrides the default set."""
        self._make_compose("Fedora-Rawhide-20160229.0", "FINISHED")
        found = util.find_old_compose(
            self.tmp_dir, "Fedora", "Rawhide", "", allowed_statuses=["DOOMED"]
        )
        self.assertIsNone(found)

    def test_finds_latest(self):
        """The newest compose with an acceptable status wins."""
        self._make_compose("Fedora-Rawhide-20160228.0", "DOOMED")
        self._make_compose("Fedora-Rawhide-20160229.0", "FINISHED")
        self._make_compose("Fedora-Rawhide-20160229.1", "FINISHED_INCOMPLETE")
        found = util.find_old_compose(self.tmp_dir, "Fedora", "Rawhide", "")
        self.assertEqual(found, self._path("Fedora-Rawhide-20160229.1"))

    def test_find_correct_type(self):
        """The compose-type suffix must match exactly, not as a prefix."""
        self._make_compose("Fedora-26-updates-20160229.0", "FINISHED")
        self._make_compose("Fedora-26-updates-testing-20160229.0", "FINISHED")
        found = util.find_old_compose(self.tmp_dir, "Fedora", "26", "-updates")
        self.assertEqual(found, self._path("Fedora-26-updates-20160229.0"))
        found = util.find_old_compose(self.tmp_dir, "Fedora", "26", "-updates-testing")
        self.assertEqual(found, self._path("Fedora-26-updates-testing-20160229.0"))

    def test_find_latest_with_two_digit_respin(self):
        """Respins are compared numerically, so 10 beats 9."""
        self._make_compose("Fedora-Rawhide-20160228.n.9", "FINISHED")
        self._make_compose("Fedora-Rawhide-20160228.n.10", "FINISHED")
        found = util.find_old_compose(self.tmp_dir, "Fedora", "Rawhide", "")
        self.assertEqual(found, self._path("Fedora-Rawhide-20160228.n.10"))

    def test_finds_ignores_other_files(self):
        """Plain files, odd layouts and non-matching names are all skipped."""
        # A regular file with a compose-like name is not a compose.
        touch(self._path("Fedora-Rawhide-20160229.0"), "not a compose")
        # STATUS being a directory instead of a file disqualifies it too.
        touch(
            os.path.join(self._path("Fedora-Rawhide-20160228.0"), "STATUS", "file"),
            "also not a compose",
        )
        # Wrong version and wrong product never match.
        self._make_compose("Fedora-24-20160229.0", "FINISHED")
        self._make_compose("Another-Rawhide-20160229.0", "FINISHED")
        found = util.find_old_compose(self.tmp_dir, "Fedora", "Rawhide", "")
        self.assertIsNone(found)

    def test_search_in_file(self):
        """Searching inside a regular file instead of a directory finds nothing."""
        touch(self._path("file"))
        found = util.find_old_compose(self._path("file"), "Fedora", "Rawhide", "")
        self.assertIsNone(found)

    def test_do_not_skip_symlink(self):
        """A symlink to a finished compose is a valid (and newer) candidate."""
        self._make_compose("Fedora-Rawhide-20160228.n.10", "FINISHED")
        os.symlink(
            self._path("Fedora-Rawhide-20160228.n.10"),
            self._path("Fedora-Rawhide-20160229.n.0"),
        )
        found = util.find_old_compose(self.tmp_dir, "Fedora", "Rawhide", "")
        self.assertEqual(found, self._path("Fedora-Rawhide-20160229.n.0"))

    def test_finds_layered_product(self):
        """Layered products match on base product short name and version too."""
        self._make_compose("Fedora-Rawhide-Base-1-20160229.0", "FINISHED")
        found = util.find_old_compose(
            self.tmp_dir,
            "Fedora",
            "Rawhide",
            "",
            base_product_short="Base",
            base_product_version="1",
        )
        self.assertEqual(found, self._path("Fedora-Rawhide-Base-1-20160229.0"))
|
2016-02-29 12:35:55 +00:00
|
|
|
|
|
|
|
|
2016-03-31 07:27:22 +00:00
|
|
|
class TestHelpers(PungiTestCase):
    """Tests for small helper functions in pungi.util."""

    def test_process_args(self):
        """The template is expanded once per value; empty input -> no args."""
        template = "--opt=%s"
        # None and an empty list both produce no arguments at all.
        for empty_value in (None, []):
            self.assertEqual(util.process_args(template, empty_value), [])
        # A list is expanded element by element.
        self.assertEqual(
            util.process_args(template, ["foo", "bar"]), ["--opt=foo", "--opt=bar"]
        )
        # A bare string counts as a single value, not an iterable of chars.
        self.assertEqual(util.process_args(template, "foo"), ["--opt=foo"])

    def test_makedirs(self):
        """makedirs creates the whole directory chain."""
        target = os.path.join(self.topdir, "foo", "bar", "baz")
        util.makedirs(target)
        self.assertTrue(os.path.isdir(target))

    def test_makedirs_on_existing(self):
        """makedirs is a no-op (not an error) on an existing directory."""
        target = os.path.join(self.topdir, "foo", "bar", "baz")
        os.makedirs(target)
        try:
            util.makedirs(target)
        except OSError:
            self.fail("makedirs raised exception on existing directory")
|
2016-03-31 07:27:22 +00:00
|
|
|
|
2016-03-23 09:40:16 +00:00
|
|
|
|
2016-08-22 14:08:25 +00:00
|
|
|
class TestLevenshtein(unittest.TestCase):
    """Tests for the util.levenshtein edit-distance helper."""

    def test_edit_dist_empty_str(self):
        """Two empty strings are identical: distance zero."""
        distance = util.levenshtein("", "")
        self.assertEqual(distance, 0)

    def test_edit_dist_same_str(self):
        """Identical non-empty strings also have distance zero."""
        distance = util.levenshtein("aaa", "aaa")
        self.assertEqual(distance, 0)

    def test_edit_dist_one_change(self):
        """A single substitution costs exactly one edit."""
        distance = util.levenshtein("aab", "aaa")
        self.assertEqual(distance, 1)

    def test_edit_dist_different_words(self):
        """Classic example: kitten -> sitting takes three edits."""
        distance = util.levenshtein("kitten", "sitting")
        self.assertEqual(distance, 3)
|
2016-08-22 14:08:25 +00:00
|
|
|
|
|
|
|
|
extra-files: Write a metadata file enumerating extra files
Introduces a new metadata file to track arbitrary files added during the
extra-files phase. This file is placed in the root of each tree and is
called ``extra_files.json``. It is a JSON file containing a single
object, which contains a "header" key with an object describing the
metadata, and a "data" key, which is an array of objects, where each
object represents a file. Each object contains the "file", "checksums",
and "size" keys. "file" is the relative path from the tree root to the
extra file. "checksums" is an object containing one or more checksums,
where the key is the digest type and the value of that key is the hex
digest. Finally, the size is the size of the file in bytes.
For example:
{
"header": {"version": "1.0},
"data": [
{
"file": "GPL",
"checksums": {
"sha256": "8177f97513213526df2cf6184d8ff986c675afb514d4e68a404010521b880643"
},
"size": 18092
},
{
"file": "release-notes/notes.html",
"checksums": {
"sha256": "82b1ba8db522aadf101dca6404235fba179e559b95ea24ff39ee1e5d9a53bdcb"
},
"size": 1120
}
]
}
Signed-off-by: Jeremy Cline <jeremy@jcline.org>
Fixes: #295
2016-05-31 13:40:20 +00:00
|
|
|
class TestRecursiveFileList(unittest.TestCase):
    """Tests for util.recursive_file_list enumerating a directory tree."""

    def setUp(self):
        self.tmp_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)

    def _populate(self, relative_paths):
        # Create an empty file for each relative path under the temp dir.
        for rel_path in relative_paths:
            touch(os.path.join(self.tmp_dir, rel_path))

    def test_flat_file_list(self):
        """Files directly in the root are all listed."""
        expected_files = sorted(["file1", "file2", "file3"])
        self._populate(expected_files)

        actual_files = sorted(util.recursive_file_list(self.tmp_dir))
        self.assertEqual(expected_files, actual_files)

    def test_nested_file_list(self):
        """Files in nested subdirectories are listed with relative paths."""
        expected_files = sorted(["file1", "subdir/file2", "sub/subdir/file3"])
        self._populate(expected_files)

        actual_files = sorted(util.recursive_file_list(self.tmp_dir))
        self.assertEqual(expected_files, actual_files)
|
|
|
|
|
|
|
|
|
2017-02-17 12:44:11 +00:00
|
|
|
class TestTempFiles(unittest.TestCase):
    """Tests for the util.temp_dir context manager."""

    def test_temp_dir_ok(self):
        """The directory exists inside the block and is removed afterwards."""
        with util.temp_dir() as tmp:
            self.assertTrue(os.path.isdir(tmp))
        self.assertFalse(os.path.exists(tmp))

    def test_temp_dir_fail(self):
        """Cleanup happens even when the managed block raises."""
        with self.assertRaises(RuntimeError):
            with util.temp_dir() as tmp:
                self.assertTrue(os.path.isdir(tmp))
                raise RuntimeError("BOOM")
        self.assertFalse(os.path.exists(tmp))

    def test_temp_dir_in_non_existing_dir(self):
        """A missing parent directory is created and survives the cleanup."""
        with util.temp_dir() as playground:
            root = os.path.join(playground, "missing")
            with util.temp_dir(dir=root) as tmp:
                self.assertTrue(os.path.isdir(tmp))
            # Only the temp dir itself is removed; its parent stays.
            self.assertTrue(os.path.isdir(root))
            self.assertFalse(os.path.exists(tmp))
|
|
|
|
|
|
|
|
|
2017-03-08 12:16:34 +00:00
|
|
|
class TestUnmountCmd(unittest.TestCase):
    """Tests for util.run_unmount_cmd retry and debug-logging behaviour."""

    def _fakeProc(self, ret, err="", out=""):
        # Build a mock that mimics a subprocess.Popen object: a returncode
        # attribute plus a communicate() returning (stdout, stderr).
        proc = mock.Mock(returncode=ret)
        proc.communicate.return_value = (out, err)
        return proc

    @mock.patch("subprocess.Popen")
    def test_unmount_cmd_success(self, mockPopen):
        """A zero exit code succeeds on the first try, with no retries."""
        cmd = "unmount"
        mockPopen.side_effect = [self._fakeProc(0, "")]
        util.run_unmount_cmd(cmd)
        self.assertEqual(
            mockPopen.call_args_list,
            [
                mock.call(
                    cmd,
                    stderr=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    universal_newlines=True,
                )
            ],
        )

    @mock.patch("subprocess.Popen")
    def test_unmount_cmd_fail_other_reason(self, mockPopen):
        """A failure that is not a busy error is raised immediately."""
        cmd = "unmount"
        mockPopen.side_effect = [self._fakeProc(1, "It is broken")]
        with self.assertRaises(RuntimeError) as ctx:
            util.run_unmount_cmd(cmd)
        self.assertEqual(
            str(ctx.exception), "Unhandled error when running 'unmount': 'It is broken'"
        )
        # Only a single attempt was made.
        self.assertEqual(
            mockPopen.call_args_list,
            [
                mock.call(
                    cmd,
                    stderr=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    universal_newlines=True,
                )
            ],
        )

    @mock.patch("time.sleep")
    @mock.patch("subprocess.Popen")
    def test_unmount_cmd_fail_then_retry(self, mockPopen, mock_sleep):
        """Busy errors are retried until the command finally succeeds."""
        cmd = "unmount"
        # Two busy failures followed by a success.
        mockPopen.side_effect = [
            self._fakeProc(1, "Device or resource busy"),
            self._fakeProc(1, "Device or resource busy"),
            self._fakeProc(0, ""),
        ]
        util.run_unmount_cmd(cmd)
        self.assertEqual(
            mockPopen.call_args_list,
            [
                mock.call(
                    cmd,
                    stderr=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    universal_newlines=True,
                )
            ]
            * 3,
        )
        # The delay grows with each retry: 0s, then 1s.
        self.assertEqual(mock_sleep.call_args_list, [mock.call(0), mock.call(1)])

    @mock.patch("time.sleep")
    @mock.patch("subprocess.Popen")
    def test_unmount_cmd_fail_then_retry_and_fail(self, mockPopen, mock_sleep):
        """When max_retries is exhausted a RuntimeError is raised."""
        cmd = "unmount"
        mockPopen.side_effect = [
            self._fakeProc(1, "Device or resource busy"),
            self._fakeProc(1, "Device or resource busy"),
            self._fakeProc(1, "Device or resource busy"),
        ]
        with self.assertRaises(RuntimeError) as ctx:
            util.run_unmount_cmd(cmd, max_retries=3)
        self.assertEqual(
            mockPopen.call_args_list,
            [
                mock.call(
                    cmd,
                    stderr=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    universal_newlines=True,
                )
            ]
            * 3,
        )
        self.assertEqual(
            mock_sleep.call_args_list, [mock.call(0), mock.call(1), mock.call(2)]
        )
        self.assertEqual(
            str(ctx.exception), "Failed to run 'unmount': Device or resource busy."
        )

    @mock.patch("time.sleep")
    @mock.patch("subprocess.Popen")
    def test_fusermount_fail_then_retry_and_fail_with_debug(
        self, mockPopen, mock_sleep
    ):
        """On final failure with path and logger set, debug commands run.

        After the retries are exhausted, ``ls``, ``fuser`` and ``lsof`` are
        executed on the path and their output is sent to the logger.
        """
        logger = mock.Mock()
        # Three busy failures, then the results of the three debug commands.
        mockPopen.side_effect = [
            self._fakeProc(1, "Device or resource busy"),
            self._fakeProc(1, "Device or resource busy"),
            self._fakeProc(1, "Device or resource busy"),
            self._fakeProc(0, out="list of files"),
            self._fakeProc(0, out="It is very busy"),
            self._fakeProc(1, out="lsof output"),
        ]
        with self.assertRaises(RuntimeError) as ctx:
            util.run_unmount_cmd(
                ["fusermount", "-u", "/path"],
                path="/path",
                max_retries=3,
                logger=logger,
            )
        cmd = ["fusermount", "-u", "/path"]
        # Three unmount attempts followed by the three debug commands; the
        # debug commands merge stderr into stdout.
        expected = [
            mock.call(
                cmd,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ),
            mock.call(
                cmd,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ),
            mock.call(
                cmd,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ),
            mock.call(
                ["ls", "-lA", "/path"],
                stderr=subprocess.STDOUT,
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ),
            mock.call(
                ["fuser", "-vm", "/path"],
                stderr=subprocess.STDOUT,
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ),
            mock.call(
                ["lsof", "+D", "/path"],
                stderr=subprocess.STDOUT,
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ),
        ]
        self.assertEqual(mockPopen.call_args_list, expected)
        self.assertEqual(
            mock_sleep.call_args_list, [mock.call(0), mock.call(1), mock.call(2)]
        )
        self.assertEqual(
            str(ctx.exception),
            "Failed to run ['fusermount', '-u', '/path']: Device or resource busy.",
        )
        # Each debug command's exit code and output is logged at debug level.
        self.assertEqual(
            logger.mock_calls,
            [
                mock.call.debug(
                    "`%s` exited with %s and following output:\n%s",
                    "ls -lA /path",
                    0,
                    "list of files",
                ),
                mock.call.debug(
                    "`%s` exited with %s and following output:\n%s",
                    "fuser -vm /path",
                    0,
                    "It is very busy",
                ),
                mock.call.debug(
                    "`%s` exited with %s and following output:\n%s",
                    "lsof +D /path",
                    1,
                    "lsof output",
                ),
            ],
        )
|
2017-03-14 09:29:53 +00:00
|
|
|
|
2017-03-08 12:16:34 +00:00
|
|
|
|
2017-03-24 21:20:55 +00:00
|
|
|
class TranslatePathTestCase(unittest.TestCase):
    """Tests for util.translate_path."""

    def test_does_nothing_without_config(self):
        """With no translation rules the path is returned unchanged."""
        fake_compose = mock.Mock(conf={"translate_paths": []})
        result = util.translate_path(fake_compose, "/mnt/koji/compose/rawhide/XYZ")
        self.assertEqual(result, "/mnt/koji/compose/rawhide/XYZ")

    def test_translates_prefix(self):
        """A matching prefix is replaced by the configured target."""
        fake_compose = mock.Mock(
            conf={"translate_paths": [("/mnt/koji", "http://example.com")]}
        )
        result = util.translate_path(fake_compose, "/mnt/koji/compose/rawhide/XYZ")
        self.assertEqual(result, "http://example.com/compose/rawhide/XYZ")

    def test_does_not_translate_not_matching(self):
        """A path that only shares a partial prefix is left untouched."""
        fake_compose = mock.Mock(
            conf={"translate_paths": [("/mnt/koji", "http://example.com")]}
        )
        result = util.translate_path(
            fake_compose, "/mnt/fedora_koji/compose/rawhide/XYZ"
        )
        self.assertEqual(result, "/mnt/fedora_koji/compose/rawhide/XYZ")
|
2017-03-24 21:20:55 +00:00
|
|
|
|
|
|
|
|
2017-03-27 21:53:08 +00:00
|
|
|
class GetRepoFuncsTestCase(unittest.TestCase):
    """Tests for util.get_repo_url(s) and util.get_repo_dict(s)."""

    @mock.patch("pungi.compose.ComposeInfo")
    def setUp(self, ci):
        # Build a real Compose object on top of a mocked ComposeInfo, so the
        # repo helpers can translate local paths and expand variant UIDs.
        self.tmp_dir = tempfile.mkdtemp()
        conf = {"translate_paths": [(self.tmp_dir, "http://example.com")]}
        ci.return_value.compose.respin = 0
        ci.return_value.compose.id = "RHEL-8.0-20180101.n.0"
        ci.return_value.compose.date = "20160101"
        ci.return_value.compose.type = "nightly"
        ci.return_value.compose.type_suffix = ".n"
        ci.return_value.compose.label = "RC-1.0"
        ci.return_value.compose.label_major_version = "1"

        compose_dir = os.path.join(self.tmp_dir, ci.return_value.compose.id)
        self.compose = compose.Compose(conf, compose_dir)
        # Two variants so UIDs like "Server"/"Client" can be resolved.
        server_variant = mock.Mock(uid="Server", type="variant")
        client_variant = mock.Mock(uid="Client", type="variant")
        self.compose.all_variants = {
            "Server": server_variant,
            "Client": client_variant,
        }

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)

    def test_get_repo_url_from_normal_url(self):
        # A regular URL passes through unchanged.
        url = util.get_repo_url(self.compose, "http://example.com/repo")
        self.assertEqual(url, "http://example.com/repo")

    def test_get_repo_url_from_path(self):
        # A local path is rewritten via the translate_paths configuration.
        url = util.get_repo_url(self.compose, os.path.join(self.tmp_dir, "repo"))
        self.assertEqual(url, "http://example.com/repo")

    def test_get_repo_url_from_variant_uid(self):
        # A bare variant UID expands to that variant's os repo URL.
        url = util.get_repo_url(self.compose, "Server")
        self.assertEqual(
            url, "http://example.com/RHEL-8.0-20180101.n.0/compose/Server/$basearch/os"
        )

    def test_get_repo_url_from_repo_dict(self):
        # A dict's baseurl is processed the same way as a plain string:
        # URLs pass through, variant UIDs expand.
        repo = {"baseurl": "http://example.com/repo"}
        url = util.get_repo_url(self.compose, repo)
        self.assertEqual(url, "http://example.com/repo")

        repo = {"baseurl": "Server"}
        url = util.get_repo_url(self.compose, repo)
        self.assertEqual(
            url, "http://example.com/RHEL-8.0-20180101.n.0/compose/Server/$basearch/os"
        )

    def test_get_repo_urls(self):
        # Mixed input forms are all normalized to URLs, preserving order.
        repos = [
            "http://example.com/repo",
            "Server",
            {"baseurl": "Client"},
            {"baseurl": "ftp://example.com/linux/repo"},
        ]

        expect = [
            "http://example.com/repo",
            "http://example.com/RHEL-8.0-20180101.n.0/compose/Server/$basearch/os",
            "http://example.com/RHEL-8.0-20180101.n.0/compose/Client/$basearch/os",
            "ftp://example.com/linux/repo",
        ]

        self.assertEqual(util.get_repo_urls(self.compose, repos), expect)

    def test_get_repo_dict_from_normal_url(self):
        # The generated repo name is the URL with slashes mapped to '_'.
        repo_dict = util.get_repo_dict("http://example.com/repo")
        expect = {
            "name": "http:__example.com_repo",
            "baseurl": "http://example.com/repo",
        }
        self.assertEqual(repo_dict, expect)

    def test_get_repo_dict_from_variant_uid(self):
        # Variant UIDs yield an empty dict.
        repo_dict = util.get_repo_dict("Server")  # this repo format is deprecated
        expect = {}
        self.assertEqual(repo_dict, expect)

    def test_get_repo_dict_from_repo_dict(self):
        # The same applies when the UID is wrapped in a dict.
        repo = {"baseurl": "Server"}  # this repo format is deprecated
        expect = {}
        repo_dict = util.get_repo_dict(repo)
        self.assertEqual(repo_dict, expect)

    def test_get_repo_dicts(self):
        # Deprecated variant-UID entries are dropped from the result; all
        # other forms are normalized to {"name": ..., "baseurl": ...}.
        repos = [
            "http://example.com/repo",
            "Server",  # this repo format is deprecated (and will not be included into final repo_dict) # noqa: E501
            {"baseurl": "Client"},  # this repo format is deprecated
            {"baseurl": "ftp://example.com/linux/repo"},
            {"name": "testrepo", "baseurl": "ftp://example.com/linux/repo"},
        ]
        expect = [
            {"name": "http:__example.com_repo", "baseurl": "http://example.com/repo"},
            {
                "name": "ftp:__example.com_linux_repo",
                "baseurl": "ftp://example.com/linux/repo",
            },
            {"name": "testrepo", "baseurl": "ftp://example.com/linux/repo"},
        ]
        repos = util.get_repo_dicts(repos)
        self.assertEqual(repos, expect)
|
|
|
|
|
|
|
|
|
2017-04-07 13:33:43 +00:00
|
|
|
class TestVersionGenerator(unittest.TestCase):
    """Tests for util.version_generator."""

    def setUp(self):
        # A compose whose base ComposeInfo carries the fields the
        # generator expressions read.
        base_info = mock.MagicMock(
            respin=0,
            id="RHEL-8.0-20180101.0",
            type="nightly",
            type_suffix="",
            label="RC-1.0",
            label_major_version="1",
        )
        base_info.release.version = "8"

        self.compose = mock.MagicMock()
        self.compose.ci_base = base_info
        self.compose.compose_respin = 0
        self.compose.compose_date = "20160101"

    def test_unknown_generator(self):
        """An unrecognized !GENERATOR token raises RuntimeError."""
        fake_compose = mock.Mock()
        with self.assertRaises(RuntimeError) as ctx:
            util.version_generator(fake_compose, "!GIMME_VERSION")

        self.assertEqual(
            str(ctx.exception), "Unknown version generator '!GIMME_VERSION'"
        )

    def test_passthrough_value(self):
        """A literal version string is returned as-is."""
        fake_compose = mock.Mock()
        self.assertEqual(util.version_generator(fake_compose, "1.2.3"), "1.2.3")

    def test_passthrough_none(self):
        """None is passed through unchanged."""
        fake_compose = mock.Mock()
        self.assertEqual(util.version_generator(fake_compose, None), None)

    def test_release_from_version_date_respin(self):
        """!VERSION_FROM_VERSION_DATE_RESPIN joins version, date and respin."""
        generated = util.version_generator(
            self.compose, "!VERSION_FROM_VERSION_DATE_RESPIN"
        )
        self.assertEqual(generated, "8.20160101.0")

    def test_release_from_date_respin(self):
        """!RELEASE_FROM_DATE_RESPIN joins compose date and respin."""
        generated = util.version_generator(self.compose, "!RELEASE_FROM_DATE_RESPIN")
        self.assertEqual(generated, "20160101.0")

    def test_version_from_version(self):
        """!VERSION_FROM_VERSION yields the bare release version."""
        generated = util.version_generator(self.compose, "!VERSION_FROM_VERSION")
        self.assertEqual(generated, "8")
|
|
|
|
|
2017-04-07 13:33:43 +00:00
|
|
|
|
2017-09-11 08:23:07 +00:00
|
|
|
class TestTZOffset(unittest.TestCase):
    """Tests for util.get_tz_offset with patched time module values.

    Note: time.timezone and time.altzone are seconds *west* of UTC, which
    is why positive patched values produce negative offsets and vice versa.
    """

    # DST not defined for the zone and not in effect: the standard
    # time.timezone (3600s west) applies.
    @mock.patch("time.daylight", new=False)
    @mock.patch("time.altzone", new=7200)
    @mock.patch("time.timezone", new=3600)
    @mock.patch("time.localtime", new=lambda: mock.Mock(tm_isdst=0))
    def test_zone_without_dst(self):
        self.assertEqual(util.get_tz_offset(), "-01:00")

    # DST defined (daylight=True) but not currently in effect (tm_isdst=0):
    # still uses time.timezone, not time.altzone.
    @mock.patch("time.daylight", new=True)
    @mock.patch("time.altzone", new=7200)
    @mock.patch("time.timezone", new=3600)
    @mock.patch("time.localtime", new=lambda: mock.Mock(tm_isdst=0))
    def test_with_active_dst(self):
        self.assertEqual(util.get_tz_offset(), "-01:00")

    # DST in effect (tm_isdst=1): time.altzone (-9000s west, i.e. 2.5h
    # east) applies, including the non-whole-hour minutes.
    @mock.patch("time.daylight", new=True)
    @mock.patch("time.altzone", new=-9000)
    @mock.patch("time.timezone", new=-3600)
    @mock.patch("time.localtime", new=lambda: mock.Mock(tm_isdst=1))
    def test_with_inactive_dst(self):
        self.assertEqual(util.get_tz_offset(), "+02:30")

    # A zero offset is formatted with an explicit plus sign.
    @mock.patch("time.daylight", new=False)
    @mock.patch("time.altzone", new=0)
    @mock.patch("time.timezone", new=0)
    @mock.patch("time.localtime", new=lambda: mock.Mock(tm_isdst=0))
    def test_utc(self):
        self.assertEqual(util.get_tz_offset(), "+00:00")
|
|
|
|
|
|
|
|
|
2018-05-30 14:09:31 +00:00
|
|
|
class TestParseKojiEvent(PungiTestCase):
    """Tests for util.parse_koji_event."""

    def test_number(self):
        """A plain numeric string is converted to an integer event id."""
        self.assertEqual(util.parse_koji_event("1234"), 1234)

    def test_correct_path(self):
        """The event id is read from the compose's koji-event metadata."""
        event_file = os.path.join(self.topdir, "work/global/koji-event")
        touch(event_file, '{"id": 19769058, "ts": 1527641311.22855}')

        self.assertEqual(util.parse_koji_event(self.topdir), 19769058)

    def test_bad_path(self):
        """A compose directory without the metadata file is rejected."""
        with self.assertRaises(argparse.ArgumentTypeError):
            util.parse_koji_event(self.topdir)
|
|
|
|
|
|
|
|
|
2018-11-15 08:09:12 +00:00
|
|
|
class TestCopyAll(PungiTestCase):
    """Tests for symlink handling in util.copy_all."""

    def setUp(self):
        super(TestCopyAll, self).setUp()
        self.src = os.path.join(self.topdir, "src")
        self.dst = os.path.join(self.topdir, "dst")
        util.makedirs(self.src)

    def test_preserve_symlink(self):
        """A relative symlink is copied as a link, not dereferenced."""
        touch(os.path.join(self.src, "target"))
        os.symlink("target", os.path.join(self.src, "symlink"))

        util.copy_all(self.src, self.dst)

        copied_target = os.path.join(self.dst, "target")
        copied_link = os.path.join(self.dst, "symlink")
        self.assertTrue(os.path.isfile(copied_target))
        self.assertTrue(os.path.islink(copied_link))
        self.assertEqual(os.readlink(copied_link), "target")

    def test_copy_broken_symlink(self):
        """A dangling symlink is still copied verbatim."""
        os.symlink("broken", os.path.join(self.src, "symlink"))

        util.copy_all(self.src, self.dst)

        copied_link = os.path.join(self.dst, "symlink")
        self.assertTrue(os.path.islink(copied_link))
        self.assertEqual(os.readlink(copied_link), "broken")
|
2019-11-19 07:51:18 +00:00
|
|
|
|
|
|
|
|
2020-03-03 07:07:42 +00:00
|
|
|
class TestMoveAll(PungiTestCase):
    """Tests for util.move_all."""

    def setUp(self):
        super(TestMoveAll, self).setUp()
        self.src = os.path.join(self.topdir, "src")
        self.dst = os.path.join(self.topdir, "dst")
        util.makedirs(self.src)

    def test_move_all(self):
        """Contents move to dst; the emptied src directory is kept."""
        touch(os.path.join(self.src, "target"))
        util.move_all(self.src, self.dst)

        self.assertTrue(os.path.isfile(os.path.join(self.dst, "target")))
        # The source directory itself remains, but it is now empty.
        self.assertTrue(os.path.exists(self.src))
        self.assertFalse(os.path.isfile(os.path.join(self.src, "target")))

    def test_move_all_rm_src_dir(self):
        """With rm_src_dir=True the source directory is removed too."""
        touch(os.path.join(self.src, "target"))
        util.move_all(self.src, self.dst, rm_src_dir=True)

        self.assertTrue(os.path.isfile(os.path.join(self.dst, "target")))
        self.assertFalse(os.path.exists(self.src))
        self.assertFalse(os.path.isfile(os.path.join(self.src, "target")))
|
|
|
|
|
|
|
|
|
2019-11-19 07:51:18 +00:00
|
|
|
@mock.patch("six.moves.urllib.request.urlretrieve")
|
|
|
|
class TestAsLocalFile(PungiTestCase):
|
|
|
|
def test_local_file(self, urlretrieve):
|
|
|
|
with util.as_local_file("/tmp/foo") as fn:
|
|
|
|
self.assertEqual(fn, "/tmp/foo")
|
|
|
|
self.assertEqual(urlretrieve.call_args_list, [])
|
|
|
|
|
|
|
|
def test_http(self, urlretrieve):
|
|
|
|
url = "http://example.com/repodata/repomd.xml"
|
|
|
|
|
|
|
|
def my_mock(url_):
|
|
|
|
self.assertEqual(url, url_)
|
|
|
|
self.filename = os.path.join(self.topdir, "my-file")
|
|
|
|
touch(self.filename)
|
|
|
|
return self.filename, {}
|
|
|
|
|
|
|
|
urlretrieve.side_effect = my_mock
|
|
|
|
|
|
|
|
with util.as_local_file(url) as fn:
|
|
|
|
self.assertEqual(fn, self.filename)
|
|
|
|
self.assertTrue(os.path.exists(self.filename))
|
|
|
|
self.assertFalse(os.path.exists(self.filename))
|