import dnf-4.4.2-10.el8
commit 3c4e616886

.dnf.metadata (Normal file, 1 line)
@@ -0,0 +1 @@
5941a49cfd466aeed4ec882a33647912c2a89245 SOURCES/dnf-4.4.2.tar.gz
.gitignore (vendored, Normal file, 1 line)
@@ -0,0 +1 @@
SOURCES/dnf-4.4.2.tar.gz
SOURCES/0001-tests-SQL-write-a-readonly-folder.patch (Normal file, 45 lines)
@@ -0,0 +1,45 @@
From 66e08009b8254462cb2c454ff2320355633c20d6 Mon Sep 17 00:00:00 2001
From: Nicola Sella <nsella@redhat.com>
Date: Tue, 10 Nov 2020 12:11:17 +0100
Subject: [PATCH 1/1] [tests] SQL write a readonly folder

fixes on rhel8.4 for test_dnf_base and test_dnf_db_group
libdnf._error.Error: SQLite error on "/var/lib/dnf/history.sqlite":
Executing an SQL statement failed: attempt to write a readonly
database

=changelog=
msg: fixes SQL write a readonly folder
type: bugfix
---
 tests/api/test_dnf_base.py     | 1 +
 tests/api/test_dnf_db_group.py | 1 +
 2 files changed, 2 insertions(+)

diff --git a/tests/api/test_dnf_base.py b/tests/api/test_dnf_base.py
index b1cf49fb..ca71b75c 100644
--- a/tests/api/test_dnf_base.py
+++ b/tests/api/test_dnf_base.py
@@ -14,6 +14,7 @@ from .common import TOUR_4_4
 class DnfBaseApiTest(TestCase):
     def setUp(self):
         self.base = dnf.Base(dnf.conf.Conf())
+        self.base.conf.persistdir = "/tmp/tests"

     def tearDown(self):
         self.base.close()
diff --git a/tests/api/test_dnf_db_group.py b/tests/api/test_dnf_db_group.py
index 447fd121..e1828cb4 100644
--- a/tests/api/test_dnf_db_group.py
+++ b/tests/api/test_dnf_db_group.py
@@ -12,6 +12,7 @@ from .common import TestCase
 class DnfRPMTransactionApiTest(TestCase):
     def setUp(self):
         self.base = dnf.Base(dnf.conf.Conf())
+        self.base.conf.persistdir = "/tmp/tests"
         self.base.fill_sack(False, False)
         self.base.resolve()
         self.rpmTrans = self.base.transaction
--
2.26.2
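The patch above points the tests' persistdir at a writable location. A minimal standalone sketch of the same idea, assuming only that the dnf Python API is importable (the temp-dir prefix is arbitrary):

```python
# Sketch: point DNF's persistent state (history.sqlite and friends) at a writable
# temporary directory, so a read-only /var/lib/dnf cannot trigger
# "attempt to write a readonly database".
import tempfile

import dnf
import dnf.conf

base = dnf.Base(dnf.conf.Conf())
base.conf.persistdir = tempfile.mkdtemp(prefix="dnf_tests_")  # writable location
print(base.conf.persistdir)
base.close()
```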
@@ -0,0 +1,26 @@
From c2e4901cec947e5be2e5ff5afa22691841d00bdc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Hr=C3=A1zk=C3=BD?= <lhrazky@redhat.com>
Date: Tue, 10 Nov 2020 13:46:57 +0100
Subject: [PATCH] Revert "Fix --setopt=cachedir writing outside of installroot"

This reverts commit 70fffff61f7a006d406b46adc592d21a685c12a8.

The commit breaks resetting excludes with an empty --exclude= CLI switch
if the excludes were set in the config file.
---
 dnf/cli/cli.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/dnf/cli/cli.py b/dnf/cli/cli.py
index b5f7bca07b..5878c2aa15 100644
--- a/dnf/cli/cli.py
+++ b/dnf/cli/cli.py
@@ -974,6 +974,8 @@ def configure(self, args, option_parser=None):

         self.base.configure_plugins()

+        self.base.conf._configure_from_options(opts)
+
         self.command.configure()

         if self.base.conf.destdir:
@@ -0,0 +1,567 @@
|
||||
From 9ed390d08a9f2b66f4e352532fa526fc64e329d4 Mon Sep 17 00:00:00 2001
|
||||
From: Marek Blaha <mblaha@redhat.com>
|
||||
Date: Tue, 28 Jul 2020 09:50:10 +0200
|
||||
Subject: [PATCH 1/3] Remove unused loops attribute from
|
||||
DepSolveProgressCallBack
|
||||
|
||||
---
|
||||
dnf/cli/output.py | 5 -----
|
||||
1 file changed, 5 deletions(-)
|
||||
|
||||
diff --git a/dnf/cli/output.py b/dnf/cli/output.py
|
||||
index de188ffbd1..44d5f8b89f 100644
|
||||
--- a/dnf/cli/output.py
|
||||
+++ b/dnf/cli/output.py
|
||||
@@ -1987,10 +1987,6 @@ def historyInfoCmdPkgsAltered(self, old, pats=[]):
|
||||
class DepSolveProgressCallBack(dnf.callback.Depsolve):
|
||||
"""Provides text output callback functions for Dependency Solver callback."""
|
||||
|
||||
- def __init__(self):
|
||||
- """requires yum-cli log and errorlog functions as arguments"""
|
||||
- self.loops = 0
|
||||
-
|
||||
def pkg_added(self, pkg, mode):
|
||||
"""Print information about a package being added to the
|
||||
transaction set.
|
||||
@@ -2037,7 +2033,6 @@ def start(self):
|
||||
process.
|
||||
"""
|
||||
logger.debug(_('--> Starting dependency resolution'))
|
||||
- self.loops += 1
|
||||
|
||||
def end(self):
|
||||
"""Output a message stating that dependency resolution has finished."""
|
||||
|
||||
From 0ee646f4965c597f2832ed3df9d9f0e6546dcc54 Mon Sep 17 00:00:00 2001
|
||||
From: Marek Blaha <mblaha@redhat.com>
|
||||
Date: Wed, 21 Oct 2020 11:47:43 +0200
|
||||
Subject: [PATCH 2/3] Remove unused parameter of _make_lists()
|
||||
|
||||
---
|
||||
dnf/cli/output.py | 7 ++++---
|
||||
1 file changed, 4 insertions(+), 3 deletions(-)
|
||||
|
||||
diff --git a/dnf/cli/output.py b/dnf/cli/output.py
|
||||
index 44d5f8b89f..af8a968770 100644
|
||||
--- a/dnf/cli/output.py
|
||||
+++ b/dnf/cli/output.py
|
||||
@@ -52,7 +52,8 @@
|
||||
|
||||
logger = logging.getLogger('dnf')
|
||||
|
||||
-def _make_lists(transaction, goal):
|
||||
+
|
||||
+def _make_lists(transaction):
|
||||
b = dnf.util.Bunch({
|
||||
'downgraded': [],
|
||||
'erased': [],
|
||||
@@ -1101,7 +1102,7 @@ def list_transaction(self, transaction, total_width=None):
|
||||
# in order to display module changes when RPM transaction is empty
|
||||
transaction = []
|
||||
|
||||
- list_bunch = _make_lists(transaction, self.base._goal)
|
||||
+ list_bunch = _make_lists(transaction)
|
||||
pkglist_lines = []
|
||||
data = {'n' : {}, 'v' : {}, 'r' : {}}
|
||||
a_wid = 0 # Arch can't get "that big" ... so always use the max.
|
||||
@@ -1488,7 +1489,7 @@ def _tsi_or_pkg_nevra_cmp(item1, item2):
|
||||
return (item1.arch > item2.arch) - (item1.arch < item2.arch)
|
||||
|
||||
out = ''
|
||||
- list_bunch = _make_lists(transaction, self.base._goal)
|
||||
+ list_bunch = _make_lists(transaction)
|
||||
|
||||
skipped_conflicts, skipped_broken = self._skipped_packages(
|
||||
report_problems=False, transaction=transaction)
|
||||
|
||||
From 865b7183453684de6a25e77fddf5a2d11fbffba8 Mon Sep 17 00:00:00 2001
|
||||
From: Marek Blaha <mblaha@redhat.com>
|
||||
Date: Wed, 21 Oct 2020 17:59:46 +0200
|
||||
Subject: [PATCH 3/3] Post transaction summary is logged for API users
|
||||
(RhBug:1855158)
|
||||
|
||||
Post transaction summary is always logged into /var/log/dnf.log.
|
||||
When transaction is called from cli, the summary is also printed to
|
||||
stdout in columns (as previously).
|
||||
|
||||
= changelog =
|
||||
msg: Packages installed/removed via DNF API are logged into dnf.log
|
||||
type: enhancement
|
||||
resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1855158
|
||||
---
|
||||
dnf/base.py | 46 ++++++++++++-
|
||||
dnf/cli/cli.py | 8 ++-
|
||||
dnf/cli/output.py | 167 ++++++++--------------------------------------
|
||||
dnf/util.py | 102 +++++++++++++++++++++++++++-
|
||||
4 files changed, 177 insertions(+), 146 deletions(-)
|
||||
|
||||
diff --git a/dnf/base.py b/dnf/base.py
|
||||
index 075e74265a..c0d7712605 100644
|
||||
--- a/dnf/base.py
|
||||
+++ b/dnf/base.py
|
||||
@@ -28,12 +28,12 @@
|
||||
import dnf
|
||||
import libdnf.transaction
|
||||
|
||||
+from copy import deepcopy
|
||||
from dnf.comps import CompsQuery
|
||||
from dnf.i18n import _, P_, ucd
|
||||
from dnf.util import _parse_specs
|
||||
from dnf.db.history import SwdbInterface
|
||||
from dnf.yum import misc
|
||||
-from functools import reduce
|
||||
try:
|
||||
from collections.abc import Sequence
|
||||
except ImportError:
|
||||
@@ -549,7 +549,7 @@ def _ts(self):
|
||||
if self.conf.ignorearch:
|
||||
self._rpm_probfilter.add(rpm.RPMPROB_FILTER_IGNOREARCH)
|
||||
|
||||
- probfilter = reduce(operator.or_, self._rpm_probfilter, 0)
|
||||
+ probfilter = functools.reduce(operator.or_, self._rpm_probfilter, 0)
|
||||
self._priv_ts.setProbFilter(probfilter)
|
||||
return self._priv_ts
|
||||
|
||||
@@ -890,6 +890,15 @@ def do_transaction(self, display=()):
|
||||
self._plugins.unload_removed_plugins(self.transaction)
|
||||
self._plugins.run_transaction()
|
||||
|
||||
+ # log post transaction summary
|
||||
+ def _pto_callback(action, tsis):
|
||||
+ msgs = []
|
||||
+ for tsi in tsis:
|
||||
+ msgs.append('{}: {}'.format(action, str(tsi)))
|
||||
+ return msgs
|
||||
+ for msg in dnf.util._post_transaction_output(self, self.transaction, _pto_callback):
|
||||
+ logger.debug(msg)
|
||||
+
|
||||
return tid
|
||||
|
||||
def _trans_error_summary(self, errstring):
|
||||
@@ -1311,7 +1320,7 @@ def _do_package_lists(self, pkgnarrow='all', patterns=None, showdups=None,
|
||||
if patterns is None or len(patterns) == 0:
|
||||
return list_fn(None)
|
||||
yghs = map(list_fn, patterns)
|
||||
- return reduce(lambda a, b: a.merge_lists(b), yghs)
|
||||
+ return functools.reduce(lambda a, b: a.merge_lists(b), yghs)
|
||||
|
||||
def _list_pattern(self, pkgnarrow, pattern, showdups, ignore_case,
|
||||
reponame=None):
|
||||
@@ -2579,6 +2588,37 @@ def setup_loggers(self):
|
||||
"""
|
||||
self._logging._setup_from_dnf_conf(self.conf, file_loggers_only=True)
|
||||
|
||||
+ def _skipped_packages(self, report_problems, transaction):
|
||||
+ """returns set of conflicting packages and set of packages with broken dependency that would
|
||||
+ be additionally installed when --best and --allowerasing"""
|
||||
+ if self._goal.actions & (hawkey.INSTALL | hawkey.UPGRADE | hawkey.UPGRADE_ALL):
|
||||
+ best = True
|
||||
+ else:
|
||||
+ best = False
|
||||
+ ng = deepcopy(self._goal)
|
||||
+ params = {"allow_uninstall": self._allow_erasing,
|
||||
+ "force_best": best,
|
||||
+ "ignore_weak": True}
|
||||
+ ret = ng.run(**params)
|
||||
+ if not ret and report_problems:
|
||||
+ msg = dnf.util._format_resolve_problems(ng.problem_rules())
|
||||
+ logger.warning(msg)
|
||||
+ problem_conflicts = set(ng.problem_conflicts(available=True))
|
||||
+ problem_dependency = set(ng.problem_broken_dependency(available=True)) - problem_conflicts
|
||||
+
|
||||
+ def _nevra(item):
|
||||
+ return hawkey.NEVRA(name=item.name, epoch=item.epoch, version=item.version,
|
||||
+ release=item.release, arch=item.arch)
|
||||
+
|
||||
+ # Sometimes, pkg is not in transaction item, therefore, comparing by nevra
|
||||
+ transaction_nevras = [_nevra(tsi) for tsi in transaction]
|
||||
+ skipped_conflicts = set(
|
||||
+ [pkg for pkg in problem_conflicts if _nevra(pkg) not in transaction_nevras])
|
||||
+ skipped_dependency = set(
|
||||
+ [pkg for pkg in problem_dependency if _nevra(pkg) not in transaction_nevras])
|
||||
+
|
||||
+ return skipped_conflicts, skipped_dependency
|
||||
+
|
||||
|
||||
def _msg_installed(pkg):
|
||||
name = ucd(pkg)
|
||||
diff --git a/dnf/cli/cli.py b/dnf/cli/cli.py
|
||||
index 0bc2c119d0..334000362c 100644
|
||||
--- a/dnf/cli/cli.py
|
||||
+++ b/dnf/cli/cli.py
|
||||
@@ -252,8 +252,12 @@ def do_transaction(self, display=()):
|
||||
trans = None
|
||||
|
||||
if trans:
|
||||
- msg = self.output.post_transaction_output(trans)
|
||||
- logger.info(msg)
|
||||
+ # the post transaction summary is already written to log during
|
||||
+ # Base.do_transaction() so here only print the messages to the
|
||||
+ # user arranged in columns
|
||||
+ print()
|
||||
+ print('\n'.join(self.output.post_transaction_output(trans)))
|
||||
+ print()
|
||||
for tsi in trans:
|
||||
if tsi.state == libdnf.transaction.TransactionItemState_ERROR:
|
||||
raise dnf.exceptions.Error(_('Transaction failed'))
|
||||
diff --git a/dnf/cli/output.py b/dnf/cli/output.py
|
||||
index af8a968770..6d729b63ba 100644
|
||||
--- a/dnf/cli/output.py
|
||||
+++ b/dnf/cli/output.py
|
||||
@@ -21,9 +21,7 @@
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
-from copy import deepcopy
|
||||
import fnmatch
|
||||
-import functools
|
||||
import hawkey
|
||||
import itertools
|
||||
import libdnf.transaction
|
||||
@@ -53,51 +51,6 @@
|
||||
logger = logging.getLogger('dnf')
|
||||
|
||||
|
||||
-def _make_lists(transaction):
|
||||
- b = dnf.util.Bunch({
|
||||
- 'downgraded': [],
|
||||
- 'erased': [],
|
||||
- 'erased_clean': [],
|
||||
- 'erased_dep': [],
|
||||
- 'installed': [],
|
||||
- 'installed_group': [],
|
||||
- 'installed_dep': [],
|
||||
- 'installed_weak': [],
|
||||
- 'reinstalled': [],
|
||||
- 'upgraded': [],
|
||||
- 'failed': [],
|
||||
- })
|
||||
-
|
||||
- for tsi in transaction:
|
||||
- if tsi.state == libdnf.transaction.TransactionItemState_ERROR:
|
||||
- b.failed.append(tsi)
|
||||
- elif tsi.action == libdnf.transaction.TransactionItemAction_DOWNGRADE:
|
||||
- b.downgraded.append(tsi)
|
||||
- elif tsi.action == libdnf.transaction.TransactionItemAction_INSTALL:
|
||||
- if tsi.reason == libdnf.transaction.TransactionItemReason_GROUP:
|
||||
- b.installed_group.append(tsi)
|
||||
- elif tsi.reason == libdnf.transaction.TransactionItemReason_DEPENDENCY:
|
||||
- b.installed_dep.append(tsi)
|
||||
- elif tsi.reason == libdnf.transaction.TransactionItemReason_WEAK_DEPENDENCY:
|
||||
- b.installed_weak.append(tsi)
|
||||
- else:
|
||||
- # TransactionItemReason_USER
|
||||
- b.installed.append(tsi)
|
||||
- elif tsi.action == libdnf.transaction.TransactionItemAction_REINSTALL:
|
||||
- b.reinstalled.append(tsi)
|
||||
- elif tsi.action == libdnf.transaction.TransactionItemAction_REMOVE:
|
||||
- if tsi.reason == libdnf.transaction.TransactionItemReason_CLEAN:
|
||||
- b.erased_clean.append(tsi)
|
||||
- elif tsi.reason == libdnf.transaction.TransactionItemReason_DEPENDENCY:
|
||||
- b.erased_dep.append(tsi)
|
||||
- else:
|
||||
- b.erased.append(tsi)
|
||||
- elif tsi.action == libdnf.transaction.TransactionItemAction_UPGRADE:
|
||||
- b.upgraded.append(tsi)
|
||||
-
|
||||
- return b
|
||||
-
|
||||
-
|
||||
def _spread_in_columns(cols_count, label, lst):
|
||||
left = itertools.chain((label,), itertools.repeat(''))
|
||||
lst_length = len(lst)
|
||||
@@ -1057,37 +1010,6 @@ def list_group_transaction(self, comps, history, diff):
|
||||
out[0:0] = self._banner(col_data, (_('Group'), _('Packages'), '', ''))
|
||||
return '\n'.join(out)
|
||||
|
||||
- def _skipped_packages(self, report_problems, transaction):
|
||||
- """returns set of conflicting packages and set of packages with broken dependency that would
|
||||
- be additionally installed when --best and --allowerasing"""
|
||||
- if self.base._goal.actions & (hawkey.INSTALL | hawkey.UPGRADE | hawkey.UPGRADE_ALL):
|
||||
- best = True
|
||||
- else:
|
||||
- best = False
|
||||
- ng = deepcopy(self.base._goal)
|
||||
- params = {"allow_uninstall": self.base._allow_erasing,
|
||||
- "force_best": best,
|
||||
- "ignore_weak": True}
|
||||
- ret = ng.run(**params)
|
||||
- if not ret and report_problems:
|
||||
- msg = dnf.util._format_resolve_problems(ng.problem_rules())
|
||||
- logger.warning(msg)
|
||||
- problem_conflicts = set(ng.problem_conflicts(available=True))
|
||||
- problem_dependency = set(ng.problem_broken_dependency(available=True)) - problem_conflicts
|
||||
-
|
||||
- def _nevra(item):
|
||||
- return hawkey.NEVRA(name=item.name, epoch=item.epoch, version=item.version,
|
||||
- release=item.release, arch=item.arch)
|
||||
-
|
||||
- # Sometimes, pkg is not in transaction item, therefore, comparing by nevra
|
||||
- transaction_nevras = [_nevra(tsi) for tsi in transaction]
|
||||
- skipped_conflicts = set(
|
||||
- [pkg for pkg in problem_conflicts if _nevra(pkg) not in transaction_nevras])
|
||||
- skipped_dependency = set(
|
||||
- [pkg for pkg in problem_dependency if _nevra(pkg) not in transaction_nevras])
|
||||
-
|
||||
- return skipped_conflicts, skipped_dependency
|
||||
-
|
||||
def list_transaction(self, transaction, total_width=None):
|
||||
"""Return a string representation of the transaction in an
|
||||
easy-to-read format.
|
||||
@@ -1102,7 +1024,7 @@ def list_transaction(self, transaction, total_width=None):
|
||||
# in order to display module changes when RPM transaction is empty
|
||||
transaction = []
|
||||
|
||||
- list_bunch = _make_lists(transaction)
|
||||
+ list_bunch = dnf.util._make_lists(transaction)
|
||||
pkglist_lines = []
|
||||
data = {'n' : {}, 'v' : {}, 'r' : {}}
|
||||
a_wid = 0 # Arch can't get "that big" ... so always use the max.
|
||||
@@ -1271,7 +1193,7 @@ def format_line(group):
|
||||
# show skipped conflicting packages
|
||||
if not self.conf.best and self.base._goal.actions & forward_actions:
|
||||
lines = []
|
||||
- skipped_conflicts, skipped_broken = self._skipped_packages(
|
||||
+ skipped_conflicts, skipped_broken = self.base._skipped_packages(
|
||||
report_problems=True, transaction=transaction)
|
||||
skipped_broken = dict((str(pkg), pkg) for pkg in skipped_broken)
|
||||
for pkg in sorted(skipped_conflicts):
|
||||
@@ -1436,13 +1358,8 @@ def format_line(group):
|
||||
max_msg_count, count, msg_pkgs))
|
||||
return ''.join(out)
|
||||
|
||||
- def post_transaction_output(self, transaction):
|
||||
- """Returns a human-readable summary of the results of the
|
||||
- transaction.
|
||||
|
||||
- :return: a string containing a human-readable summary of the
|
||||
- results of the transaction
|
||||
- """
|
||||
+ def _pto_callback(self, action, tsis):
|
||||
# Works a bit like calcColumns, but we never overflow a column we just
|
||||
# have a dynamic number of columns.
|
||||
def _fits_in_cols(msgs, num):
|
||||
@@ -1472,61 +1389,33 @@ def _fits_in_cols(msgs, num):
|
||||
col_lens[col] *= -1
|
||||
return col_lens
|
||||
|
||||
- def _tsi_or_pkg_nevra_cmp(item1, item2):
|
||||
- """Compares two transaction items or packages by nevra.
|
||||
- Used as a fallback when tsi does not contain package object.
|
||||
- """
|
||||
- ret = (item1.name > item2.name) - (item1.name < item2.name)
|
||||
- if ret != 0:
|
||||
- return ret
|
||||
- nevra1 = hawkey.NEVRA(name=item1.name, epoch=item1.epoch, version=item1.version,
|
||||
- release=item1.release, arch=item1.arch)
|
||||
- nevra2 = hawkey.NEVRA(name=item2.name, epoch=item2.epoch, version=item2.version,
|
||||
- release=item2.release, arch=item2.arch)
|
||||
- ret = nevra1.evr_cmp(nevra2, self.sack)
|
||||
- if ret != 0:
|
||||
- return ret
|
||||
- return (item1.arch > item2.arch) - (item1.arch < item2.arch)
|
||||
-
|
||||
- out = ''
|
||||
- list_bunch = _make_lists(transaction)
|
||||
-
|
||||
- skipped_conflicts, skipped_broken = self._skipped_packages(
|
||||
- report_problems=False, transaction=transaction)
|
||||
- skipped = skipped_conflicts.union(skipped_broken)
|
||||
-
|
||||
- for (action, tsis) in [(_('Upgraded'), list_bunch.upgraded),
|
||||
- (_('Downgraded'), list_bunch.downgraded),
|
||||
- (_('Installed'), list_bunch.installed +
|
||||
- list_bunch.installed_group +
|
||||
- list_bunch.installed_weak +
|
||||
- list_bunch.installed_dep),
|
||||
- (_('Reinstalled'), list_bunch.reinstalled),
|
||||
- (_('Skipped'), skipped),
|
||||
- (_('Removed'), list_bunch.erased +
|
||||
- list_bunch.erased_dep +
|
||||
- list_bunch.erased_clean),
|
||||
- (_('Failed'), list_bunch.failed)]:
|
||||
- if not tsis:
|
||||
- continue
|
||||
- msgs = []
|
||||
- out += '\n%s:\n' % action
|
||||
- for tsi in sorted(tsis, key=functools.cmp_to_key(_tsi_or_pkg_nevra_cmp)):
|
||||
- msgs.append(str(tsi))
|
||||
- for num in (8, 7, 6, 5, 4, 3, 2):
|
||||
- cols = _fits_in_cols(msgs, num)
|
||||
- if cols:
|
||||
- break
|
||||
- if not cols:
|
||||
- cols = [-(self.term.columns - 2)]
|
||||
- while msgs:
|
||||
- current_msgs = msgs[:len(cols)]
|
||||
- out += ' '
|
||||
- out += self.fmtColumns(zip(current_msgs, cols), end=u'\n')
|
||||
- msgs = msgs[len(cols):]
|
||||
-
|
||||
+ if not tsis:
|
||||
+ return ''
|
||||
+ out = []
|
||||
+ msgs = []
|
||||
+ out.append('{}:'.format(action))
|
||||
+ for tsi in tsis:
|
||||
+ msgs.append(str(tsi))
|
||||
+ for num in (8, 7, 6, 5, 4, 3, 2):
|
||||
+ cols = _fits_in_cols(msgs, num)
|
||||
+ if cols:
|
||||
+ break
|
||||
+ if not cols:
|
||||
+ cols = [-(self.term.columns - 2)]
|
||||
+ while msgs:
|
||||
+ current_msgs = msgs[:len(cols)]
|
||||
+ out.append(' {}'.format(self.fmtColumns(zip(current_msgs, cols))))
|
||||
+ msgs = msgs[len(cols):]
|
||||
return out
|
||||
|
||||
+
|
||||
+ def post_transaction_output(self, transaction):
|
||||
+ """
|
||||
+ Return a human-readable summary of the transaction. Packages in sections
|
||||
+ are arranged to columns.
|
||||
+ """
|
||||
+ return dnf.util._post_transaction_output(self.base, transaction, self._pto_callback)
|
||||
+
|
||||
def setup_progress_callbacks(self):
|
||||
"""Set up the progress callbacks and various
|
||||
output bars based on debug level.
|
||||
diff --git a/dnf/util.py b/dnf/util.py
|
||||
index 8cf362706d..0beb04424d 100644
|
||||
--- a/dnf/util.py
|
||||
+++ b/dnf/util.py
|
||||
@@ -24,13 +24,14 @@
|
||||
|
||||
from .pycomp import PY3, basestring
|
||||
from dnf.i18n import _, ucd
|
||||
-from functools import reduce
|
||||
import argparse
|
||||
import dnf
|
||||
import dnf.callback
|
||||
import dnf.const
|
||||
import dnf.pycomp
|
||||
import errno
|
||||
+import functools
|
||||
+import hawkey
|
||||
import itertools
|
||||
import locale
|
||||
import logging
|
||||
@@ -41,6 +42,7 @@
|
||||
import tempfile
|
||||
import time
|
||||
import libdnf.repo
|
||||
+import libdnf.transaction
|
||||
|
||||
logger = logging.getLogger('dnf')
|
||||
|
||||
@@ -195,7 +197,7 @@ def group_by_filter(fn, iterable):
|
||||
def splitter(acc, item):
|
||||
acc[not bool(fn(item))].append(item)
|
||||
return acc
|
||||
- return reduce(splitter, iterable, ([], []))
|
||||
+ return functools.reduce(splitter, iterable, ([], []))
|
||||
|
||||
def insert_if(item, iterable, condition):
|
||||
"""Insert an item into an iterable by a condition."""
|
||||
@@ -504,3 +506,99 @@ def __setattr__(self, what, val):
|
||||
def setter(item):
|
||||
setattr(item, what, val)
|
||||
return list(map(setter, self))
|
||||
+
|
||||
+
|
||||
+def _make_lists(transaction):
|
||||
+ b = Bunch({
|
||||
+ 'downgraded': [],
|
||||
+ 'erased': [],
|
||||
+ 'erased_clean': [],
|
||||
+ 'erased_dep': [],
|
||||
+ 'installed': [],
|
||||
+ 'installed_group': [],
|
||||
+ 'installed_dep': [],
|
||||
+ 'installed_weak': [],
|
||||
+ 'reinstalled': [],
|
||||
+ 'upgraded': [],
|
||||
+ 'failed': [],
|
||||
+ })
|
||||
+
|
||||
+ for tsi in transaction:
|
||||
+ if tsi.state == libdnf.transaction.TransactionItemState_ERROR:
|
||||
+ b.failed.append(tsi)
|
||||
+ elif tsi.action == libdnf.transaction.TransactionItemAction_DOWNGRADE:
|
||||
+ b.downgraded.append(tsi)
|
||||
+ elif tsi.action == libdnf.transaction.TransactionItemAction_INSTALL:
|
||||
+ if tsi.reason == libdnf.transaction.TransactionItemReason_GROUP:
|
||||
+ b.installed_group.append(tsi)
|
||||
+ elif tsi.reason == libdnf.transaction.TransactionItemReason_DEPENDENCY:
|
||||
+ b.installed_dep.append(tsi)
|
||||
+ elif tsi.reason == libdnf.transaction.TransactionItemReason_WEAK_DEPENDENCY:
|
||||
+ b.installed_weak.append(tsi)
|
||||
+ else:
|
||||
+ # TransactionItemReason_USER
|
||||
+ b.installed.append(tsi)
|
||||
+ elif tsi.action == libdnf.transaction.TransactionItemAction_REINSTALL:
|
||||
+ b.reinstalled.append(tsi)
|
||||
+ elif tsi.action == libdnf.transaction.TransactionItemAction_REMOVE:
|
||||
+ if tsi.reason == libdnf.transaction.TransactionItemReason_CLEAN:
|
||||
+ b.erased_clean.append(tsi)
|
||||
+ elif tsi.reason == libdnf.transaction.TransactionItemReason_DEPENDENCY:
|
||||
+ b.erased_dep.append(tsi)
|
||||
+ else:
|
||||
+ b.erased.append(tsi)
|
||||
+ elif tsi.action == libdnf.transaction.TransactionItemAction_UPGRADE:
|
||||
+ b.upgraded.append(tsi)
|
||||
+
|
||||
+ return b
|
||||
+
|
||||
+
|
||||
+def _post_transaction_output(base, transaction, action_callback):
|
||||
+ """Returns a human-readable summary of the results of the
|
||||
+ transaction.
|
||||
+
|
||||
+ :param action_callback: function generating output for specific action. It
|
||||
+ takes two parameters - action as a string and list of affected packages for
|
||||
+ this action
|
||||
+ :return: a list of lines containing a human-readable summary of the
|
||||
+ results of the transaction
|
||||
+ """
|
||||
+ def _tsi_or_pkg_nevra_cmp(item1, item2):
|
||||
+ """Compares two transaction items or packages by nevra.
|
||||
+ Used as a fallback when tsi does not contain package object.
|
||||
+ """
|
||||
+ ret = (item1.name > item2.name) - (item1.name < item2.name)
|
||||
+ if ret != 0:
|
||||
+ return ret
|
||||
+ nevra1 = hawkey.NEVRA(name=item1.name, epoch=item1.epoch, version=item1.version,
|
||||
+ release=item1.release, arch=item1.arch)
|
||||
+ nevra2 = hawkey.NEVRA(name=item2.name, epoch=item2.epoch, version=item2.version,
|
||||
+ release=item2.release, arch=item2.arch)
|
||||
+ ret = nevra1.evr_cmp(nevra2, base.sack)
|
||||
+ if ret != 0:
|
||||
+ return ret
|
||||
+ return (item1.arch > item2.arch) - (item1.arch < item2.arch)
|
||||
+
|
||||
+ list_bunch = dnf.util._make_lists(transaction)
|
||||
+
|
||||
+ skipped_conflicts, skipped_broken = base._skipped_packages(
|
||||
+ report_problems=False, transaction=transaction)
|
||||
+ skipped = skipped_conflicts.union(skipped_broken)
|
||||
+
|
||||
+ out = []
|
||||
+ for (action, tsis) in [(_('Upgraded'), list_bunch.upgraded),
|
||||
+ (_('Downgraded'), list_bunch.downgraded),
|
||||
+ (_('Installed'), list_bunch.installed +
|
||||
+ list_bunch.installed_group +
|
||||
+ list_bunch.installed_weak +
|
||||
+ list_bunch.installed_dep),
|
||||
+ (_('Reinstalled'), list_bunch.reinstalled),
|
||||
+ (_('Skipped'), skipped),
|
||||
+ (_('Removed'), list_bunch.erased +
|
||||
+ list_bunch.erased_dep +
|
||||
+ list_bunch.erased_clean),
|
||||
+ (_('Failed'), list_bunch.failed)]:
|
||||
+ out.extend(action_callback(
|
||||
+ action, sorted(tsis, key=functools.cmp_to_key(_tsi_or_pkg_nevra_cmp))))
|
||||
+
|
||||
+ return out
|
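With the patch above, the post-transaction summary reaches /var/log/dnf.log even when DNF is driven purely through the API. A rough usage sketch, assuming a configured host with enabled repositories; the package name is a placeholder:

```python
# Sketch of a plain API transaction; the "Installed:"/"Upgraded:" summary lines
# produced at the end of Base.do_transaction() are now written via the 'dnf'
# logger to dnf.log, with no CLI involvement.
import dnf

base = dnf.Base()
base.read_all_repos()
base.fill_sack()
base.install("tree")  # placeholder package spec
base.resolve()
base.download_packages(base.transaction.install_set)
base.do_transaction()
base.close()
```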
@@ -0,0 +1,130 @@
|
||||
From df64fd36d7fefe39a96fea3f41e35785bebd37ec Mon Sep 17 00:00:00 2001
|
||||
From: Marek Blaha <mblaha@redhat.com>
|
||||
Date: Wed, 2 Dec 2020 16:33:26 +0100
|
||||
Subject: [PATCH 1/2] Log scriptlets output also for API users (RhBug:1847340)
|
||||
|
||||
Messages logged into /var/log/dnf.rpm.log are now the same for both
|
||||
command line and API usage.
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1847340
|
||||
---
|
||||
dnf/cli/output.py | 7 +------
|
||||
dnf/yum/rpmtrans.py | 9 ++++++++-
|
||||
2 files changed, 9 insertions(+), 7 deletions(-)
|
||||
|
||||
diff --git a/dnf/cli/output.py b/dnf/cli/output.py
|
||||
index 51d6829ca6..86260661fc 100644
|
||||
--- a/dnf/cli/output.py
|
||||
+++ b/dnf/cli/output.py
|
||||
@@ -2151,12 +2151,7 @@ def error(self, message):
|
||||
pass
|
||||
|
||||
def scriptout(self, msgs):
|
||||
- """Print messages originating from a package script.
|
||||
-
|
||||
- :param msgs: the messages coming from the script
|
||||
- """
|
||||
- if msgs:
|
||||
- self.rpm_logger.info(ucd(msgs))
|
||||
+ pass
|
||||
|
||||
def _makefmt(self, percent, ts_done, ts_total, progress=True,
|
||||
pkgname=None, wid1=15):
|
||||
diff --git a/dnf/yum/rpmtrans.py b/dnf/yum/rpmtrans.py
|
||||
index 447639a476..d6c549d2ed 100644
|
||||
--- a/dnf/yum/rpmtrans.py
|
||||
+++ b/dnf/yum/rpmtrans.py
|
||||
@@ -113,7 +113,10 @@ def progress(self, package, action, ti_done, ti_total, ts_done, ts_total):
|
||||
pass
|
||||
|
||||
def scriptout(self, msgs):
|
||||
- """msgs is the messages that were output (if any)."""
|
||||
+ """Hook for reporting an rpm scriptlet output.
|
||||
+
|
||||
+ :param msgs: the scriptlet output
|
||||
+ """
|
||||
pass
|
||||
|
||||
def error(self, message):
|
||||
@@ -156,6 +159,10 @@ def filelog(self, package, action):
|
||||
msg = '%s: %s' % (action_str, package)
|
||||
self.rpm_logger.log(dnf.logging.SUBDEBUG, msg)
|
||||
|
||||
+ def scriptout(self, msgs):
|
||||
+ if msgs:
|
||||
+ self.rpm_logger.info(ucd(msgs))
|
||||
+
|
||||
|
||||
class RPMTransaction(object):
|
||||
def __init__(self, base, test=False, displays=()):
|
||||
|
||||
From ee6ffcf640180b2b08d2db50b4b81d2bdefb1f2f Mon Sep 17 00:00:00 2001
|
||||
From: Marek Blaha <mblaha@redhat.com>
|
||||
Date: Thu, 3 Dec 2020 10:08:09 +0100
|
||||
Subject: [PATCH 2/2] Straighten inheritance of *Display classes
|
||||
|
||||
---
|
||||
dnf/cli/output.py | 15 +++------------
|
||||
dnf/yum/rpmtrans.py | 2 +-
|
||||
2 files changed, 4 insertions(+), 13 deletions(-)
|
||||
|
||||
diff --git a/dnf/cli/output.py b/dnf/cli/output.py
|
||||
index 86260661fc..de188ffbd1 100644
|
||||
--- a/dnf/cli/output.py
|
||||
+++ b/dnf/cli/output.py
|
||||
@@ -37,7 +37,7 @@
|
||||
from dnf.cli.format import format_number, format_time
|
||||
from dnf.i18n import _, C_, P_, ucd, fill_exact_width, textwrap_fill, exact_width, select_short_long
|
||||
from dnf.pycomp import xrange, basestring, long, unicode, sys_maxsize
|
||||
-from dnf.yum.rpmtrans import LoggingTransactionDisplay
|
||||
+from dnf.yum.rpmtrans import TransactionDisplay
|
||||
from dnf.db.history import MergedTransactionWrapper
|
||||
import dnf.base
|
||||
import dnf.callback
|
||||
@@ -2071,7 +2071,7 @@ def short_id(id):
|
||||
return self.output.userconfirm()
|
||||
|
||||
|
||||
-class CliTransactionDisplay(LoggingTransactionDisplay):
|
||||
+class CliTransactionDisplay(TransactionDisplay):
|
||||
"""A YUM specific callback class for RPM operations."""
|
||||
|
||||
width = property(lambda self: dnf.cli.term._term_width())
|
||||
@@ -2093,7 +2093,7 @@ def progress(self, package, action, ti_done, ti_total, ts_done, ts_total):
|
||||
:param package: the package involved in the event
|
||||
:param action: the type of action that is taking place. Valid
|
||||
values are given by
|
||||
- :func:`rpmtrans.LoggingTransactionDisplay.action.keys()`
|
||||
+ :func:`rpmtrans.TransactionDisplay.action.keys()`
|
||||
:param ti_done: a number representing the amount of work
|
||||
already done in the current transaction
|
||||
:param ti_total: a number representing the total amount of work
|
||||
@@ -2144,15 +2144,6 @@ def _out_progress(self, ti_done, ti_total, ts_done, ts_total,
|
||||
if ti_done == ti_total:
|
||||
print(" ")
|
||||
|
||||
- def filelog(self, package, action):
|
||||
- pass
|
||||
-
|
||||
- def error(self, message):
|
||||
- pass
|
||||
-
|
||||
- def scriptout(self, msgs):
|
||||
- pass
|
||||
-
|
||||
def _makefmt(self, percent, ts_done, ts_total, progress=True,
|
||||
pkgname=None, wid1=15):
|
||||
l = len(str(ts_total))
|
||||
diff --git a/dnf/yum/rpmtrans.py b/dnf/yum/rpmtrans.py
|
||||
index d6c549d2ed..51fa921d3e 100644
|
||||
--- a/dnf/yum/rpmtrans.py
|
||||
+++ b/dnf/yum/rpmtrans.py
|
||||
@@ -143,7 +143,7 @@ def error(self, message):
|
||||
dnf.util._terminal_messenger('print', message, sys.stderr)
|
||||
|
||||
|
||||
-class LoggingTransactionDisplay(ErrorTransactionDisplay):
|
||||
+class LoggingTransactionDisplay(TransactionDisplay):
|
||||
'''
|
||||
Base class for a RPMTransaction display callback class
|
||||
'''
|
File diff suppressed because it is too large
@@ -0,0 +1,150 @@
|
||||
From 8f3ce4868ac009976da7323ea39ebcd9a062e32d Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Mon, 23 Nov 2020 17:00:01 +0100
|
||||
Subject: [PATCH 1/3] Remove source packages from install/upgrade set
|
||||
(RhBug:1898548)
|
||||
|
||||
It prevents Error: Will not install a source rpm package ()
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1898548
|
||||
---
|
||||
dnf/module/module_base.py | 16 ++++++++++------
|
||||
1 file changed, 10 insertions(+), 6 deletions(-)
|
||||
|
||||
diff --git a/dnf/module/module_base.py b/dnf/module/module_base.py
|
||||
index 04701b9d..49c871c4 100644
|
||||
--- a/dnf/module/module_base.py
|
||||
+++ b/dnf/module/module_base.py
|
||||
@@ -140,20 +140,21 @@ class ModuleBase(object):
|
||||
if fail_safe_repo_used:
|
||||
raise dnf.exceptions.Error(_(
|
||||
"Installing module from Fail-Safe repository is not allowed"))
|
||||
- install_base_query = self.base.sack.query().filterm(
|
||||
- nevra_strict=install_set_artefacts).apply()
|
||||
+ # Remove source packages they cannot be installed or upgraded
|
||||
+ base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply()
|
||||
+ install_base_query = base_no_source_query.filter(nevra_strict=install_set_artefacts)
|
||||
|
||||
# add hot-fix packages
|
||||
hot_fix_repos = [i.id for i in self.base.repos.iter_enabled() if i.module_hotfixes]
|
||||
- hotfix_packages = self.base.sack.query().filterm(reponame=hot_fix_repos).filterm(
|
||||
- name=install_dict.keys())
|
||||
+ hotfix_packages = base_no_source_query.filter(
|
||||
+ reponame=hot_fix_repos, name=install_dict.keys())
|
||||
install_base_query = install_base_query.union(hotfix_packages)
|
||||
|
||||
for pkg_name, set_specs in install_dict.items():
|
||||
query = install_base_query.filter(name=pkg_name)
|
||||
if not query:
|
||||
# package can also be non-modular or part of another stream
|
||||
- query = self.base.sack.query().filterm(name=pkg_name)
|
||||
+ query = base_no_source_query.filter(name=pkg_name)
|
||||
if not query:
|
||||
for spec in set_specs:
|
||||
logger.error(_("Unable to resolve argument {}").format(spec))
|
||||
@@ -182,6 +183,9 @@ class ModuleBase(object):
|
||||
fail_safe_repo = hawkey.MODULE_FAIL_SAFE_REPO_NAME
|
||||
fail_safe_repo_used = False
|
||||
|
||||
+ # Remove source packages they cannot be installed or upgraded
|
||||
+ base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply()
|
||||
+
|
||||
for spec in module_specs:
|
||||
module_list, nsvcap = self._get_modules(spec)
|
||||
if not module_list:
|
||||
@@ -221,7 +225,7 @@ class ModuleBase(object):
|
||||
|
||||
if not upgrade_package_set:
|
||||
logger.error(_("Unable to match profile in argument {}").format(spec))
|
||||
- query = self.base.sack.query().filterm(name=upgrade_package_set)
|
||||
+ query = base_no_source_query.filter(name=upgrade_package_set)
|
||||
if query:
|
||||
sltr = dnf.selector.Selector(self.base.sack)
|
||||
sltr.set(pkg=query)
|
||||
--
|
||||
2.26.2
|
||||
|
||||
|
||||
From c42680b292b2cca38b24fb18f46f06f800c1934f Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Mon, 23 Nov 2020 17:04:05 +0100
|
||||
Subject: [PATCH 2/3] Remove all source packages from query
|
||||
|
||||
---
|
||||
dnf/base.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/dnf/base.py b/dnf/base.py
|
||||
index a2955051..a3d9b63f 100644
|
||||
--- a/dnf/base.py
|
||||
+++ b/dnf/base.py
|
||||
@@ -1550,7 +1550,7 @@ class Base(object):
|
||||
if (comps_pkg.basearchonly):
|
||||
query_args.update({'arch': basearch})
|
||||
q = self.sack.query().filterm(**query_args).apply()
|
||||
- q.filterm(arch__neq="src")
|
||||
+ q.filterm(arch__neq=["src", "nosrc"])
|
||||
if not q:
|
||||
package_string = comps_pkg.name
|
||||
if comps_pkg.basearchonly:
|
||||
--
|
||||
2.26.2
|
||||
|
||||
|
||||
From 1f68fa6dc59fb350e71a24e787135475f3fb5b4c Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Mon, 23 Nov 2020 17:29:45 +0100
|
||||
Subject: [PATCH 3/3] Run tests with sack in tmp directory
|
||||
|
||||
---
|
||||
tests/api/test_dnf_module_base.py | 17 +++++++++++------
|
||||
1 file changed, 11 insertions(+), 6 deletions(-)
|
||||
|
||||
diff --git a/tests/api/test_dnf_module_base.py b/tests/api/test_dnf_module_base.py
|
||||
index aa47555b..18dd080d 100644
|
||||
--- a/tests/api/test_dnf_module_base.py
|
||||
+++ b/tests/api/test_dnf_module_base.py
|
||||
@@ -7,16 +7,26 @@ from __future__ import unicode_literals
|
||||
import dnf
|
||||
import dnf.module.module_base
|
||||
|
||||
+import os
|
||||
+import shutil
|
||||
+import tempfile
|
||||
+
|
||||
from .common import TestCase
|
||||
|
||||
|
||||
class DnfModuleBaseApiTest(TestCase):
|
||||
def setUp(self):
|
||||
self.base = dnf.Base(dnf.conf.Conf())
|
||||
+ self._installroot = tempfile.mkdtemp(prefix="dnf_test_installroot_")
|
||||
+ self.base.conf.installroot = self._installroot
|
||||
+ self.base.conf.cachedir = os.path.join(self._installroot, "var/cache/dnf")
|
||||
+ self.base._sack = dnf.sack._build_sack(self.base)
|
||||
self.moduleBase = dnf.module.module_base.ModuleBase(self.base)
|
||||
|
||||
def tearDown(self):
|
||||
self.base.close()
|
||||
+ if self._installroot.startswith("/tmp/"):
|
||||
+ shutil.rmtree(self._installroot)
|
||||
|
||||
def test_init(self):
|
||||
moduleBase = dnf.module.module_base.ModuleBase(self.base)
|
||||
@@ -51,12 +61,7 @@ class DnfModuleBaseApiTest(TestCase):
|
||||
def test_install(self):
|
||||
# ModuleBase.install()
|
||||
self.assertHasAttr(self.moduleBase, "install")
|
||||
- self.assertRaises(
|
||||
- AttributeError,
|
||||
- self.moduleBase.install,
|
||||
- module_specs=[],
|
||||
- strict=False,
|
||||
- )
|
||||
+ self.moduleBase.install(module_specs=[], strict=False)
|
||||
|
||||
def test_remove(self):
|
||||
# ModuleBase.remove()
|
||||
--
|
||||
2.26.2
|
||||
|
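The first two patches above hinge on filtering src/nosrc packages out of sack queries before resolving. A standalone sketch of that filter, assuming a filled sack; the package name is a placeholder:

```python
# Sketch: build a query that can never select source RPMs, mirroring the
# arch__neq=['src', 'nosrc'] filter added before module install/upgrade.
import dnf

base = dnf.Base()
base.read_all_repos()
base.fill_sack()
no_source = base.sack.query().filterm(arch__neq=['src', 'nosrc'])
candidates = no_source.filter(name="tree")  # placeholder package name
print(len(list(candidates)))
base.close()
```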
@@ -0,0 +1,30 @@
From f3c254581bcb0591a543aee0c7e031c3c9d0a9a1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Hr=C3=A1zk=C3=BD?= <lhrazky@redhat.com>
Date: Mon, 11 Jan 2021 16:43:25 +0100
Subject: [PATCH] Fix documentation of globs not supporting curly brackets

= changelog =
msg: Fix documentation of globs not supporting curly brackets
type: bugfix
resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1913418
---
 doc/command_ref.rst | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/doc/command_ref.rst b/doc/command_ref.rst
index bbce3ddcf4..d11e8dd502 100644
--- a/doc/command_ref.rst
+++ b/doc/command_ref.rst
@@ -1763,8 +1763,10 @@ The following patterns are supported:
     those two characters, inclusive, is matched. If the first character
     following the ``[`` is a ``!`` or a ``^`` then any character not enclosed
     is matched.
-``{}``
-  Matches any of the comma separated list of enclosed strings.
+
+Note: Curly brackets (``{}``) are not supported. You can still use them in
+shells that support them and let the shell do the expansion, but if quoted or
+escaped, dnf will not expand them.

 --------------
 NEVRA Matching
SOURCES/0008-Module-switch-command.patch (Normal file, 507 lines)
@@ -0,0 +1,507 @@
|
||||
From 6ed0458744090ab307da9d9118690372b2e66ca8 Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Wed, 11 Nov 2020 12:47:21 +0100
|
||||
Subject: [PATCH 1/5] Make module_base better industrialized for method reuse
|
||||
|
||||
It will allow the internals to be reused for the module switch command.
|
||||
---
|
||||
dnf/module/module_base.py | 29 ++++++++++++++++++++---------
|
||||
1 file changed, 20 insertions(+), 9 deletions(-)
|
||||
|
||||
diff --git a/dnf/module/module_base.py b/dnf/module/module_base.py
|
||||
index 49c871c4..0da4fab1 100644
|
||||
--- a/dnf/module/module_base.py
|
||||
+++ b/dnf/module/module_base.py
|
||||
@@ -323,7 +323,7 @@ class ModuleBase(object):
|
||||
assert len(streamDict) == 1
|
||||
return moduleDict
|
||||
|
||||
- def _resolve_specs_enable_update_sack(self, module_specs):
|
||||
+ def _resolve_specs_enable(self, module_specs):
|
||||
no_match_specs = []
|
||||
error_spec = []
|
||||
module_dicts = {}
|
||||
@@ -339,6 +339,9 @@ class ModuleBase(object):
|
||||
error_spec.append(spec)
|
||||
logger.error(ucd(e))
|
||||
logger.error(_("Unable to resolve argument {}").format(spec))
|
||||
+ return no_match_specs, error_spec, module_dicts
|
||||
+
|
||||
+ def _update_sack(self):
|
||||
hot_fix_repos = [i.id for i in self.base.repos.iter_enabled() if i.module_hotfixes]
|
||||
try:
|
||||
solver_errors = self.base.sack.filter_modules(
|
||||
@@ -347,6 +350,10 @@ class ModuleBase(object):
|
||||
debugsolver=self.base.conf.debug_solver)
|
||||
except hawkey.Exception as e:
|
||||
raise dnf.exceptions.Error(ucd(e))
|
||||
+ return solver_errors
|
||||
+
|
||||
+ def _enable_dependencies(self, module_dicts):
|
||||
+ error_spec = []
|
||||
for spec, (nsvcap, moduleDict) in module_dicts.items():
|
||||
for streamDict in moduleDict.values():
|
||||
for modules in streamDict.values():
|
||||
@@ -357,6 +364,17 @@ class ModuleBase(object):
|
||||
error_spec.append(spec)
|
||||
logger.error(ucd(e))
|
||||
logger.error(_("Unable to resolve argument {}").format(spec))
|
||||
+ return error_spec
|
||||
+
|
||||
+ def _resolve_specs_enable_update_sack(self, module_specs):
|
||||
+ no_match_specs, error_spec, module_dicts = self._resolve_specs_enable(module_specs)
|
||||
+
|
||||
+ solver_errors = self._update_sack()
|
||||
+
|
||||
+ dependency_error_spec = self._enable_dependencies(module_dicts)
|
||||
+ if dependency_error_spec:
|
||||
+ error_spec.extend(dependency_error_spec)
|
||||
+
|
||||
return no_match_specs, error_spec, solver_errors, module_dicts
|
||||
|
||||
def _modules_reset_or_disable(self, module_specs, to_state):
|
||||
@@ -379,14 +397,7 @@ class ModuleBase(object):
|
||||
if to_state == STATE_DISABLED:
|
||||
self.base._moduleContainer.disable(name)
|
||||
|
||||
- hot_fix_repos = [i.id for i in self.base.repos.iter_enabled() if i.module_hotfixes]
|
||||
- try:
|
||||
- solver_errors = self.base.sack.filter_modules(
|
||||
- self.base._moduleContainer, hot_fix_repos, self.base.conf.installroot,
|
||||
- self.base.conf.module_platform_id, update_only=True,
|
||||
- debugsolver=self.base.conf.debug_solver)
|
||||
- except hawkey.Exception as e:
|
||||
- raise dnf.exceptions.Error(ucd(e))
|
||||
+ solver_errors = self._update_sack()
|
||||
return no_match_specs, solver_errors
|
||||
|
||||
def _get_package_name_set_and_remove_profiles(self, module_list, nsvcap, remove=False):
|
||||
--
|
||||
2.26.2
|
||||
|
||||
|
||||
From e6473f4e6f17bb635e023b8905f29b318b8795bf Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Wed, 11 Nov 2020 17:09:16 +0100
|
||||
Subject: [PATCH 2/5] Add module switch-to support (RhBug:1792020)
|
||||
|
||||
It is a combination of module rpm distrosync, module profile switch and
|
||||
module stream switch.
|
||||
|
||||
= changelog =
|
||||
msg: Add new `module switch-to` command for switching content
|
||||
of module streams
|
||||
type: enhancement
|
||||
resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1792020
|
||||
---
|
||||
VERSION.cmake | 2 +-
|
||||
dnf.spec | 2 +-
|
||||
dnf/cli/commands/module.py | 24 ++++-
|
||||
dnf/module/module_base.py | 182 ++++++++++++++++++++++++++++++++-----
|
||||
4 files changed, 185 insertions(+), 25 deletions(-)
|
||||
|
||||
diff --git a/dnf/cli/commands/module.py b/dnf/cli/commands/module.py
|
||||
index 5a6c0069..4cdc915e 100644
|
||||
--- a/dnf/cli/commands/module.py
|
||||
+++ b/dnf/cli/commands/module.py
|
||||
@@ -271,6 +271,28 @@ class ModuleCommand(commands.Command):
|
||||
|
||||
logger.error(dnf.exceptions.MarkingErrors(no_match_group_specs=skipped_groups))
|
||||
|
||||
+ class SwitchToSubCommand(SubCommand):
|
||||
+
|
||||
+ aliases = ('switch-to',)
|
||||
+ summary = _('switch a module to a stream and distrosync rpm packages')
|
||||
+
|
||||
+ def configure(self):
|
||||
+ demands = self.cli.demands
|
||||
+ demands.available_repos = True
|
||||
+ demands.sack_activation = True
|
||||
+ demands.resolving = True
|
||||
+ demands.root_user = True
|
||||
+ self.base.conf.module_stream_switch = True
|
||||
+
|
||||
+ def run_on_module(self):
|
||||
+ try:
|
||||
+ self.module_base.switch_to(self.opts.module_spec, strict=self.base.conf.strict)
|
||||
+ except dnf.exceptions.MarkingErrors as e:
|
||||
+ if self.base.conf.strict:
|
||||
+ if e.no_match_group_specs or e.error_group_specs:
|
||||
+ raise e
|
||||
+ logger.error(str(e))
|
||||
+
|
||||
class ProvidesSubCommand(SubCommand):
|
||||
|
||||
aliases = ("provides", )
|
||||
@@ -319,7 +341,7 @@ class ModuleCommand(commands.Command):
|
||||
|
||||
SUBCMDS = {ListSubCommand, InfoSubCommand, EnableSubCommand,
|
||||
DisableSubCommand, ResetSubCommand, InstallSubCommand, UpdateSubCommand,
|
||||
- RemoveSubCommand, ProvidesSubCommand, RepoquerySubCommand}
|
||||
+ RemoveSubCommand, SwitchToSubCommand, ProvidesSubCommand, RepoquerySubCommand}
|
||||
|
||||
SUBCMDS_NOT_REQUIRED_ARG = {ListSubCommand}
|
||||
|
||||
diff --git a/dnf/module/module_base.py b/dnf/module/module_base.py
|
||||
index 0da4fab1..03d54f72 100644
|
||||
--- a/dnf/module/module_base.py
|
||||
+++ b/dnf/module/module_base.py
|
||||
@@ -140,31 +140,140 @@ class ModuleBase(object):
|
||||
if fail_safe_repo_used:
|
||||
raise dnf.exceptions.Error(_(
|
||||
"Installing module from Fail-Safe repository is not allowed"))
|
||||
- # Remove source packages they cannot be installed or upgraded
|
||||
- base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply()
|
||||
- install_base_query = base_no_source_query.filter(nevra_strict=install_set_artefacts)
|
||||
+ __, profiles_errors = self._install_profiles_internal(
|
||||
+ install_set_artefacts, install_dict, strict)
|
||||
+ if profiles_errors:
|
||||
+ error_specs.extend(profiles_errors)
|
||||
|
||||
- # add hot-fix packages
|
||||
- hot_fix_repos = [i.id for i in self.base.repos.iter_enabled() if i.module_hotfixes]
|
||||
- hotfix_packages = base_no_source_query.filter(
|
||||
- reponame=hot_fix_repos, name=install_dict.keys())
|
||||
- install_base_query = install_base_query.union(hotfix_packages)
|
||||
+ if no_match_specs or error_specs or solver_errors:
|
||||
+ raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_specs,
|
||||
+ error_group_specs=error_specs,
|
||||
+ module_depsolv_errors=solver_errors)
|
||||
|
||||
- for pkg_name, set_specs in install_dict.items():
|
||||
- query = install_base_query.filter(name=pkg_name)
|
||||
- if not query:
|
||||
- # package can also be non-modular or part of another stream
|
||||
- query = base_no_source_query.filter(name=pkg_name)
|
||||
- if not query:
|
||||
- for spec in set_specs:
|
||||
- logger.error(_("Unable to resolve argument {}").format(spec))
|
||||
- logger.error(_("No match for package {}").format(pkg_name))
|
||||
- error_specs.extend(set_specs)
|
||||
- continue
|
||||
- self.base._goal.group_members.add(pkg_name)
|
||||
+ def switch_to(self, module_specs, strict=True):
|
||||
+ # :api
|
||||
+ no_match_specs, error_specs, module_dicts = self._resolve_specs_enable(module_specs)
|
||||
+ # collect name of artifacts from new modules for distrosync
|
||||
+ new_artifacts_names = set()
|
||||
+ # collect name of artifacts from active modules for distrosync before sack update
|
||||
+ active_artifacts_names = set()
|
||||
+ src_arches = {"nosrc", "src"}
|
||||
+ for spec, (nsvcap, moduledict) in module_dicts.items():
|
||||
+ for name in moduledict.keys():
|
||||
+ for module in self.base._moduleContainer.query(name, "", "", "", ""):
|
||||
+ if self.base._moduleContainer.isModuleActive(module):
|
||||
+ for artifact in module.getArtifacts():
|
||||
+ arch = artifact.rsplit(".", 1)[1]
|
||||
+ if arch in src_arches:
|
||||
+ continue
|
||||
+ pkg_name = artifact.rsplit("-", 2)[0]
|
||||
+ active_artifacts_names.add(pkg_name)
|
||||
+
|
||||
+ solver_errors = self._update_sack()
|
||||
+
|
||||
+ dependency_error_spec = self._enable_dependencies(module_dicts)
|
||||
+ if dependency_error_spec:
|
||||
+ error_specs.extend(dependency_error_spec)
|
||||
+
|
||||
+ # <package_name, set_of_spec>
|
||||
+ fail_safe_repo = hawkey.MODULE_FAIL_SAFE_REPO_NAME
|
||||
+ install_dict = {}
|
||||
+ install_set_artifacts = set()
|
||||
+ fail_safe_repo_used = False
|
||||
+
|
||||
+ # list of name: [profiles] for module profiles being removed
|
||||
+ removed_profiles = self.base._moduleContainer.getRemovedProfiles()
|
||||
+
|
||||
+ for spec, (nsvcap, moduledict) in module_dicts.items():
|
||||
+ for name, streamdict in moduledict.items():
|
||||
+ for stream, module_list in streamdict.items():
|
||||
+ install_module_list = [x for x in module_list
|
||||
+ if self.base._moduleContainer.isModuleActive(x.getId())]
|
||||
+ if not install_module_list:
|
||||
+ "No active matches for argument '{0}' in module '{1}:{2}'"
|
||||
+ logger.error(_("No active matches for argument '{0}' in module "
|
||||
+ "'{1}:{2}'").format(spec, name, stream))
|
||||
+ error_specs.append(spec)
|
||||
+ continue
|
||||
+ profiles = []
|
||||
+ latest_module = self._get_latest(install_module_list)
|
||||
+ if latest_module.getRepoID() == fail_safe_repo:
|
||||
+ msg = _(
|
||||
+ "Installing module '{0}' from Fail-Safe repository {1} is not allowed")
|
||||
+ logger.critical(msg.format(latest_module.getNameStream(), fail_safe_repo))
|
||||
+ fail_safe_repo_used = True
|
||||
+ if nsvcap.profile:
|
||||
+ profiles.extend(latest_module.getProfiles(nsvcap.profile))
|
||||
+ if not profiles:
|
||||
+ available_profiles = latest_module.getProfiles()
|
||||
+ if available_profiles:
|
||||
+ profile_names = ", ".join(sorted(
|
||||
+ [profile.getName() for profile in available_profiles]))
|
||||
+ msg = _("Unable to match profile for argument {}. Available "
|
||||
+ "profiles for '{}:{}': {}").format(
|
||||
+ spec, name, stream, profile_names)
|
||||
+ else:
|
||||
+ msg = _("Unable to match profile for argument {}").format(spec)
|
||||
+ logger.error(msg)
|
||||
+ no_match_specs.append(spec)
|
||||
+ continue
|
||||
+ elif name in removed_profiles:
|
||||
+
|
||||
+ for profile in removed_profiles[name]:
|
||||
+ module_profiles = latest_module.getProfiles(profile)
|
||||
+ if not module_profiles:
|
||||
+ logger.warning(
|
||||
+ _("Installed profile '{0}' is not available in module "
|
||||
+ "'{1}' stream '{2}'").format(profile, name, stream))
|
||||
+ continue
|
||||
+ profiles.extend(module_profiles)
|
||||
+ for profile in profiles:
|
||||
+ self.base._moduleContainer.install(latest_module, profile.getName())
|
||||
+ for pkg_name in profile.getContent():
|
||||
+ install_dict.setdefault(pkg_name, set()).add(spec)
|
||||
+ for module in install_module_list:
|
||||
+ artifacts = module.getArtifacts()
|
||||
+ install_set_artifacts.update(artifacts)
|
||||
+ for artifact in artifacts:
|
||||
+ arch = artifact.rsplit(".", 1)[1]
|
||||
+ if arch in src_arches:
|
||||
+ continue
|
||||
+ pkg_name = artifact.rsplit("-", 2)[0]
|
||||
+ new_artifacts_names.add(pkg_name)
|
||||
+ if fail_safe_repo_used:
|
||||
+ raise dnf.exceptions.Error(_(
|
||||
+ "Installing module from Fail-Safe repository is not allowed"))
|
||||
+ install_base_query, profiles_errors = self._install_profiles_internal(
|
||||
+ install_set_artifacts, install_dict, strict)
|
||||
+ if profiles_errors:
|
||||
+ error_specs.extend(profiles_errors)
|
||||
+
|
||||
+ # distrosync module name
|
||||
+ all_names = set()
|
||||
+ all_names.update(new_artifacts_names)
|
||||
+ all_names.update(active_artifacts_names)
|
||||
+ remove_query = self.base.sack.query().filterm(empty=True)
|
||||
+ for pkg_name in all_names:
|
||||
+ query = self.base.sack.query().filterm(name=pkg_name)
|
||||
+ installed = query.installed()
|
||||
+ if not installed:
|
||||
+ continue
|
||||
+ available = query.available()
|
||||
+ if not available:
|
||||
+ logger.warning(_("No packages available to distrosync for package name "
|
||||
+ "'{}'").format(pkg_name))
|
||||
+ if pkg_name not in new_artifacts_names:
|
||||
+ remove_query = remove_query.union(query)
|
||||
+ continue
|
||||
+
|
||||
+ only_new_module = query.intersection(install_base_query)
|
||||
+ if only_new_module:
|
||||
+ query = only_new_module
|
||||
sltr = dnf.selector.Selector(self.base.sack)
|
||||
sltr.set(pkg=query)
|
||||
- self.base._goal.install(select=sltr, optional=(not strict))
|
||||
+ self.base._goal.distupgrade(select=sltr)
|
||||
+ self.base._remove_if_unneeded(remove_query)
|
||||
+
|
||||
if no_match_specs or error_specs or solver_errors:
|
||||
raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_specs,
|
||||
error_group_specs=error_specs,
|
||||
@@ -183,7 +292,7 @@ class ModuleBase(object):
|
||||
fail_safe_repo = hawkey.MODULE_FAIL_SAFE_REPO_NAME
|
||||
fail_safe_repo_used = False
|
||||
|
||||
- # Remove source packages they cannot be installed or upgraded
|
||||
+ # Remove source packages because they cannot be installed or upgraded
|
||||
base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply()
|
||||
|
||||
for spec in module_specs:
|
||||
@@ -694,6 +803,35 @@ class ModuleBase(object):
|
||||
def _format_repoid(self, repo_name):
|
||||
return "{}\n".format(self.base.output.term.bold(repo_name))
|
||||
|
||||
+ def _install_profiles_internal(self, install_set_artifacts, install_dict, strict):
|
||||
+ # Remove source packages because they cannot be installed or upgraded
|
||||
+ base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply()
|
||||
+ install_base_query = base_no_source_query.filter(nevra_strict=install_set_artifacts)
|
||||
+ error_specs = []
|
||||
+
|
||||
+ # add hot-fix packages
|
||||
+ hot_fix_repos = [i.id for i in self.base.repos.iter_enabled() if i.module_hotfixes]
|
||||
+ hotfix_packages = base_no_source_query.filter(
|
||||
+ reponame=hot_fix_repos, name=install_dict.keys())
|
||||
+ install_base_query = install_base_query.union(hotfix_packages)
|
||||
+
|
||||
+ for pkg_name, set_specs in install_dict.items():
|
||||
+ query = install_base_query.filter(name=pkg_name)
|
||||
+ if not query:
|
||||
+ # package can also be non-modular or part of another stream
|
||||
+ query = base_no_source_query.filter(name=pkg_name)
|
||||
+ if not query:
|
||||
+ for spec in set_specs:
|
||||
+ logger.error(_("Unable to resolve argument {}").format(spec))
|
||||
+ logger.error(_("No match for package {}").format(pkg_name))
|
||||
+ error_specs.extend(set_specs)
|
||||
+ continue
|
||||
+ self.base._goal.group_members.add(pkg_name)
|
||||
+ sltr = dnf.selector.Selector(self.base.sack)
|
||||
+ sltr.set(pkg=query)
|
||||
+ self.base._goal.install(select=sltr, optional=(not strict))
|
||||
+ return install_base_query, error_specs
|
||||
+
|
||||
|
||||
def format_modular_solver_errors(errors):
|
||||
msg = dnf.util._format_resolve_problems(errors)
|
||||
--
|
||||
2.26.2
|
||||
|
||||
|
||||
From df8c74679193bf27db584b3ad225997b2f5f4b87 Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Thu, 12 Nov 2020 13:51:02 +0100
|
||||
Subject: [PATCH 3/5] [minor] Rename all variables with artefact to artifact
|
||||
|
||||
---
|
||||
dnf/module/module_base.py | 10 +++++-----
|
||||
1 file changed, 5 insertions(+), 5 deletions(-)
|
||||
|
||||
diff --git a/dnf/module/module_base.py b/dnf/module/module_base.py
|
||||
index 03d54f72..7298c9a3 100644
|
||||
--- a/dnf/module/module_base.py
|
||||
+++ b/dnf/module/module_base.py
|
||||
@@ -73,7 +73,7 @@ class ModuleBase(object):
|
||||
# <package_name, set_of_spec>
|
||||
fail_safe_repo = hawkey.MODULE_FAIL_SAFE_REPO_NAME
|
||||
install_dict = {}
|
||||
- install_set_artefacts = set()
|
||||
+ install_set_artifacts = set()
|
||||
fail_safe_repo_used = False
|
||||
for spec, (nsvcap, moduledict) in module_dicts.items():
|
||||
for name, streamdict in moduledict.items():
|
||||
@@ -136,12 +136,12 @@ class ModuleBase(object):
|
||||
for pkg_name in profile.getContent():
|
||||
install_dict.setdefault(pkg_name, set()).add(spec)
|
||||
for module in install_module_list:
|
||||
- install_set_artefacts.update(module.getArtifacts())
|
||||
+ install_set_artifacts.update(module.getArtifacts())
|
||||
if fail_safe_repo_used:
|
||||
raise dnf.exceptions.Error(_(
|
||||
"Installing module from Fail-Safe repository is not allowed"))
|
||||
__, profiles_errors = self._install_profiles_internal(
|
||||
- install_set_artefacts, install_dict, strict)
|
||||
+ install_set_artifacts, install_dict, strict)
|
||||
if profiles_errors:
|
||||
error_specs.extend(profiles_errors)
|
||||
|
||||
@@ -326,8 +326,8 @@ class ModuleBase(object):
|
||||
else:
|
||||
for profile in latest_module.getProfiles():
|
||||
upgrade_package_set.update(profile.getContent())
|
||||
- for artefact in latest_module.getArtifacts():
|
||||
- subj = hawkey.Subject(artefact)
|
||||
+ for artifact in latest_module.getArtifacts():
|
||||
+ subj = hawkey.Subject(artifact)
|
||||
for nevra_obj in subj.get_nevra_possibilities(
|
||||
forms=[hawkey.FORM_NEVRA]):
|
||||
upgrade_package_set.add(nevra_obj.name)
|
||||
--
|
||||
2.26.2
|
||||
|
||||
|
||||
From 0818bb80fc0846f602f338a2119671be97c47217 Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Thu, 12 Nov 2020 15:11:29 +0100
|
||||
Subject: [PATCH 4/5] [doc] Add description of dnf module switch-to
|
||||
|
||||
---
|
||||
doc/command_ref.rst | 30 ++++++++++++++++++++++--------
|
||||
1 file changed, 22 insertions(+), 8 deletions(-)
|
||||
|
||||
diff --git a/doc/command_ref.rst b/doc/command_ref.rst
|
||||
index 83879013..c12837ea 100644
|
||||
--- a/doc/command_ref.rst
|
||||
+++ b/doc/command_ref.rst
|
||||
@@ -979,15 +979,31 @@ Module subcommands take :ref:`\<module-spec>\ <specifying_modules-label>`... arg
|
||||
In case no profile was provided, all default profiles get installed.
|
||||
Module streams get enabled accordingly.
|
||||
|
||||
- This command cannot be used for switching module streams. It is recommended to remove all
|
||||
- installed content from the module and reset the module using the
|
||||
- :ref:`reset <module_reset_command-label>` command. After you reset the module, you can install
|
||||
- the other stream.
|
||||
+ This command cannot be used for switching module streams. Use the
|
||||
+ :ref:`dnf module switch-to <module_switch_to_command-label>` command for that.
|
||||
|
||||
``dnf [options] module update <module-spec>...``
|
||||
Update packages associated with an active module stream, optionally restricted to a profile.
|
||||
If the `profile_name` is provided, only the packages referenced by that profile will be updated.
|
||||
|
||||
+.. _module_switch_to_command-label:
|
||||
+
|
||||
+``dnf [options] module switch-to <module-spec>...``
|
||||
+ Switch to or enable a module stream, change versions of installed packages to versions provided
|
||||
+ by the new stream, and remove packages from the old stream that are no longer available. It also
|
||||
+ updates installed profiles if they are available for the new stream. If a profile is
|
||||
+ provided, it installs that profile and does not update any already installed profiles.
|
||||
+
|
||||
+ This command can be used as a stronger version of the
|
||||
+ :ref:`dnf module enable <module_enable_command-label>` command, which not only enables modules,
|
||||
+ but also performs a `distrosync` on all modular packages in the enabled modules.
|
||||
+
|
||||
+ It can also be used as a stronger version of the
|
||||
+ :ref:`dnf module install <module_install_command-label>` command, but it requires specifying
|
||||
+ the profiles that are supposed to be installed, because the `switch-to` command does not use `default
|
||||
+ profiles`. The `switch-to` command doesn't only install profiles, it also performs a `distrosync`
|
||||
+ on all modular packages in the installed module.
|
||||
+
|
||||
``dnf [options] module remove <module-spec>...``
|
||||
Remove installed module profiles, including packages that were installed with the
|
||||
:ref:`dnf module install <module_install_command-label>` command. Will not remove packages
|
||||
@@ -1010,10 +1026,8 @@ Module subcommands take :ref:`\<module-spec>\ <specifying_modules-label>`... arg
|
||||
of modular dependency issue the operation will be rejected. To perform the action anyway please use
|
||||
\-\ :ref:`-skip-broken <skip-broken_option-label>` option.
|
||||
|
||||
- This command cannot be used for switching module streams. It is recommended to remove all
|
||||
- installed content from the module, and reset the module using the
|
||||
- :ref:`reset <module_reset_command-label>` command. After you reset the module, you can enable
|
||||
- the other stream.
|
||||
+ This command cannot be used for switching module streams. Use the
|
||||
+ :ref:`dnf module switch-to <module_switch_to_command-label>` command for that.
|
||||
|
||||
.. _module_disable_command-label:
|
||||
|
||||
--
|
||||
2.26.2
|
||||
|
||||
|
||||
From 6b0b2b99e40c20706145e774626658825f5bc55d Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Wed, 25 Nov 2020 12:34:30 +0100
|
||||
Subject: [PATCH 5/5] Do not use source rpms for module switch
|
||||
|
||||
It prevents a misleading message from libsolv that it tries to install a
|
||||
source rpm.
|
||||
---
|
||||
dnf/module/module_base.py | 4 +++-
|
||||
1 file changed, 3 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/dnf/module/module_base.py b/dnf/module/module_base.py
|
||||
index 7298c9a3..02d5d5a3 100644
|
||||
--- a/dnf/module/module_base.py
|
||||
+++ b/dnf/module/module_base.py
|
||||
@@ -253,8 +253,10 @@ class ModuleBase(object):
|
||||
all_names.update(new_artifacts_names)
|
||||
all_names.update(active_artifacts_names)
|
||||
remove_query = self.base.sack.query().filterm(empty=True)
|
||||
+ base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply()
|
||||
+
|
||||
for pkg_name in all_names:
|
||||
- query = self.base.sack.query().filterm(name=pkg_name)
|
||||
+ query = base_no_source_query.filter(name=pkg_name)
|
||||
installed = query.installed()
|
||||
if not installed:
|
||||
continue
|
||||
--
|
||||
2.26.2
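
A minimal usage sketch of the same source-package filter introduced above, written against the public query API (the repository setup is illustrative, not part of the patch)::

    #!/usr/bin/python3
    import dnf

    with dnf.Base() as base:
        base.read_all_repos()
        base.fill_sack(load_system_repo=False)
        # same arch filter as in the hunk above: drop src/nosrc packages from the query
        no_src = base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply()
        print(len(no_src.run()), "binary packages available")
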
|
||||
|
@ -0,0 +1,107 @@
|
||||
From de8bbccc4e035a9a9b5baa3aeb0dbf0cb12f1fe2 Mon Sep 17 00:00:00 2001
|
||||
From: Marek Blaha <mblaha@redhat.com>
|
||||
Date: Wed, 9 Dec 2020 13:45:46 +0100
|
||||
Subject: [PATCH 1/1] yum.misc.decompress() to handle uncompressed files
|
||||
(RhBug:1895059)
|
||||
|
||||
The underlying libdnf function is capable of handling even uncompressed
|
||||
files - so now uncompressed files are just copied to the destination.
|
||||
The unused fn_only parameter of the function was also removed.
|
||||
|
||||
This fixes an issue with the "reposync -m" command when the group metadata file
|
||||
in the repository is a plain xml file (not compressed).
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1895059
|
||||
---
|
||||
dnf/yum/misc.py | 60 +++++++++++++++++++++++++++----------------------
|
||||
1 file changed, 33 insertions(+), 27 deletions(-)
|
||||
|
||||
diff --git a/dnf/yum/misc.py b/dnf/yum/misc.py
|
||||
index 0f922350..3e3905fe 100644
|
||||
--- a/dnf/yum/misc.py
|
||||
+++ b/dnf/yum/misc.py
|
||||
@@ -386,34 +386,39 @@ def getloginuid():
|
||||
_cached_getloginuid = _getloginuid()
|
||||
return _cached_getloginuid
|
||||
|
||||
-def decompress(filename, dest=None, fn_only=False, check_timestamps=False):
|
||||
- """take a filename and decompress it into the same relative location.
|
||||
- if the file is not compressed just return the file"""
|
||||
-
|
||||
- ztype = None
|
||||
- out = filename # If the file is not compressed, it returns the same file
|
||||
|
||||
- dot_pos = filename.rfind('.')
|
||||
- if dot_pos > 0:
|
||||
- ext = filename[dot_pos:]
|
||||
- if ext in ('.zck', '.xz', '.bz2', '.gz'):
|
||||
- ztype = ext
|
||||
- out = dest if dest else filename[:dot_pos]
|
||||
-
|
||||
- if ztype and not fn_only:
|
||||
- if check_timestamps:
|
||||
- fi = stat_f(filename)
|
||||
- fo = stat_f(out)
|
||||
- if fi and fo and fo.st_mtime == fi.st_mtime:
|
||||
- return out
|
||||
+def decompress(filename, dest=None, check_timestamps=False):
|
||||
+ """take a filename and decompress it into the same relative location.
|
||||
+ When the compression type is not recognized (or file is not compressed),
|
||||
+ the content of the file is copied to the destination"""
|
||||
+
|
||||
+ if dest:
|
||||
+ out = dest
|
||||
+ else:
|
||||
+ out = None
|
||||
+ dot_pos = filename.rfind('.')
|
||||
+ if dot_pos > 0:
|
||||
+ ext = filename[dot_pos:]
|
||||
+ if ext in ('.zck', '.xz', '.bz2', '.gz', '.lzma', '.zst'):
|
||||
+ out = filename[:dot_pos]
|
||||
+ if out is None:
|
||||
+ raise dnf.exceptions.MiscError("Could not determine destination filename")
|
||||
+
|
||||
+ if check_timestamps:
|
||||
+ fi = stat_f(filename)
|
||||
+ fo = stat_f(out)
|
||||
+ if fi and fo and fo.st_mtime == fi.st_mtime:
|
||||
+ return out
|
||||
|
||||
- try:
|
||||
- libdnf.utils.decompress(filename, out, 0o644, ztype)
|
||||
- except RuntimeError as e:
|
||||
- raise dnf.exceptions.MiscError(str(e))
|
||||
+ try:
|
||||
+ # libdnf.utils.decompress either decompress file to the destination or
|
||||
+ # copy the content if the compression type is not recognized
|
||||
+ libdnf.utils.decompress(filename, out, 0o644)
|
||||
+ except RuntimeError as e:
|
||||
+ raise dnf.exceptions.MiscError(str(e))
|
||||
|
||||
- if check_timestamps and fi:
|
||||
- os.utime(out, (fi.st_mtime, fi.st_mtime))
|
||||
+ if check_timestamps and fi:
|
||||
+ os.utime(out, (fi.st_mtime, fi.st_mtime))
|
||||
|
||||
return out
|
||||
|
||||
@@ -424,13 +429,14 @@ def calculate_repo_gen_dest(filename, generated_name):
|
||||
os.makedirs(dest, mode=0o755)
|
||||
return dest + '/' + generated_name
|
||||
|
||||
-def repo_gen_decompress(filename, generated_name, cached=False):
|
||||
+
|
||||
+def repo_gen_decompress(filename, generated_name):
|
||||
""" This is a wrapper around decompress, where we work out a cached
|
||||
generated name, and use check_timestamps. filename _must_ be from
|
||||
a repo. and generated_name is the type of the file. """
|
||||
|
||||
dest = calculate_repo_gen_dest(filename, generated_name)
|
||||
- return decompress(filename, dest=dest, check_timestamps=True, fn_only=cached)
|
||||
+ return decompress(filename, dest=dest, check_timestamps=True)
|
||||
|
||||
def read_in_items_from_dot_dir(thisglob, line_as_list=True):
|
||||
""" Takes a glob of a dir (like /etc/foo.d/\\*.foo) returns a list of all
|
||||
--
|
||||
2.26.2
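
A short, hedged usage sketch of the reworked helper (the file paths are made up for illustration); after this patch the compression type no longer matters, so an uncompressed group file is simply copied to the destination::

    #!/usr/bin/python3
    from dnf.yum import misc

    # handles .gz/.xz/.bz2/.zck/.lzma/.zst as well as plain, uncompressed XML
    out = misc.decompress("/tmp/repodata/comps.xml.gz", dest="/tmp/comps.xml")
    print("group metadata written to", out)
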
|
||||
|
@ -0,0 +1,22 @@
|
||||
From 04b1a90bb24b7e98d4e001c44f8b3f563ad5f0f6 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Tue, 24 Nov 2020 14:31:21 +0100
|
||||
Subject: [PATCH] Make rotated log file (mode, owner, group) match previous log
|
||||
settings (RhBug:1894344)
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1894344
|
||||
---
|
||||
etc/logrotate.d/dnf | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/etc/logrotate.d/dnf b/etc/logrotate.d/dnf
|
||||
index b96c6ff9b4..0ce2629f1b 100644
|
||||
--- a/etc/logrotate.d/dnf
|
||||
+++ b/etc/logrotate.d/dnf
|
||||
@@ -3,5 +3,5 @@
|
||||
notifempty
|
||||
rotate 4
|
||||
weekly
|
||||
- create 0600 root root
|
||||
+ create
|
||||
}
|
117
SOURCES/0011-Add-new-attribute-for-Package--from-repo.patch
Normal file
@ -0,0 +1,117 @@
|
||||
From eb2aa8c14208da7a567a0d79a8baa9f5201640cd Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Tue, 24 Nov 2020 09:17:41 +0100
|
||||
Subject: [PATCH 1/3] Add `from_repo` attribute for Package class
|
||||
(RhBug:1898968,1879168)
|
||||
|
||||
It is an alias for the private attribute _from_repo.
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1898968
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1879168
|
||||
---
|
||||
dnf/cli/commands/repoquery.py | 2 +-
|
||||
dnf/package.py | 7 +++++--
|
||||
doc/api_package.rst | 6 ++++++
|
||||
3 files changed, 12 insertions(+), 3 deletions(-)
|
||||
|
||||
diff --git a/dnf/cli/commands/repoquery.py b/dnf/cli/commands/repoquery.py
|
||||
index 099a9312d9..a11b440525 100644
|
||||
--- a/dnf/cli/commands/repoquery.py
|
||||
+++ b/dnf/cli/commands/repoquery.py
|
||||
@@ -44,7 +44,7 @@
|
||||
QFORMAT_MATCH = re.compile(r'%(-?\d*?){([:.\w]+?)}')
|
||||
|
||||
QUERY_TAGS = """\
|
||||
-name, arch, epoch, version, release, reponame (repoid), evr,
|
||||
+name, arch, epoch, version, release, reponame (repoid), from_repo, evr,
|
||||
debug_name, source_name, source_debug_name,
|
||||
installtime, buildtime, size, downloadsize, installsize,
|
||||
provides, requires, obsoletes, conflicts, sourcerpm,
|
||||
diff --git a/dnf/package.py b/dnf/package.py
|
||||
index d44ce6706c..f647df6bff 100644
|
||||
--- a/dnf/package.py
|
||||
+++ b/dnf/package.py
|
||||
@@ -76,12 +76,15 @@ def _from_repo(self):
|
||||
pkgrepo = None
|
||||
if self._from_system:
|
||||
pkgrepo = self.base.history.repo(self)
|
||||
- else:
|
||||
- pkgrepo = {}
|
||||
if pkgrepo:
|
||||
return '@' + pkgrepo
|
||||
return self.reponame
|
||||
|
||||
+ @property
|
||||
+ def from_repo(self):
|
||||
+ # :api
|
||||
+ return self._from_repo
|
||||
+
|
||||
@property
|
||||
def _header(self):
|
||||
return dnf.rpm._header(self.localPkg())
|
||||
diff --git a/doc/api_package.rst b/doc/api_package.rst
|
||||
index 95df5d4b23..48ef8f1d22 100644
|
||||
--- a/doc/api_package.rst
|
||||
+++ b/doc/api_package.rst
|
||||
@@ -74,6 +74,12 @@
|
||||
|
||||
Files the package provides (list of strings).
|
||||
|
||||
+ .. attribute:: from_repo
|
||||
+
|
||||
+ For installed packages returns id of repository from which the package was installed prefixed
|
||||
+ with '@' (if such information is available in the history database). Otherwise returns id of
|
||||
+ repository the package belongs to (@System for installed packages of unknown origin) (string).
|
||||
+
|
||||
.. attribute:: group
|
||||
|
||||
Group of the package (string).
|
||||
|
||||
From 1a933f8e036cd704fa6e7f77a8448263e93e540f Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Tue, 24 Nov 2020 09:19:42 +0100
|
||||
Subject: [PATCH 2/3] Correct description of Package().reponame attribute
|
||||
|
||||
---
|
||||
doc/api_package.rst | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/doc/api_package.rst b/doc/api_package.rst
|
||||
index 48ef8f1d22..a78897babe 100644
|
||||
--- a/doc/api_package.rst
|
||||
+++ b/doc/api_package.rst
|
||||
@@ -138,7 +138,7 @@
|
||||
|
||||
.. attribute:: reponame
|
||||
|
||||
- Id of repository the package was installed from (string).
|
||||
+ Id of repository the package belongs to (@System for installed packages) (string).
|
||||
|
||||
.. attribute:: requires
|
||||
|
||||
|
||||
From 24cdb68776507fdae25bed0e82d80df3018aecfc Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Tue, 24 Nov 2020 09:22:07 +0100
|
||||
Subject: [PATCH 3/3] Add unittest for new API
|
||||
|
||||
---
|
||||
tests/api/test_dnf_package.py | 5 +++++
|
||||
1 file changed, 5 insertions(+)
|
||||
|
||||
diff --git a/tests/api/test_dnf_package.py b/tests/api/test_dnf_package.py
|
||||
index 04cddc7ecc..5952352bb5 100644
|
||||
--- a/tests/api/test_dnf_package.py
|
||||
+++ b/tests/api/test_dnf_package.py
|
||||
@@ -163,6 +163,11 @@ def test_reponame(self):
|
||||
self.assertHasAttr(self.package, "reponame")
|
||||
self.assertHasType(self.package.reponame, str)
|
||||
|
||||
+ def test_from_repo(self):
|
||||
+ # Package.from_repo
|
||||
+ self.assertHasAttr(self.package, "from_repo")
|
||||
+ self.assertHasType(self.package.from_repo, str)
|
||||
+
|
||||
def test_requires(self):
|
||||
# Package.requires
|
||||
self.assertHasAttr(self.package, "requires")
|
80
SOURCES/0012-Change-behaviour-of-Package-.from-repo.patch
Normal file
@ -0,0 +1,80 @@
|
||||
From ca06d200d738fd6b23cb05b9776c9fd29288665f Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Wed, 25 Nov 2020 13:00:22 +0100
|
||||
Subject: [PATCH 1/2] Change behaviour of Package().from_repo
|
||||
|
||||
The change makes a distinction between the private attribute _from_repo and the
|
||||
API attribute. _from_repo is required for `dnf info` and we have to keep
|
||||
it, but for the API the magic handling behind it could be confusing.
|
||||
---
|
||||
dnf/package.py | 8 +++++++-
|
||||
doc/api_package.rst | 5 ++---
|
||||
2 files changed, 9 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/dnf/package.py b/dnf/package.py
|
||||
index f647df6bff..28ca5ef760 100644
|
||||
--- a/dnf/package.py
|
||||
+++ b/dnf/package.py
|
||||
@@ -73,6 +73,12 @@ def _from_system(self):
|
||||
|
||||
@property
|
||||
def _from_repo(self):
|
||||
+ """
|
||||
+ For installed packages returns id of repository from which the package was installed
|
||||
+ prefixed with '@' (if such information is available in the history database). Otherwise
|
||||
+ returns id of repository the package belongs to (@System for installed packages of unknown
|
||||
+ origin)
|
||||
+ """
|
||||
pkgrepo = None
|
||||
if self._from_system:
|
||||
pkgrepo = self.base.history.repo(self)
|
||||
@@ -83,7 +89,7 @@ def _from_repo(self):
|
||||
@property
|
||||
def from_repo(self):
|
||||
# :api
|
||||
- return self._from_repo
|
||||
+ return self.base.history.repo(self)
|
||||
|
||||
@property
|
||||
def _header(self):
|
||||
diff --git a/doc/api_package.rst b/doc/api_package.rst
|
||||
index a78897babe..634f504ca6 100644
|
||||
--- a/doc/api_package.rst
|
||||
+++ b/doc/api_package.rst
|
||||
@@ -76,9 +76,8 @@
|
||||
|
||||
.. attribute:: from_repo
|
||||
|
||||
- For installed packages returns id of repository from which the package was installed prefixed
|
||||
- with '@' (if such information is available in the history database). Otherwise returns id of
|
||||
- repository the package belongs to (@System for installed packages of unknown origin) (string).
|
||||
+ For installed packages returns id of repository from which the package was installed if such
|
||||
+ information is available in the history database. Otherwise returns an empty string (string).
|
||||
|
||||
.. attribute:: group
|
||||
|
||||
|
||||
From 895e61a1281db753dd28f01c20816e83c5316cdd Mon Sep 17 00:00:00 2001
|
||||
From: Jaroslav Mracek <jmracek@redhat.com>
|
||||
Date: Thu, 26 Nov 2020 10:02:08 +0100
|
||||
Subject: [PATCH 2/2] fixup! Change behaviour of Package().from_repo
|
||||
|
||||
---
|
||||
dnf/package.py | 4 +++-
|
||||
1 file changed, 3 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/dnf/package.py b/dnf/package.py
|
||||
index 28ca5ef760..baef04fa5b 100644
|
||||
--- a/dnf/package.py
|
||||
+++ b/dnf/package.py
|
||||
@@ -89,7 +89,9 @@ def _from_repo(self):
|
||||
@property
|
||||
def from_repo(self):
|
||||
# :api
|
||||
- return self.base.history.repo(self)
|
||||
+ if self._from_system:
|
||||
+ return self.base.history.repo(self)
|
||||
+ return ""
|
||||
|
||||
@property
|
||||
def _header(self):
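
Taken together, these two commits leave Package with a public from_repo that only reports the repository recorded in the history database. A small reading sketch (the package name is chosen only for illustration)::

    #!/usr/bin/python3
    import dnf

    with dnf.Base() as base:
        base.fill_sack(load_system_repo=True, load_available_repos=False)
        for pkg in base.sack.query().installed().filter(name="bash"):
            # reponame is "@System" for installed packages;
            # from_repo is the install-time repo id, or "" when the history has no record
            print(pkg.name, pkg.reponame, pkg.from_repo)
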
|
94
SOURCES/0013-Package-add-a-get-header--method.patch
Normal file
@ -0,0 +1,94 @@
|
||||
From 38cc67385fb1b36aa0881bc5982bc58d75dac464 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Hr=C3=A1zk=C3=BD?= <lhrazky@redhat.com>
|
||||
Date: Wed, 11 Nov 2020 18:45:11 +0100
|
||||
Subject: [PATCH] Package: add a get_header() method (RhBug:1876606)
|
||||
|
||||
Adds a get_header() method to the Package class, which returns the rpm
|
||||
header of an installed package.
|
||||
|
||||
= changelog =
|
||||
msg: Add get_header() method to the Package class
|
||||
type: enhancement
|
||||
resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1876606
|
||||
---
|
||||
dnf/package.py | 24 ++++++++++++++++++++++++
|
||||
tests/test_package.py | 12 ++++++++++++
|
||||
2 files changed, 36 insertions(+)
|
||||
|
||||
diff --git a/dnf/package.py b/dnf/package.py
|
||||
index baef04fa5b..836e0e4989 100644
|
||||
--- a/dnf/package.py
|
||||
+++ b/dnf/package.py
|
||||
@@ -26,11 +26,13 @@
|
||||
from dnf.i18n import _
|
||||
|
||||
import binascii
|
||||
+import dnf.exceptions
|
||||
import dnf.rpm
|
||||
import dnf.yum.misc
|
||||
import hawkey
|
||||
import logging
|
||||
import os
|
||||
+import rpm
|
||||
|
||||
logger = logging.getLogger("dnf")
|
||||
|
||||
@@ -95,6 +97,11 @@ def from_repo(self):
|
||||
|
||||
@property
|
||||
def _header(self):
|
||||
+ """
|
||||
+ Returns the header of a locally present rpm package file. As opposed to
|
||||
+ self.get_header(), which retrieves the header of an installed package
|
||||
+ from rpmdb.
|
||||
+ """
|
||||
return dnf.rpm._header(self.localPkg())
|
||||
|
||||
@property
|
||||
@@ -164,6 +171,23 @@ def debugsource_name(self):
|
||||
src_name = self.source_name if self.source_name is not None else self.name
|
||||
return src_name + self.DEBUGSOURCE_SUFFIX
|
||||
|
||||
+ def get_header(self):
|
||||
+ """
|
||||
+ Returns the rpm header of the package if it is installed. If not
|
||||
+ installed, returns None. The header is not cached, it is retrieved from
|
||||
+ rpmdb on every call. In case of a failure (e.g. when the rpmdb changes
|
||||
+ between loading the data and calling this method), raises an instance
|
||||
+ of PackageNotFoundError.
|
||||
+ """
|
||||
+ if not self._from_system:
|
||||
+ return None
|
||||
+
|
||||
+ try:
|
||||
+ # RPMDBI_PACKAGES stands for the header of the package
|
||||
+ return next(self.base._ts.dbMatch(rpm.RPMDBI_PACKAGES, self.rpmdbid))
|
||||
+ except StopIteration:
|
||||
+ raise dnf.exceptions.PackageNotFoundError("Package not found when attempting to retrieve header", str(self))
|
||||
+
|
||||
@property
|
||||
def source_debug_name(self):
|
||||
# :api
|
||||
diff --git a/tests/test_package.py b/tests/test_package.py
|
||||
index cd4872e631..514e5bf099 100644
|
||||
--- a/tests/test_package.py
|
||||
+++ b/tests/test_package.py
|
||||
@@ -68,6 +68,18 @@ def fn_getter():
|
||||
with self.assertRaises(IOError):
|
||||
pkg._header
|
||||
|
||||
+ # rpm.hdr() is not easy to construct with custom data, we just return a string
|
||||
+ # instead, as we don't actually need an instance of rpm.hdr for the test
|
||||
+ @mock.patch("rpm.TransactionSet.dbMatch", lambda self, a, b: iter(["package_header_test_data"]))
|
||||
+ def test_get_header(self):
|
||||
+ pkg = self.sack.query().installed().filter(name="pepper")[0]
|
||||
+ header = pkg.get_header()
|
||||
+ self.assertEqual(header, "package_header_test_data")
|
||||
+
|
||||
+ pkg = self.sack.query().available().filter(name="pepper")[0]
|
||||
+ header = pkg.get_header()
|
||||
+ self.assertEqual(header, None)
|
||||
+
|
||||
@mock.patch("dnf.package.Package.rpmdbid", long(3))
|
||||
def test_idx(self):
|
||||
""" pkg.idx is an int. """
|
@ -0,0 +1,115 @@
|
||||
From b3542a96c6f77e5cc0b5217e586fcc56fde074d8 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Wed, 2 Dec 2020 15:27:13 +0100
|
||||
Subject: [PATCH 1/2] Add api function: fill_sack_from_repos_in_cache
|
||||
(RhBug:1865803)
|
||||
|
||||
= changelog =
|
||||
msg: Add api function fill_sack_from_repos_in_cache to allow loading a repo cache with repomd and (solv file or primary xml) only
|
||||
type: enhancement
|
||||
resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1865803
|
||||
---
|
||||
dnf.spec | 2 +-
|
||||
dnf/base.py | 62 +++++++++++++++++++++++++++++++++++++++++++++++++++++
|
||||
2 files changed, 63 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/dnf/base.py b/dnf/base.py
|
||||
index 075e74265a..a10b837340 100644
|
||||
--- a/dnf/base.py
|
||||
+++ b/dnf/base.py
|
||||
@@ -425,6 +425,68 @@ def fill_sack(self, load_system_repo=True, load_available_repos=True):
|
||||
self._plugins.run_sack()
|
||||
return self._sack
|
||||
|
||||
+ def fill_sack_from_repos_in_cache(self, load_system_repo=True):
|
||||
+ # :api
|
||||
+ """
|
||||
+ Prepare Sack and Goal objects and also load all enabled repositories from cache only;
|
||||
+ it doesn't download anything and it doesn't check if metadata are expired.
|
||||
+ If there is not enough metadata present (repomd.xml or both primary.xml and solv file
|
||||
+ are missing) the given repo is either skipped or it throws a RepoError exception depending
|
||||
+ on skip_if_unavailable configuration.
|
||||
+ """
|
||||
+ timer = dnf.logging.Timer('sack setup')
|
||||
+ self.reset(sack=True, goal=True)
|
||||
+ self._sack = dnf.sack._build_sack(self)
|
||||
+ lock = dnf.lock.build_metadata_lock(self.conf.cachedir, self.conf.exit_on_lock)
|
||||
+ with lock:
|
||||
+ if load_system_repo is not False:
|
||||
+ try:
|
||||
+ # FIXME: If build_cache=True, @System.solv is incorrectly updated in install-
|
||||
+ # remove loops
|
||||
+ self._sack.load_system_repo(build_cache=False)
|
||||
+ except IOError:
|
||||
+ if load_system_repo != 'auto':
|
||||
+ raise
|
||||
+
|
||||
+ error_repos = []
|
||||
+ # Iterate over installed GPG keys and check their validity using DNSSEC
|
||||
+ if self.conf.gpgkey_dns_verification:
|
||||
+ dnf.dnssec.RpmImportedKeys.check_imported_keys_validity()
|
||||
+ for repo in self.repos.iter_enabled():
|
||||
+ try:
|
||||
+ repo._repo.loadCache(throwExcept=True, ignoreMissing=True)
|
||||
+ mdload_flags = dict(load_filelists=True,
|
||||
+ load_presto=repo.deltarpm,
|
||||
+ load_updateinfo=True)
|
||||
+ if repo.load_metadata_other:
|
||||
+ mdload_flags["load_other"] = True
|
||||
+
|
||||
+ self._sack.load_repo(repo._repo, **mdload_flags)
|
||||
+
|
||||
+ logger.debug(_("%s: using metadata from %s."), repo.id,
|
||||
+ dnf.util.normalize_time(
|
||||
+ repo._repo.getMaxTimestamp()))
|
||||
+ except (RuntimeError, hawkey.Exception) as e:
|
||||
+ if repo.skip_if_unavailable is False:
|
||||
+ raise dnf.exceptions.RepoError(
|
||||
+ _("loading repo '{}' failure: {}").format(repo.id, e))
|
||||
+ else:
|
||||
+ logger.debug(_("loading repo '{}' failure: {}").format(repo.id, e))
|
||||
+ error_repos.append(repo.id)
|
||||
+ repo.disable()
|
||||
+ if error_repos:
|
||||
+ logger.warning(
|
||||
+ _("Ignoring repositories: %s"), ', '.join(error_repos))
|
||||
+
|
||||
+ conf = self.conf
|
||||
+ self._sack._configure(conf.installonlypkgs, conf.installonly_limit, conf.allow_vendor_change)
|
||||
+ self._setup_excludes_includes()
|
||||
+ timer()
|
||||
+ self._goal = dnf.goal.Goal(self._sack)
|
||||
+ self._goal.protect_running_kernel = conf.protect_running_kernel
|
||||
+ self._plugins.run_sack()
|
||||
+ return self._sack
|
||||
+
|
||||
def _finalize_base(self):
|
||||
self._tempfile_persistor = dnf.persistor.TempfilePersistor(
|
||||
self.conf.cachedir)
|
||||
|
||||
From 29ae53918d4a0b65a917aca2f8f43416fee15dfd Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Thu, 10 Dec 2020 14:54:16 +0100
|
||||
Subject: [PATCH 2/2] Add api test for new fill_sack_from_repos_in_cache
|
||||
|
||||
---
|
||||
tests/api/test_dnf_base.py | 6 ++++++
|
||||
1 file changed, 6 insertions(+)
|
||||
|
||||
diff --git a/tests/api/test_dnf_base.py b/tests/api/test_dnf_base.py
|
||||
index 656bd22584..335981897e 100644
|
||||
--- a/tests/api/test_dnf_base.py
|
||||
+++ b/tests/api/test_dnf_base.py
|
||||
@@ -107,6 +107,12 @@ def test_fill_sack(self):
|
||||
|
||||
self.base.fill_sack(load_system_repo=False, load_available_repos=False)
|
||||
|
||||
+ def test_fill_sack_from_repos_in_cache(self):
|
||||
+ # Base.fill_sack_from_repos_in_cache(self, load_system_repo=True):
|
||||
+ self.assertHasAttr(self.base, "fill_sack_from_repos_in_cache")
|
||||
+
|
||||
+ self.base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
def test_close(self):
|
||||
# Base.close()
|
||||
self.assertHasAttr(self.base, "close")
|
@ -0,0 +1,366 @@
|
||||
From a777ff01c79d5e0e2cf3ae7b0652795577253bc3 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Thu, 14 Jan 2021 09:58:30 +0100
|
||||
Subject: [PATCH 1/3] Fix recreate script
|
||||
|
||||
---
|
||||
tests/repos/rpm/recreate | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/tests/repos/rpm/recreate b/tests/repos/rpm/recreate
|
||||
index da348d9799..0fbb9396bd 100755
|
||||
--- a/tests/repos/rpm/recreate
|
||||
+++ b/tests/repos/rpm/recreate
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
-THISDIR="$( readlink -f "$( dirname "$0 )" )"
|
||||
+THISDIR="$( readlink -f "$( dirname "$0" )" )"
|
||||
cd "$THISDIR"
|
||||
git rm -rf repodata/
|
||||
createrepo --no-database -o . ..
|
||||
|
||||
From 5d4c0266f6967c7cd5f0e675b13fa3e9b395e4dd Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Thu, 14 Jan 2021 10:28:53 +0100
|
||||
Subject: [PATCH 2/3] Add unit test for fill_sack_from_repos_in_cache
|
||||
(RhBug:1865803)
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1865803
|
||||
---
|
||||
tests/test_fill_sack_from_repos_in_cache.py | 262 ++++++++++++++++++++
|
||||
1 file changed, 262 insertions(+)
|
||||
create mode 100644 tests/test_fill_sack_from_repos_in_cache.py
|
||||
|
||||
diff --git a/tests/test_fill_sack_from_repos_in_cache.py b/tests/test_fill_sack_from_repos_in_cache.py
|
||||
new file mode 100644
|
||||
index 0000000000..24b0d4598d
|
||||
--- /dev/null
|
||||
+++ b/tests/test_fill_sack_from_repos_in_cache.py
|
||||
@@ -0,0 +1,262 @@
|
||||
+# -*- coding: utf-8 -*-
|
||||
+
|
||||
+# Copyright (C) 2012-2021 Red Hat, Inc.
|
||||
+#
|
||||
+# This copyrighted material is made available to anyone wishing to use,
|
||||
+# modify, copy, or redistribute it subject to the terms and conditions of
|
||||
+# the GNU General Public License v.2, or (at your option) any later version.
|
||||
+# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
+# ANY WARRANTY expressed or implied, including the implied warranties of
|
||||
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
+# Public License for more details. You should have received a copy of the
|
||||
+# GNU General Public License along with this program; if not, write to the
|
||||
+# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
+# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
|
||||
+# source code or documentation are not subject to the GNU General Public
|
||||
+# License and may only be used or replicated with the express permission of
|
||||
+# Red Hat, Inc.
|
||||
+#
|
||||
+
|
||||
+from __future__ import absolute_import
|
||||
+from __future__ import unicode_literals
|
||||
+
|
||||
+import os
|
||||
+import tempfile
|
||||
+import glob
|
||||
+import shutil
|
||||
+import unittest
|
||||
+
|
||||
+import dnf.exceptions
|
||||
+import dnf.repo
|
||||
+import dnf.sack
|
||||
+
|
||||
+import hawkey
|
||||
+
|
||||
+import tests.support
|
||||
+from tests.support import mock
|
||||
+
|
||||
+TEST_REPO_NAME = "test-repo"
|
||||
+
|
||||
+
|
||||
+class FillSackFromReposInCacheTest(unittest.TestCase):
|
||||
+ def _create_cache_for_repo(self, repopath, tmpdir):
|
||||
+ conf = dnf.conf.MainConf()
|
||||
+ conf.cachedir = os.path.join(tmpdir, "cache")
|
||||
+
|
||||
+ base = dnf.Base(conf=conf)
|
||||
+
|
||||
+ repoconf = dnf.repo.Repo(TEST_REPO_NAME, base.conf)
|
||||
+ repoconf.baseurl = repopath
|
||||
+ repoconf.enable()
|
||||
+
|
||||
+ base.repos.add(repoconf)
|
||||
+
|
||||
+ base.fill_sack(load_system_repo=False)
|
||||
+ base.close()
|
||||
+
|
||||
+ def _setUp_from_repo_path(self, original_repo_path):
|
||||
+ self.tmpdir = tempfile.mkdtemp(prefix="dnf_test_")
|
||||
+
|
||||
+ self.repo_copy_path = os.path.join(self.tmpdir, "repo")
|
||||
+ shutil.copytree(original_repo_path, self.repo_copy_path)
|
||||
+
|
||||
+ self._create_cache_for_repo(self.repo_copy_path, self.tmpdir)
|
||||
+
|
||||
+ # Just to be sure remove repo (it shouldn't be used)
|
||||
+ shutil.rmtree(self.repo_copy_path)
|
||||
+
|
||||
+ # Prepare base for the actual test
|
||||
+ conf = dnf.conf.MainConf()
|
||||
+ conf.cachedir = os.path.join(self.tmpdir, "cache")
|
||||
+ self.test_base = dnf.Base(conf=conf)
|
||||
+ repoconf = dnf.repo.Repo(TEST_REPO_NAME, conf)
|
||||
+ repoconf.baseurl = self.repo_copy_path
|
||||
+ repoconf.enable()
|
||||
+ self.test_base.repos.add(repoconf)
|
||||
+
|
||||
+ def tearDown(self):
|
||||
+ self.test_base.close()
|
||||
+ shutil.rmtree(self.tmpdir)
|
||||
+
|
||||
+ def test_with_solv_solvx_repomd(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)), "repos/rpm"))
|
||||
+
|
||||
+ # Remove xml metadata except repomd
|
||||
+ # repomd.xml is not compressed and doesn't end with .gz
|
||||
+ repodata_without_repomd = glob.glob(os.path.join(self.tmpdir, "cache/test-repo-*/repodata/*.gz"))
|
||||
+ for f in repodata_without_repomd:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Now we only have cache with just solv, solvx files and repomd.xml
|
||||
+
|
||||
+ self.test_base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ q = self.test_base.sack.query()
|
||||
+ packages = q.run()
|
||||
+ self.assertEqual(len(packages), 9)
|
||||
+ self.assertEqual(packages[0].evr, "4-4")
|
||||
+
|
||||
+ # Use *-updateinfo.solvx
|
||||
+ adv_pkgs = q.get_advisory_pkgs(hawkey.LT | hawkey.EQ | hawkey.GT)
|
||||
+ adv_titles = set()
|
||||
+ for pkg in adv_pkgs:
|
||||
+ adv_titles.add(pkg.get_advisory(self.test_base.sack).title)
|
||||
+ self.assertEqual(len(adv_titles), 3)
|
||||
+
|
||||
+ def test_with_just_solv_repomd(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)), "repos/rpm"))
|
||||
+
|
||||
+ # Remove xml metadata except repomd
|
||||
+ # repomd.xml is not compressed and doesn't end with .gz
|
||||
+ repodata_without_repomd = glob.glob(os.path.join(self.tmpdir, "cache/test-repo-*/repodata/*.gz"))
|
||||
+ for f in repodata_without_repomd:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Remove solvx files
|
||||
+ solvx = glob.glob(os.path.join(self.tmpdir, "cache/*.solvx"))
|
||||
+ for f in solvx:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Now we only have cache with just solv files and repomd.xml
|
||||
+
|
||||
+ self.test_base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ q = self.test_base.sack.query()
|
||||
+ packages = q.run()
|
||||
+ self.assertEqual(len(packages), 9)
|
||||
+ self.assertEqual(packages[0].evr, "4-4")
|
||||
+
|
||||
+ # No *-updateinfo.solvx -> we get no advisory packages
|
||||
+ adv_pkgs = q.get_advisory_pkgs(hawkey.LT | hawkey.EQ | hawkey.GT)
|
||||
+ self.assertEqual(len(adv_pkgs), 0)
|
||||
+
|
||||
+ def test_with_xml_metadata(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)), "repos/rpm"))
|
||||
+
|
||||
+ # Remove all solv and solvx files
|
||||
+ solvx = glob.glob(os.path.join(self.tmpdir, "cache/*.solv*"))
|
||||
+ for f in solvx:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Now we only have cache with just xml metadata
|
||||
+
|
||||
+ self.test_base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ q = self.test_base.sack.query()
|
||||
+ packages = q.run()
|
||||
+ self.assertEqual(len(packages), 9)
|
||||
+ self.assertEqual(packages[0].evr, "4-4")
|
||||
+
|
||||
+ def test_exception_without_repomd(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)), "repos/rpm"))
|
||||
+
|
||||
+ # Remove xml metadata
|
||||
+ repodata_without_repomd = glob.glob(os.path.join(self.tmpdir, "cache/test-repo-*/repodata/*"))
|
||||
+ for f in repodata_without_repomd:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Now we only have cache with just solv and solvx files
|
||||
+ # Since we don't have repomd we cannot verify checksums -> fail (exception)
|
||||
+
|
||||
+ self.assertRaises(dnf.exceptions.RepoError,
|
||||
+ self.test_base.fill_sack_from_repos_in_cache, load_system_repo=False)
|
||||
+
|
||||
+ def test_exception_with_just_repomd(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)), "repos/rpm"))
|
||||
+
|
||||
+ # Remove xml metadata except repomd
|
||||
+ # repomd.xml is not compressed and doesn't end with .gz
|
||||
+ repodata_without_repomd = glob.glob(os.path.join(self.tmpdir, "cache/test-repo-*/repodata/*.gz"))
|
||||
+ for f in repodata_without_repomd:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Remove all solv and solvx files
|
||||
+ solvx = glob.glob(os.path.join(self.tmpdir, "cache/*.solv*"))
|
||||
+ for f in solvx:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Now we only have cache with just repomd
|
||||
+ # repomd is not enough, it doesn't contain the metadata itself -> fail (exception)
|
||||
+
|
||||
+ self.assertRaises(dnf.exceptions.RepoError,
|
||||
+ self.test_base.fill_sack_from_repos_in_cache, load_system_repo=False)
|
||||
+
|
||||
+ def test_exception_with_checksum_mismatch_and_only_repomd(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)), "repos/rpm"))
|
||||
+
|
||||
+ # Remove xml metadata except repomd
|
||||
+ # repomd.xml is not compressed and doesn't end with .gz
|
||||
+ repodata_without_repomd = glob.glob(os.path.join(self.tmpdir, "cache/test-repo-*/repodata/*.gz"))
|
||||
+ for f in repodata_without_repomd:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Modify checksum of solv file so it doesn't match with repomd
|
||||
+ solv = glob.glob(os.path.join(self.tmpdir, "cache/*.solv"))[0]
|
||||
+ with open(solv, "a") as opensolv:
|
||||
+ opensolv.write("appended text to change checksum")
|
||||
+
|
||||
+ # Now we only have cache with solvx, modified solv file and just repomd
|
||||
+ # Since we don't have original xml metadata we cannot regenerate solv -> fail (exception)
|
||||
+
|
||||
+ self.assertRaises(dnf.exceptions.RepoError,
|
||||
+ self.test_base.fill_sack_from_repos_in_cache, load_system_repo=False)
|
||||
+
|
||||
+ def test_checksum_mistmatch_regenerates_solv(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)), "repos/rpm"))
|
||||
+
|
||||
+ # Modify checksum of solv file so it doesn't match with repomd
|
||||
+ solv = glob.glob(os.path.join(self.tmpdir, "cache/*.solv"))[0]
|
||||
+ with open(solv, "a") as opensolv:
|
||||
+ opensolv.write("appended text to change checksum")
|
||||
+
|
||||
+ # Now we only have cache with solvx, modified solv file and xml metadata.
|
||||
+ # Checksum mismatch causes regeneration of solv file and repo works.
|
||||
+
|
||||
+ self.test_base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ q = self.test_base.sack.query()
|
||||
+ packages = q.run()
|
||||
+ self.assertEqual(len(packages), 9)
|
||||
+ self.assertEqual(packages[0].evr, "4-4")
|
||||
+
|
||||
+ def test_with_modules_yaml(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)),
|
||||
+ "modules/modules/_all/x86_64"))
|
||||
+
|
||||
+ # Now we have full cache (also with modules.yaml)
|
||||
+
|
||||
+ self.test_base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ q = self.test_base.sack.query()
|
||||
+ packages = q.run()
|
||||
+ self.assertEqual(len(packages), 8)
|
||||
+ self.assertEqual(packages[0].evr, "2.02-0.40")
|
||||
+
|
||||
+ self.module_base = dnf.module.module_base.ModuleBase(self.test_base)
|
||||
+ modules, _ = self.module_base._get_modules("base-runtime*")
|
||||
+ self.assertEqual(len(modules), 3)
|
||||
+ self.assertEqual(modules[0].getFullIdentifier(), "base-runtime:f26:1::")
|
||||
+
|
||||
+ def test_with_modular_repo_without_modules_yaml(self):
|
||||
+ self._setUp_from_repo_path(os.path.join(os.path.abspath(os.path.dirname(__file__)),
|
||||
+ "modules/modules/_all/x86_64"))
|
||||
+
|
||||
+ # Remove xml and yaml metadata except repomd
|
||||
+ # repomd.xml is not compressed and doesn't end with .gz
|
||||
+ repodata_without_repomd = glob.glob(os.path.join(self.tmpdir, "cache/test-repo-*/repodata/*.gz"))
|
||||
+ for f in repodata_without_repomd:
|
||||
+ os.remove(f)
|
||||
+
|
||||
+ # Now we have just solv, *-filenames.solvx and repomd.xml (modules.yaml are not processed into *-modules.solvx)
|
||||
+
|
||||
+ self.test_base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ q = self.test_base.sack.query()
|
||||
+ packages = q.run()
|
||||
+ # We have many more packages because they are not hidden by modules
|
||||
+ self.assertEqual(len(packages), 44)
|
||||
+ self.assertEqual(packages[0].evr, "10.0-7")
|
||||
+
|
||||
+ self.module_base = dnf.module.module_base.ModuleBase(self.test_base)
|
||||
+ modules, _ = self.module_base._get_modules("base-runtime*")
|
||||
+ self.assertEqual(len(modules), 0)
|
||||
|
||||
From de6177dba3dc20191e275eec14672570a0c4f4a8 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Thu, 14 Jan 2021 12:29:06 +0100
|
||||
Subject: [PATCH 3/3] Add docs and examples for fill_sack_from_repos_in_cache
|
||||
(RhBug:1865803)
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1865803
|
||||
---
|
||||
doc/api_base.rst | 41 +++++++++++++++++++++++++++++++++++++++++
|
||||
1 file changed, 41 insertions(+)
|
||||
|
||||
diff --git a/doc/api_base.rst b/doc/api_base.rst
|
||||
index 24ecb50e43..f0b1992e88 100644
|
||||
--- a/doc/api_base.rst
|
||||
+++ b/doc/api_base.rst
|
||||
@@ -111,6 +111,47 @@
|
||||
print("id: {}".format(repo.id))
|
||||
print("baseurl: {}".format(repo.baseurl))
|
||||
|
||||
+ .. method:: fill_sack_from_repos_in_cache(load_system_repo=True)
|
||||
+
|
||||
+ Prepare Sack and Goal objects and load all enabled repositories from cache only; it doesn't download anything and it doesn't check if metadata are expired.
|
||||
+ To successfully load a repository cache it requires repomd.xml plus metadata (xml, yaml) or repomd.xml plus generated cache files (solv, solvx).
|
||||
+ If there is not enough metadata, the given repo is either skipped or it throws a :exc:`dnf.exceptions.RepoError` exception depending on the :attr:`dnf.conf.Conf.skip_if_unavailable` configuration.
|
||||
+
|
||||
+ All additional metadata are loaded if present but are not generally required. Note that some metadata like updateinfo.xml get processed into a solvx cache file and it's sufficient to have either xml or solvx. Module metadata represented by modules.yaml are not processed, therefore they are needed when they are defined in repomd.xml.
|
||||
+
|
||||
+ Example of loading all configured repositories from cache and printing available packages' names::
|
||||
+
|
||||
+ #!/usr/bin/python3
|
||||
+ import dnf
|
||||
+
|
||||
+ with dnf.Base() as base:
|
||||
+ base.read_all_repos()
|
||||
+
|
||||
+ base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ query = base.sack.query().available()
|
||||
+ for pkg in query.run():
|
||||
+ print(pkg.name)
|
||||
+
|
||||
+ Example of loading a single repository and printing available packages' names without reading repository configuration::
|
||||
+
|
||||
+ #!/usr/bin/python3
|
||||
+ import dnf
|
||||
+
|
||||
+ with dnf.Base() as base:
|
||||
+ repo = dnf.repo.Repo("rawhide", base.conf)
|
||||
+
|
||||
+ # Repository cache is also identified by its source, therefore to find it you need to
|
||||
+ # set metalink, mirrorlist or baseurl to the same value from which it was created.
|
||||
+ repo.metalink = "https://mirrors.fedoraproject.org/metalink?repo=rawhide&arch=x86_64"
|
||||
+
|
||||
+ base.repos.add(repo)
|
||||
+
|
||||
+ base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
||||
+
|
||||
+ query = base.sack.query().available()
|
||||
+ for pkg in query.run():
|
||||
+ print(pkg.name)
|
||||
|
||||
.. method:: do_transaction([display])
|
||||
|
@ -0,0 +1,43 @@
|
||||
From 291071a937a1de398641f02002413678398e473c Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Mon, 8 Feb 2021 08:25:46 +0100
|
||||
Subject: [PATCH] Run tests for fill_sack_from_repos_in_cache in installroot
|
||||
(RhBug:1865803)
|
||||
|
||||
This prevents loading data (like failsafe) from the host.
|
||||
|
||||
It also allows testing that there are no modules in the installroot, not just
|
||||
no base-runtime* in test_with_modular_repo_without_modules_yaml.
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1865803
|
||||
---
|
||||
tests/test_fill_sack_from_repos_in_cache.py | 4 +++-
|
||||
1 file changed, 3 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/tests/test_fill_sack_from_repos_in_cache.py b/tests/test_fill_sack_from_repos_in_cache.py
|
||||
index 24b0d4598d..f27235bf84 100644
|
||||
--- a/tests/test_fill_sack_from_repos_in_cache.py
|
||||
+++ b/tests/test_fill_sack_from_repos_in_cache.py
|
||||
@@ -42,6 +42,7 @@ class FillSackFromReposInCacheTest(unittest.TestCase):
|
||||
def _create_cache_for_repo(self, repopath, tmpdir):
|
||||
conf = dnf.conf.MainConf()
|
||||
conf.cachedir = os.path.join(tmpdir, "cache")
|
||||
+ conf.installroot = os.path.join(tmpdir)
|
||||
|
||||
base = dnf.Base(conf=conf)
|
||||
|
||||
@@ -68,6 +69,7 @@ def _setUp_from_repo_path(self, original_repo_path):
|
||||
# Prepare base for the actual test
|
||||
conf = dnf.conf.MainConf()
|
||||
conf.cachedir = os.path.join(self.tmpdir, "cache")
|
||||
+ conf.installroot = os.path.join(self.tmpdir)
|
||||
self.test_base = dnf.Base(conf=conf)
|
||||
repoconf = dnf.repo.Repo(TEST_REPO_NAME, conf)
|
||||
repoconf.baseurl = self.repo_copy_path
|
||||
@@ -258,5 +260,5 @@ def test_with_modular_repo_without_modules_yaml(self):
|
||||
self.assertEqual(packages[0].evr, "10.0-7")
|
||||
|
||||
self.module_base = dnf.module.module_base.ModuleBase(self.test_base)
|
||||
- modules, _ = self.module_base._get_modules("base-runtime*")
|
||||
+ modules, _ = self.module_base._get_modules("*")
|
||||
self.assertEqual(len(modules), 0)
|
@ -0,0 +1,61 @@
|
||||
From 40e762da5cd2d876b6424f4c25b77e8dc2422a0f Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Mon, 8 Feb 2021 08:25:46 +0100
|
||||
Subject: [PATCH] Set persistdir and substitutions for
|
||||
fill_sack_from_repos_in_cache tests (RhBug:1865803)
|
||||
|
||||
Setting just installroot is not enough because persistdir is not
|
||||
automatically prepended with installroot if set via the API.
|
||||
|
||||
Also assert exact package names, which is more useful output in case the
|
||||
test fails.
|
||||
|
||||
https://bugzilla.redhat.com/show_bug.cgi?id=1865803
|
||||
---
|
||||
tests/test_fill_sack_from_repos_in_cache.py | 19 +++++++++++++++----
|
||||
1 file changed, 15 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/tests/test_fill_sack_from_repos_in_cache.py b/tests/test_fill_sack_from_repos_in_cache.py
|
||||
index f27235bf84..23fd2a4337 100644
|
||||
--- a/tests/test_fill_sack_from_repos_in_cache.py
|
||||
+++ b/tests/test_fill_sack_from_repos_in_cache.py
|
||||
@@ -42,7 +42,10 @@ class FillSackFromReposInCacheTest(unittest.TestCase):
|
||||
def _create_cache_for_repo(self, repopath, tmpdir):
|
||||
conf = dnf.conf.MainConf()
|
||||
conf.cachedir = os.path.join(tmpdir, "cache")
|
||||
- conf.installroot = os.path.join(tmpdir)
|
||||
+ conf.installroot = tmpdir
|
||||
+ conf.persistdir = os.path.join(conf.installroot, conf.persistdir.lstrip("/"))
|
||||
+ conf.substitutions["arch"] = "x86_64"
|
||||
+ conf.substitutions["basearch"] = dnf.rpm.basearch(conf.substitutions["arch"])
|
||||
|
||||
base = dnf.Base(conf=conf)
|
||||
|
||||
@@ -69,7 +72,10 @@ def _setUp_from_repo_path(self, original_repo_path):
|
||||
# Prepare base for the actual test
|
||||
conf = dnf.conf.MainConf()
|
||||
conf.cachedir = os.path.join(self.tmpdir, "cache")
|
||||
- conf.installroot = os.path.join(self.tmpdir)
|
||||
+ conf.installroot = self.tmpdir
|
||||
+ conf.persistdir = os.path.join(conf.installroot, conf.persistdir.lstrip("/"))
|
||||
+ conf.substitutions["arch"] = "x86_64"
|
||||
+ conf.substitutions["basearch"] = dnf.rpm.basearch(conf.substitutions["arch"])
|
||||
self.test_base = dnf.Base(conf=conf)
|
||||
repoconf = dnf.repo.Repo(TEST_REPO_NAME, conf)
|
||||
repoconf.baseurl = self.repo_copy_path
|
||||
@@ -231,8 +237,13 @@ def test_with_modules_yaml(self):
|
||||
|
||||
q = self.test_base.sack.query()
|
||||
packages = q.run()
|
||||
- self.assertEqual(len(packages), 8)
|
||||
- self.assertEqual(packages[0].evr, "2.02-0.40")
|
||||
+
|
||||
+ pkg_names = []
|
||||
+ for pkg in packages:
|
||||
+ pkg_names.append(pkg.name)
|
||||
+
|
||||
+ self.assertEqual(pkg_names, ['grub2', 'httpd', 'httpd', 'httpd-doc', 'httpd-doc', 'httpd-provides-name-doc',
|
||||
+ 'httpd-provides-name-version-release-doc', 'libnghttp2'])
|
||||
|
||||
self.module_base = dnf.module.module_base.ModuleBase(self.test_base)
|
||||
modules, _ = self.module_base._get_modules("base-runtime*")
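
The commit message is the key point here: when installroot is set through the API, persistdir and the arch substitutions are not adjusted automatically. A minimal sketch of the setup the test performs (the temporary directory is illustrative)::

    #!/usr/bin/python3
    import os
    import tempfile

    import dnf
    import dnf.conf
    import dnf.rpm

    tmpdir = tempfile.mkdtemp(prefix="dnf_sketch_")
    conf = dnf.conf.MainConf()
    conf.cachedir = os.path.join(tmpdir, "cache")
    conf.installroot = tmpdir
    # persistdir must be prefixed with installroot by hand when it is set via the API
    conf.persistdir = os.path.join(conf.installroot, conf.persistdir.lstrip("/"))
    conf.substitutions["arch"] = "x86_64"
    conf.substitutions["basearch"] = dnf.rpm.basearch(conf.substitutions["arch"])
    base = dnf.Base(conf=conf)
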
|
56
SOURCES/0018-Allow-stream-switching-if-option-enabled.patch
Normal file
@ -0,0 +1,56 @@
|
||||
From 9ceb74f77479910f7844a9a87d4b7623687076be Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
||||
Date: Fri, 24 Jul 2020 07:59:38 +0200
|
||||
Subject: [PATCH] Allow stream switching if option enabled
|
||||
|
||||
= changelog =
|
||||
msg: New config option module_stream_switch allows switching enabled streams
|
||||
type: enhancement
|
||||
---
|
||||
dnf.spec | 2 +-
|
||||
dnf/cli/cli.py | 19 ++++++++++---------
|
||||
2 files changed, 11 insertions(+), 10 deletions(-)
|
||||
|
||||
diff --git a/dnf.spec b/dnf.spec
|
||||
index 0e63b2b422..04f6f104c7 100644
|
||||
--- a/dnf.spec
|
||||
+++ b/dnf.spec
|
||||
@@ -2,7 +2,7 @@
|
||||
%undefine __cmake_in_source_build
|
||||
|
||||
# default dependencies
|
||||
-%global hawkey_version 0.54.4
|
||||
+%global hawkey_version 0.55.0
|
||||
%global libcomps_version 0.1.8
|
||||
%global libmodulemd_version 1.4.0
|
||||
%global rpm_version 4.14.0
|
||||
diff --git a/dnf/cli/cli.py b/dnf/cli/cli.py
|
||||
index be737ed3b7..29d7373fa3 100644
|
||||
--- a/dnf/cli/cli.py
|
||||
+++ b/dnf/cli/cli.py
|
||||
@@ -166,15 +166,16 @@ def do_transaction(self, display=()):
|
||||
:return: history database transaction ID or None
|
||||
"""
|
||||
if dnf.base.WITH_MODULES:
|
||||
- switchedModules = dict(self._moduleContainer.getSwitchedStreams())
|
||||
- if switchedModules:
|
||||
- report_module_switch(switchedModules)
|
||||
- msg = _("It is not possible to switch enabled streams of a module.\n"
|
||||
- "It is recommended to remove all installed content from the module, and "
|
||||
- "reset the module using '{prog} module reset <module_name>' command. After "
|
||||
- "you reset the module, you can install the other stream.").format(
|
||||
- prog=dnf.util.MAIN_PROG)
|
||||
- raise dnf.exceptions.Error(msg)
|
||||
+ if not self.conf.module_stream_switch:
|
||||
+ switchedModules = dict(self._moduleContainer.getSwitchedStreams())
|
||||
+ if switchedModules:
|
||||
+ report_module_switch(switchedModules)
|
||||
+ msg = _("It is not possible to switch enabled streams of a module.\n"
|
||||
+ "It is recommended to remove all installed content from the module, and "
|
||||
+ "reset the module using '{prog} module reset <module_name>' command. After "
|
||||
+ "you reset the module, you can install the other stream.").format(
|
||||
+ prog=dnf.util.MAIN_PROG)
|
||||
+ raise dnf.exceptions.Error(msg)
|
||||
|
||||
trans = self.transaction
|
||||
pkg_str = self.output.list_transaction(trans)
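
With this change an API consumer can opt in to stream switching before running the transaction. A hedged sketch, assuming module_stream_switch is exposed on the configuration exactly as the patch reads it::

    #!/usr/bin/python3
    import dnf

    with dnf.Base() as base:
        base.conf.module_stream_switch = True  # allow switching enabled module streams
        base.read_all_repos()
        base.fill_sack()
        # ... enable the new stream and resolve the transaction here ...
        # do_transaction() will no longer refuse the switched streams
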
|
2709
SPECS/dnf.spec
Normal file
File diff suppressed because it is too large