Update ELevate patch

Andrew Lukoshko 2022-08-22 12:07:15 +00:00
parent 4bd6dbbadd
commit 93f6a68056
1 changed file with 455 additions and 165 deletions


@@ -188,15 +188,15 @@ index bb89c9f..2b8e7c8 100644
'--args',
diff --git a/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py
new file mode 100644
index 0000000..51d0c25
index 0000000..5284aec
--- /dev/null
+++ b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py
@@ -0,0 +1,55 @@
@@ -0,0 +1,53 @@
+from leapp.actors import Actor
+from leapp.libraries.stdlib import api
+from leapp.models import (
+ RepositoriesFacts,
+ VendorRepositoriesMapCollection,
+ VendorSourceRepos,
+ ActiveVendorList,
+)
+from leapp.tags import FactsPhaseTag, IPUWorkflowTag
@@ -210,20 +210,18 @@ index 0000000..51d0c25
+ """ + """
+ +
+ name = "check_enabled_vendor_repos" + name = "check_enabled_vendor_repos"
+ consumes = (RepositoriesFacts, VendorRepositoriesMapCollection) + consumes = (RepositoriesFacts, VendorSourceRepos)
+ produces = (ActiveVendorList) + produces = (ActiveVendorList)
+ tags = (IPUWorkflowTag, FactsPhaseTag.Before) + tags = (IPUWorkflowTag, FactsPhaseTag.Before)
+ +
+ def process(self): + def process(self):
+ vendor_mapping_data = {} + vendor_mapping_data = {}
+ active_vendors = [] + active_vendors = set()
+ +
+ # Make a dict for easy lookup of repoid -> vendor name. + # Make a dict for easy lookup of repoid -> vendor name.
+ for map_coll in api.consume(VendorRepositoriesMapCollection): + for vendor_src_repodata in api.consume(VendorSourceRepos):
+ for map in map_coll.maps: + for vendor_src_repo in vendor_src_repodata.source_repoids:
+ for repo in map.repositories: + vendor_mapping_data[vendor_src_repo] = vendor_src_repodata.vendor
+ # Cut the .csv, keep only the vendor name.
+ vendor_mapping_data[repo.from_repoid] = map.file[:-4]
+ +
+ # Is the repo listed in the vendor map as from_repoid present on the system? + # Is the repo listed in the vendor map as from_repoid present on the system?
+ for repos in api.consume(RepositoriesFacts): + for repos in api.consume(RepositoriesFacts):
@@ -240,11 +238,11 @@ index 0000000..51d0c25
+ repo.repoid, new_vendor
+ )
+ )
+ active_vendors.append(new_vendor)
+ active_vendors.add(new_vendor)
+
+ if active_vendors:
+ self.log.debug("Active vendor list: {}".format(active_vendors))
+ api.produce(ActiveVendorList(data=active_vendors))
+ api.produce(ActiveVendorList(data=list(active_vendors)))
+ else:
+ self.log.info("No active vendors found, vendor list not generated")
diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh
@@ -300,7 +298,7 @@ index edf978f..7fea4ec 100644
variant_id=data.get('VARIANT_ID', '').strip('"') or None
)
diff --git a/repos/system_upgrade/common/actors/peseventsscanner/actor.py b/repos/system_upgrade/common/actors/peseventsscanner/actor.py
index fadf76b..b86d364 100644
index fadf76b..7ef2664 100644
--- a/repos/system_upgrade/common/actors/peseventsscanner/actor.py
+++ b/repos/system_upgrade/common/actors/peseventsscanner/actor.py
@@ -1,3 +1,6 @@
@@ -327,7 +325,7 @@ index fadf76b..b86d364 100644
class PesEventsScanner(Actor):
"""
@@ -32,9 +39,21 @@ class PesEventsScanner(Actor):
@@ -32,9 +39,22 @@ class PesEventsScanner(Actor):
RepositoriesMapping,
RHUIInfo,
RpmTransactionTasks,
@@ -344,17 +342,55 @@ index fadf76b..b86d364 100644
+ for vendor_list in self.consume(ActiveVendorList):
+ active_vendors.extend(vendor_list.data)
+
+ pes_json_suffix = "_pes.json"
+ if os.path.isdir(VENDORS_DIR):
+ vendor_pesfiles = list(filter(lambda vfile: ".json" in vfile, os.listdir(VENDORS_DIR)))
+ vendor_pesfiles = list(filter(lambda vfile: pes_json_suffix in vfile, os.listdir(VENDORS_DIR)))
+
+ for pesfile in vendor_pesfiles:
+ if pesfile[:-5] in active_vendors:
+ if pesfile[:-len(pes_json_suffix)] in active_vendors:
+ pes_events_scanner(VENDORS_DIR, pesfile)
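The suffix check above implies a naming convention: a vendor PES file under /etc/leapp/files/vendors.d is only scanned when its name, minus the "_pes.json" suffix, matches an active vendor. A short sketch (the file name is an invented example):

# Illustration of the filename convention used by the hunk above.
pes_json_suffix = "_pes.json"
pesfile = "mariadb_pes.json"
vendor = pesfile[:-len(pes_json_suffix)]
assert vendor == "mariadb"  # scanned only if "mariadb" appears in ActiveVendorList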
diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py
index 1be2caa..8e5ca07 100644
index 1be2caa..072de17 100644
--- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py
+++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py
@@ -324,7 +324,7 @@ def parse_pes_events(json_data): @@ -138,19 +138,26 @@ def _get_repositories_mapping(target_pesids):
:return: Dictionary with all repositories mapped.
"""
- repositories_map_msgs = api.consume(RepositoriesMapping)
- repositories_map_msg = next(repositories_map_msgs, None)
- if list(repositories_map_msgs):
- api.current_logger().warning('Unexpectedly received more than one RepositoriesMapping message.')
- if not repositories_map_msg:
- raise StopActorExecutionError(
- 'Cannot parse RepositoriesMapping data properly',
- details={'Problem': 'Did not receive a message with mapped repositories'}
- )
+ composite_mapping = []
+ composite_repos = []
+
+ for repomap_msg in api.consume(RepositoriesMapping):
+ if not repomap_msg:
+ raise StopActorExecutionError(
+ 'Cannot parse RepositoriesMapping data properly',
+ details={'Problem': 'Received a blank message with mapped repositories'}
+ )
+ composite_mapping.extend(repomap_msg.mapping)
+ composite_repos.extend(repomap_msg.repositories)
+
+ composite_map_msg = RepositoriesMapping(
+ mapping=composite_mapping,
+ repositories=composite_repos
+ )
rhui_info = next(api.consume(RHUIInfo), RHUIInfo(provider=''))
- repomap = peseventsscanner_repomap.RepoMapDataHandler(repositories_map_msg, cloud_provider=rhui_info.provider)
+ repomap = peseventsscanner_repomap.RepoMapDataHandler(composite_map_msg, cloud_provider=rhui_info.provider)
# NOTE: We have to calculate expected target repositories
# like in the setuptargetrepos actor. It's planned to handle this in different
# way in future...
@@ -324,7 +331,7 @@ def parse_pes_events(json_data):
:return: List of Event tuples, where each event contains event type and input/output pkgs :return: List of Event tuples, where each event contains event type and input/output pkgs
""" """
data = json.loads(json_data) data = json.loads(json_data)
@@ -507,6 +543,186 @@ index 01f6df3..4ba05f0 100644
self.produce(signed_pkgs)
self.produce(unsigned_pkgs)
diff --git a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py
index b2d00f3..e9458c5 100644
--- a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py
+++ b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py
@@ -1,12 +1,9 @@
-from collections import defaultdict
-import json
import os
-from leapp.exceptions import StopActorExecutionError
from leapp.libraries.common.config.version import get_target_major_version, get_source_major_version
-from leapp.libraries.common.fetch import read_or_fetch
+from leapp.libraries.common.repomaputils import RepoMapData, read_repofile, inhibit_upgrade
from leapp.libraries.stdlib import api
-from leapp.models import RepositoriesMapping, PESIDRepositoryEntry, RepoMapEntry
+from leapp.models import RepositoriesMapping
from leapp.models.fields import ModelViolationError
OLD_REPOMAP_FILE = 'repomap.csv'
@@ -16,144 +13,9 @@ REPOMAP_FILE = 'repomap.json'
"""The name of the new repository mapping file."""
-class RepoMapData(object):
- VERSION_FORMAT = '1.0.0'
-
- def __init__(self):
- self.repositories = []
- self.mapping = {}
-
- def add_repository(self, data, pesid):
- """
- Add new PESIDRepositoryEntry with given pesid from the provided dictionary.
-
- :param data: A dict containing the data of the added repository. The dictionary structure corresponds
- to the repositories entries in the repository mapping JSON schema.
- :type data: Dict[str, str]
- :param pesid: PES id of the repository family that the newly added repository belongs to.
- :type pesid: str
- """
- self.repositories.append(PESIDRepositoryEntry(
- repoid=data['repoid'],
- channel=data['channel'],
- rhui=data.get('rhui', ''),
- repo_type=data['repo_type'],
- arch=data['arch'],
- major_version=data['major_version'],
- pesid=pesid
- ))
-
- def get_repositories(self, valid_major_versions):
- """
- Return the list of PESIDRepositoryEntry object matching the specified major versions.
- """
- return [repo for repo in self.repositories if repo.major_version in valid_major_versions]
-
- def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid):
- """
- Add a new mapping entry that is mapping the source pesid to the destination pesid(s),
- relevant in an IPU from the supplied source major version to the supplied target
- major version.
-
- :param str source_major_version: Specifies the major version of the source system
- for which the added mapping applies.
- :param str target_major_version: Specifies the major version of the target system
- for which the added mapping applies.
- :param str source_pesid: PESID of the source repository.
- :param Union[str|List[str]] target_pesid: A single target PESID or a list of target
- PESIDs of the added mapping.
- """
- # NOTE: it could be more simple, but I prefer to be sure the input data
- # contains just one map per source PESID.
- key = '{}:{}'.format(source_major_version, target_major_version)
- rmap = self.mapping.get(key, defaultdict(set))
- self.mapping[key] = rmap
- if isinstance(target_pesid, list):
- rmap[source_pesid].update(target_pesid)
- else:
- rmap[source_pesid].add(target_pesid)
-
- def get_mappings(self, src_major_version, dst_major_version):
- """
- Return the list of RepoMapEntry objects for the specified upgrade path.
-
- IOW, the whole mapping for specified IPU.
- """
- key = '{}:{}'.format(src_major_version, dst_major_version)
- rmap = self.mapping.get(key, None)
- if not rmap:
- return None
- map_list = []
- for src_pesid in sorted(rmap.keys()):
- map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid])))
- return map_list
-
- @staticmethod
- def load_from_dict(data):
- if data['version_format'] != RepoMapData.VERSION_FORMAT:
- raise ValueError(
- 'The obtained repomap data has unsupported version of format.'
- ' Get {} required {}'
- .format(data['version_format'], RepoMapData.VERSION_FORMAT)
- )
-
- repomap = RepoMapData()
-
- # Load reposiories
- existing_pesids = set()
- for repo_family in data['repositories']:
- existing_pesids.add(repo_family['pesid'])
- for repo in repo_family['entries']:
- repomap.add_repository(repo, repo_family['pesid'])
-
- # Load mappings
- for mapping in data['mapping']:
- for entry in mapping['entries']:
- if not isinstance(entry['target'], list):
- raise ValueError(
- 'The target field of a mapping entry is not a list: {}'
- .format(entry)
- )
-
- for pesid in [entry['source']] + entry['target']:
- if pesid not in existing_pesids:
- raise ValueError(
- 'The {} pesid is not related to any repository.'
- .format(pesid)
- )
- repomap.add_mapping(
- source_major_version=mapping['source_major_version'],
- target_major_version=mapping['target_major_version'],
- source_pesid=entry['source'],
- target_pesid=entry['target'],
- )
- return repomap
-
-
-def _inhibit_upgrade(msg):
- raise StopActorExecutionError(
- msg,
- details={'hint': ('Read documentation at the following link for more'
- ' information about how to retrieve the valid file:'
- ' https://access.redhat.com/articles/3664871')})
-
-
-def _read_repofile(repofile):
- # NOTE: what about catch StopActorExecution error when the file cannot be
- # obtained -> then check whether old_repomap file exists and in such a case
- # inform user they have to provde the new repomap.json file (we have the
- # warning now only which could be potentially overlooked)
- try:
- return json.loads(read_or_fetch(repofile))
- except ValueError:
- # The data does not contain a valid json
- _inhibit_upgrade('The repository mapping file is invalid: file does not contain a valid JSON object.')
- return None # Avoids inconsistent-return-statements warning
-
-
-def scan_repositories(read_repofile_func=_read_repofile):
+def scan_repositories(read_repofile_func=read_repofile):
"""
- Scan the repository mapping file and produce RepositoriesMap msg.
+ Scan the repository mapping file and produce RepositoriesMapping msg.
See the description of the actor for more details.
"""
@@ -185,10 +47,10 @@ def scan_repositories(read_repofile_func=_read_repofile):
'the JSON does not match required schema (wrong field type/value): {}'
.format(err)
)
- _inhibit_upgrade(err_message)
+ inhibit_upgrade(err_message)
except KeyError as err:
- _inhibit_upgrade(
+ inhibit_upgrade(
'The repository mapping file is invalid: the JSON is missing a required field: {}'.format(err))
except ValueError as err:
# The error should contain enough information, so we do not need to clarify it further
- _inhibit_upgrade('The repository mapping file is invalid: {}'.format(err))
+ inhibit_upgrade('The repository mapping file is invalid: {}'.format(err))
diff --git a/repos/system_upgrade/common/actors/repositoriesmapping/tests/unit_test_repositoriesmapping.py b/repos/system_upgrade/common/actors/repositoriesmapping/tests/unit_test_repositoriesmapping.py
index 3c0b04b..3480432 100644
--- a/repos/system_upgrade/common/actors/repositoriesmapping/tests/unit_test_repositoriesmapping.py
@@ -871,13 +1087,14 @@ index 0000000..f74de27
+ )
diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py
new file mode 100644
index 0000000..156d78c
index 0000000..1325647
--- /dev/null
+++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py
@@ -0,0 +1,22 @@
@@ -0,0 +1,19 @@
+from leapp.actors import Actor
+from leapp.libraries.common.repomaputils import scan_vendor_repomaps, VENDOR_REPOMAP_DIR
+from leapp.models import VendorRepositoriesMapCollection, RepositoriesMap
+# from leapp.libraries.common.repomaputils import scan_vendor_repomaps, VENDOR_REPOMAP_DIR
+from leapp.libraries.actor.vendorrepositoriesmapping import scan_vendor_repomaps
+from leapp.models import VendorSourceRepos, RepositoriesMapping
+from leapp.tags import FactsPhaseTag, IPUWorkflowTag
+
+
@@ -888,15 +1105,83 @@ index 0000000..156d78c
+
+ name = "vendor_repositories_mapping"
+ consumes = ()
+ produces = (RepositoriesMap, VendorRepositoriesMapCollection,)
+ produces = (RepositoriesMapping, VendorSourceRepos,)
+ tags = (IPUWorkflowTag, FactsPhaseTag.Before)
+
+ def process(self):
+ vendor_repomap_collection = scan_vendor_repomaps(VENDOR_REPOMAP_DIR)
+ if vendor_repomap_collection:
+ self.produce(vendor_repomap_collection)
+ for repomap in vendor_repomap_collection.maps:
+ self.produce(repomap)
+ scan_vendor_repomaps()
diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py
new file mode 100644
index 0000000..204d0dc
--- /dev/null
+++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py
@@ -0,0 +1,66 @@
+import os
+
+from leapp.libraries.common.config.version import get_target_major_version, get_source_major_version
+from leapp.libraries.common.repomaputils import RepoMapData, read_repofile, inhibit_upgrade
+from leapp.libraries.stdlib import api
+from leapp.models import VendorSourceRepos, RepositoriesMapping
+from leapp.models.fields import ModelViolationError
+
+
+VENDORS_DIR = "/etc/leapp/files/vendors.d"
+"""The folder containing the vendor repository mapping files."""
+
+
+def read_repomap_file(repomap_file, read_repofile_func, vendor_name):
+ json_data = read_repofile_func(repomap_file, VENDORS_DIR)
+ try:
+ repomap_data = RepoMapData.load_from_dict(json_data)
+
+ api.produce(VendorSourceRepos(
+ vendor=vendor_name,
+ source_repoids=repomap_data.get_version_repoids(get_source_major_version())
+ ))
+
+ mapping = repomap_data.get_mappings(get_source_major_version(), get_target_major_version())
+ valid_major_versions = [get_source_major_version(), get_target_major_version()]
+ api.produce(RepositoriesMapping(
+ mapping=mapping,
+ repositories=repomap_data.get_repositories(valid_major_versions)
+ ))
+ except ModelViolationError as err:
+ err_message = (
+ 'The repository mapping file is invalid: '
+ 'the JSON does not match required schema (wrong field type/value): {}'
+ .format(err)
+ )
+ inhibit_upgrade(err_message)
+ except KeyError as err:
+ inhibit_upgrade(
+ 'The repository mapping file is invalid: the JSON is missing a required field: {}'.format(err))
+ except ValueError as err:
+ # The error should contain enough information, so we do not need to clarify it further
+ inhibit_upgrade('The repository mapping file is invalid: {}'.format(err))
+
+
+def scan_vendor_repomaps(read_repofile_func=read_repofile):
+ """
+ Scan the repository mapping file and produce RepositoriesMapping msg.
+
+ See the description of the actor for more details.
+ """
+
+ map_json_suffix = "_map.json"
+ if os.path.isdir(VENDORS_DIR):
+ vendor_mapfiles = list(filter(lambda vfile: map_json_suffix in vfile, os.listdir(VENDORS_DIR)))
+
+ for mapfile in vendor_mapfiles:
+ read_repomap_file(mapfile, read_repofile_func, mapfile[:-len(map_json_suffix)])
+ else:
+ api.current_logger().debug(
+ "The {} directory doesn't exist. Nothing to do.".format(VENDORS_DIR)
+ )
+ # vendor_repomap_collection = scan_vendor_repomaps(VENDOR_REPOMAP_DIR)
+ # if vendor_repomap_collection:
+ # self.produce(vendor_repomap_collection)
+ # for repomap in vendor_repomap_collection.maps:
+ # self.produce(repomap)
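The library above derives the vendor name from a "<vendor>_map.json" file name and feeds its JSON to RepoMapData.load_from_dict, i.e. the same schema as repomap.json. A minimal illustrative payload that load_from_dict would accept is sketched below as a Python dict; all identifiers and values are invented and are not shipped by this patch.

# Hypothetical contents of /etc/leapp/files/vendors.d/mariadb_map.json, shown as a Python dict.
example_vendor_map = {
    "version_format": "1.0.0",
    "repositories": [
        {"pesid": "mariadb-el7", "entries": [
            {"repoid": "mariadb-el7-x86_64", "channel": "ga", "repo_type": "rpm",
             "arch": "x86_64", "major_version": "7"},
        ]},
        {"pesid": "mariadb-el8", "entries": [
            {"repoid": "mariadb-el8-x86_64", "channel": "ga", "repo_type": "rpm",
             "arch": "x86_64", "major_version": "8"},
        ]},
    ],
    "mapping": [
        {"source_major_version": "7", "target_major_version": "8",
         "entries": [{"source": "mariadb-el7", "target": ["mariadb-el8"]}]},
    ],
}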
diff --git a/repos/system_upgrade/common/libraries/config/version.py b/repos/system_upgrade/common/libraries/config/version.py
index 03f3cd4..783075d 100644
--- a/repos/system_upgrade/common/libraries/config/version.py
@@ -955,164 +1240,157 @@ index 1c58148..37313b6 100644
_raise_error(local_path, "File {lp} exists but couldn't be read".format(lp=local_path))
diff --git a/repos/system_upgrade/common/libraries/repomaputils.py b/repos/system_upgrade/common/libraries/repomaputils.py
new file mode 100644
index 0000000..7ca63d2
index 0000000..5c41620
--- /dev/null
+++ b/repos/system_upgrade/common/libraries/repomaputils.py
@@ -0,0 +1,154 @@
@@ -0,0 +1,147 @@
+import os +import json
+import io # Python2/Python3 compatible IO (open etc.) +from collections import defaultdict
+ +
+from leapp.exceptions import StopActorExecutionError +from leapp.exceptions import StopActorExecutionError
+from leapp.libraries.common import config
+from leapp.libraries.common.fetch import read_or_fetch +from leapp.libraries.common.fetch import read_or_fetch
+from leapp.libraries.stdlib import api +from leapp.models import PESIDRepositoryEntry, RepoMapEntry
+from leapp.models import RepositoriesMap, RepositoryMap, VendorRepositoriesMapCollection
+from leapp.models.fields import ModelViolationError
+
+REPOMAP_FILE = "repomap.csv"
+"""Path to the repository mapping file."""
+BASE_REPOMAP_DIR = "/etc/leapp/files"
+VENDOR_REPOMAP_DIR = "/etc/leapp/files/vendors.d"
+ +
+ +
+def _raise_error(msg, details): +def inhibit_upgrade(msg):
+ raise StopActorExecutionError( + raise StopActorExecutionError(
+ msg, + msg,
+ details={ + details={'hint': ('Read documentation at the following link for more'
+ "details": details, + ' information about how to retrieve the valid file:'
+ "hint": ( + ' https://access.redhat.com/articles/3664871')})
+ "Read documentation at the following link for more"
+ " information about how to retrieve the valid file:"
+ " https://access.redhat.com/articles/3664871"
+ ),
+ },
+ )
+ +
+ +
+def read_local( +def read_repofile(repofile, directory="/etc/leapp/files"):
+ filename, + # NOTE: what about catch StopActorExecution error when the file cannot be
+ directory=BASE_REPOMAP_DIR, + # obtained -> then check whether old_repomap file exists and in such a case
+ allow_empty=False, + # inform user they have to provde the new repomap.json file (we have the
+ encoding="utf-8", + # warning now only which could be potentially overlooked)
+):
+ logger = api.current_logger()
+ local_path = os.path.join(directory, filename)
+ try: + try:
+ with io.open(local_path, encoding=encoding) as f: + return json.loads(read_or_fetch(repofile, directory))
+ data = f.read() + except ValueError:
+ if not allow_empty and not data: + # The data does not contain a valid json
+ _raise_error( + inhibit_upgrade('The repository mapping file is invalid: file does not contain a valid JSON object.')
+ local_path, "File {} exists but is empty".format(local_path) + return None # Avoids inconsistent-return-statements warning
+ )
+ logger.warning(
+ "File {lp} successfully read ({l} bytes)".format(
+ lp=local_path, l=len(data)
+ )
+ )
+ return [line.strip() for line in data.splitlines()]
+ except EnvironmentError:
+ _raise_error(
+ local_path, "File {} exists but couldn't be read".format(local_path)
+ )
+ except Exception as e:
+ raise e
+ +
+ +
+def read_or_fetch_repofile(repofile, directory): +class RepoMapData(object):
+ contents = read_or_fetch(repofile, directory) + VERSION_FORMAT = '1.0.0'
+ return [line.strip() for line in contents.splitlines()]
+ +
+ def __init__(self):
+ self.repositories = []
+ self.mapping = {}
+ +
+def scan_repomaps(repomap_file, repomap_dir, read_repofile_func=read_or_fetch_repofile): + def add_repository(self, data, pesid):
+ """ + """
+ Scan the repository mapping file and produce RepositoriesMap msg. + Add new PESIDRepositoryEntry with given pesid from the provided dictionary.
+ +
+ See the description of the actor for more details. + :param data: A dict containing the data of the added repository. The dictionary structure corresponds
+ """ + to the repositories entries in the repository mapping JSON schema.
+ _exp_src_prod_type = config.get_product_type("source") + :type data: Dict[str, str]
+ _exp_dst_prod_type = config.get_product_type("target") + :param pesid: PES id of the repository family that the newly added repository belongs to.
+ :type pesid: str
+ """
+ self.repositories.append(PESIDRepositoryEntry(
+ repoid=data['repoid'],
+ channel=data['channel'],
+ rhui=data.get('rhui', ''),
+ repo_type=data['repo_type'],
+ arch=data['arch'],
+ major_version=data['major_version'],
+ pesid=pesid
+ ))
+ +
+ repositories = [] + def get_repositories(self, valid_major_versions):
+ line_num = 0 + """
+ for line in read_repofile_func(repomap_file, repomap_dir)[1:]: + Return the list of PESIDRepositoryEntry object matching the specified major versions.
+ line_num += 1 + """
+ return [repo for repo in self.repositories if repo.major_version in valid_major_versions]
+ +
+ api.current_logger().debug("Grabbing line {} of file {}: \"{}\"".format(line_num, repomap_file, line)) + def get_version_repoids(self, major_version):
+ """
+ Return the list of repository ID strings for repositories matching the specified major version.
+ """
+ return [repo.repoid for repo in self.repositories if repo.major_version == major_version]
+ +
+ # skip empty lines and comments + def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid):
+ if not line or line.startswith("#"): + """
+ api.current_logger().debug("Line skipped") + Add a new mapping entry that is mapping the source pesid to the destination pesid(s),
+ continue + relevant in an IPU from the supplied source major version to the supplied target
+ major version.
+ +
+ try: + :param str source_major_version: Specifies the major version of the source system
+ ( + for which the added mapping applies.
+ from_repoid, + :param str target_major_version: Specifies the major version of the target system
+ to_repoid, + for which the added mapping applies.
+ to_pes_repo, + :param str source_pesid: PESID of the source repository.
+ from_minor_version, + :param Union[str|List[str]] target_pesid: A single target PESID or a list of target
+ to_minor_version, + PESIDs of the added mapping.
+ arch, + """
+ repo_type, + # NOTE: it could be more simple, but I prefer to be sure the input data
+ src_prod_type, + # contains just one map per source PESID.
+ dst_prod_type, + key = '{}:{}'.format(source_major_version, target_major_version)
+ ) = line.split(",") + rmap = self.mapping.get(key, defaultdict(set))
+ self.mapping[key] = rmap
+ if isinstance(target_pesid, list):
+ rmap[source_pesid].update(target_pesid)
+ else:
+ rmap[source_pesid].add(target_pesid)
+ +
+ # filter out records irrelevant for this run + def get_mappings(self, src_major_version, dst_major_version):
+ if ( + """
+ arch != api.current_actor().configuration.architecture + Return the list of RepoMapEntry objects for the specified upgrade path.
+ or _exp_src_prod_type != src_prod_type
+ or _exp_dst_prod_type != dst_prod_type
+ ):
+ api.current_logger().debug("Line filtered out")
+ continue
+ +
+ new_repo_map = RepositoryMap( + IOW, the whole mapping for specified IPU.
+ from_repoid=from_repoid, + """
+ to_repoid=to_repoid, + key = '{}:{}'.format(src_major_version, dst_major_version)
+ to_pes_repo=to_pes_repo, + rmap = self.mapping.get(key, None)
+ from_minor_version=from_minor_version, + if not rmap:
+ to_minor_version=to_minor_version, + return None
+ arch=arch, + map_list = []
+ repo_type=repo_type, + for src_pesid in sorted(rmap.keys()):
+ ) + map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid])))
+ return map_list
+ +
+ api.current_logger().debug("Map added: {}".format(new_repo_map.dump())) + @staticmethod
+ repositories.append(new_repo_map) + def load_from_dict(data):
+ + if data['version_format'] != RepoMapData.VERSION_FORMAT:
+ except (ModelViolationError, ValueError) as err: + raise ValueError(
+ _raise_error( + 'The obtained repomap data has unsupported version of format.'
+ "The repository mapping file is invalid. It is possible the file is out of date.", + ' Get {} required {}'
+ "Offending line number: {} ({}).".format(line_num, err), + .format(data['version_format'], RepoMapData.VERSION_FORMAT)
+ ) + )
+ +
+ if not repositories: + repomap = RepoMapData()
+ _raise_error(
+ "The repository mapping file is invalid. Could not find any repository mapping record.",
+ "",
+ )
+ +
+ return RepositoriesMap(file=repomap_file, repositories=repositories) + # Load reposiories
+ existing_pesids = set()
+ for repo_family in data['repositories']:
+ existing_pesids.add(repo_family['pesid'])
+ for repo in repo_family['entries']:
+ repomap.add_repository(repo, repo_family['pesid'])
+ +
+ # Load mappings
+ for mapping in data['mapping']:
+ for entry in mapping['entries']:
+ if not isinstance(entry['target'], list):
+ raise ValueError(
+ 'The target field of a mapping entry is not a list: {}'
+ .format(entry)
+ )
+ +
+def scan_vendor_repomaps(repomap_dir): + for pesid in [entry['source']] + entry['target']:
+ if not os.path.isdir(repomap_dir): + if pesid not in existing_pesids:
+ api.current_logger().debug( + raise ValueError(
+ "The {} directory doesn't exist. Nothing to do.".format(repomap_dir) + 'The {} pesid is not related to any repository.'
+ ) + .format(pesid)
+ return None + )
+ + repomap.add_mapping(
+ vendor_maps = [] + source_major_version=mapping['source_major_version'],
+ + target_major_version=mapping['target_major_version'],
+ for repomap_name in os.listdir(repomap_dir): + source_pesid=entry['source'],
+ # Only scan the .csv files, those are the maps. + target_pesid=entry['target'],
+ if not repomap_name.endswith(".csv"): + )
+ continue + return repomap
+ scanned_map = scan_repomaps(
+ repomap_name, repomap_dir, read_repofile_func=read_local
+ )
+ vendor_maps.append(scanned_map)
+
+ return VendorRepositoriesMapCollection(maps=vendor_maps)
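Assuming a parsed mapping like the example_vendor_map sketched earlier, the shared RepoMapData helper in this library would be driven roughly as follows; this is a usage sketch, not an excerpt from the patch.

repomap = RepoMapData.load_from_dict(example_vendor_map)
source_repoids = repomap.get_version_repoids("7")      # ["mariadb-el7-x86_64"] in the example
mapping = repomap.get_mappings("7", "8")               # RepoMapEntry objects for the 7 -> 8 path
repositories = repomap.get_repositories(["7", "8"])    # PESIDRepositoryEntry objects for both versions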
diff --git a/repos/system_upgrade/common/libraries/rhsm.py b/repos/system_upgrade/common/libraries/rhsm.py
index b7e4b21..dc038bf 100644
--- a/repos/system_upgrade/common/libraries/rhsm.py
@@ -1140,20 +1418,14 @@ index 0000000..de4056f
+ topic = VendorTopic
+ data = fields.List(fields.String())
diff --git a/repos/system_upgrade/common/models/repositoriesmap.py b/repos/system_upgrade/common/models/repositoriesmap.py
index c187333..f5f23f4 100644
index c187333..a068a70 100644
--- a/repos/system_upgrade/common/models/repositoriesmap.py
+++ b/repos/system_upgrade/common/models/repositoriesmap.py
@@ -92,3 +92,10 @@ class RepositoriesMapping(Model):
@@ -92,3 +92,4 @@ class RepositoriesMapping(Model):
mapping = fields.List(fields.Model(RepoMapEntry), default=[])
repositories = fields.List(fields.Model(PESIDRepositoryEntry), default=[])
+ file = fields.String(default="repomap.csv")
+
+
+class VendorRepositoriesMapCollection(Model):
+ topic = TransactionTopic
+
+ maps = fields.List(fields.Model(RepositoriesMapping))
diff --git a/repos/system_upgrade/common/models/vendorsignatures.py b/repos/system_upgrade/common/models/vendorsignatures.py
new file mode 100644
index 0000000..f456aec
@@ -1168,6 +1440,24 @@ index 0000000..f456aec
+ topic = VendorTopic
+ vendor = fields.String()
+ sigs = fields.List(fields.String())
diff --git a/repos/system_upgrade/common/models/vendorsourcerepos.py b/repos/system_upgrade/common/models/vendorsourcerepos.py
new file mode 100644
index 0000000..b7a219b
--- /dev/null
+++ b/repos/system_upgrade/common/models/vendorsourcerepos.py
@@ -0,0 +1,12 @@
+from leapp.models import Model, fields
+from leapp.topics import VendorTopic
+
+
+class VendorSourceRepos(Model):
+ """
+ This model contains the data on all source repositories associated with a specific vendor.
+ Its data is used to determine whether the vendor should be included into the upgrade process.
+ """
+ topic = VendorTopic
+ vendor = fields.String()
+ source_repoids = fields.List(fields.String())
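This model is the hand-off point between the two new actors; an illustrative instance (values invented) would be:

from leapp.models import VendorSourceRepos  # available once this model ships

example = VendorSourceRepos(vendor="mariadb", source_repoids=["mariadb-el7-x86_64"])
# produced by vendor_repositories_mapping and consumed by check_enabled_vendor_repos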
diff --git a/repos/system_upgrade/common/topics/vendortopic.py b/repos/system_upgrade/common/topics/vendortopic.py
new file mode 100644
index 0000000..014b7af