Update ELevate patch

Andrew Lukoshko 2022-08-22 12:07:15 +00:00
parent 4bd6dbbadd
commit 93f6a68056


@@ -188,15 +188,15 @@ index bb89c9f..2b8e7c8 100644
'--args',
diff --git a/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py
new file mode 100644
index 0000000..51d0c25
index 0000000..5284aec
--- /dev/null
+++ b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py
@@ -0,0 +1,55 @@
@@ -0,0 +1,53 @@
+from leapp.actors import Actor
+from leapp.libraries.stdlib import api
+from leapp.models import (
+ RepositoriesFacts,
+ VendorRepositoriesMapCollection,
+ VendorSourceRepos,
+ ActiveVendorList,
+)
+from leapp.tags import FactsPhaseTag, IPUWorkflowTag
@@ -210,20 +210,18 @@ index 0000000..51d0c25
+ """
+
+ name = "check_enabled_vendor_repos"
+ consumes = (RepositoriesFacts, VendorRepositoriesMapCollection)
+ consumes = (RepositoriesFacts, VendorSourceRepos)
+ produces = (ActiveVendorList,)
+ tags = (IPUWorkflowTag, FactsPhaseTag.Before)
+
+ def process(self):
+ vendor_mapping_data = {}
+ active_vendors = []
+ active_vendors = set()
+
+ # Make a dict for easy lookup of repoid -> vendor name.
+ for map_coll in api.consume(VendorRepositoriesMapCollection):
+ for map in map_coll.maps:
+ for repo in map.repositories:
+ # Cut the .csv, keep only the vendor name.
+ vendor_mapping_data[repo.from_repoid] = map.file[:-4]
+ for vendor_src_repodata in api.consume(VendorSourceRepos):
+ for vendor_src_repo in vendor_src_repodata.source_repoids:
+ vendor_mapping_data[vendor_src_repo] = vendor_src_repodata.vendor
+
+ # Is the repo listed in the vendor map as from_repoid present on the system?
+ for repos in api.consume(RepositoriesFacts):
@@ -240,11 +238,11 @@ index 0000000..51d0c25
+ repo.repoid, new_vendor
+ )
+ )
+ active_vendors.append(new_vendor)
+ active_vendors.add(new_vendor)
+
+ if active_vendors:
+ self.log.debug("Active vendor list: {}".format(active_vendors))
+ api.produce(ActiveVendorList(data=active_vendors))
+ api.produce(ActiveVendorList(data=list(active_vendors)))
+ else:
+ self.log.info("No active vendors found, vendor list not generated")
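For reference, a minimal standalone sketch of the repoid-to-vendor matching the actor above performs, using plain dicts in place of the leapp VendorSourceRepos and RepositoriesFacts messages (the vendor and repoid names are invented for illustration):

    # Hypothetical stand-ins for the consumed messages.
    vendor_source_repos = [
        {"vendor": "mariadb", "source_repoids": ["mariadb-el7"]},
        {"vendor": "postgresql", "source_repoids": ["pgdg-common", "pgdg13"]},
    ]
    repositories_facts = [
        {"repositories": [{"repoid": "mariadb-el7"}, {"repoid": "base"}]},
    ]

    # repoid -> vendor lookup, mirroring the loop over VendorSourceRepos above.
    vendor_mapping_data = {}
    for src in vendor_source_repos:
        for repoid in src["source_repoids"]:
            vendor_mapping_data[repoid] = src["vendor"]

    # Collect the vendors whose source repositories are present on the system.
    active_vendors = set()
    for facts in repositories_facts:
        for repo in facts["repositories"]:
            vendor = vendor_mapping_data.get(repo["repoid"])
            if vendor:
                active_vendors.add(vendor)

    print(sorted(active_vendors))  # ['mariadb']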
diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh
@@ -300,7 +298,7 @@ index edf978f..7fea4ec 100644
variant_id=data.get('VARIANT_ID', '').strip('"') or None
)
diff --git a/repos/system_upgrade/common/actors/peseventsscanner/actor.py b/repos/system_upgrade/common/actors/peseventsscanner/actor.py
index fadf76b..b86d364 100644
index fadf76b..7ef2664 100644
--- a/repos/system_upgrade/common/actors/peseventsscanner/actor.py
+++ b/repos/system_upgrade/common/actors/peseventsscanner/actor.py
@@ -1,3 +1,6 @@
@@ -327,7 +325,7 @@ index fadf76b..b86d364 100644
class PesEventsScanner(Actor):
"""
@@ -32,9 +39,21 @@ class PesEventsScanner(Actor):
@@ -32,9 +39,22 @@ class PesEventsScanner(Actor):
RepositoriesMapping,
RHUIInfo,
RpmTransactionTasks,
@@ -344,17 +342,55 @@ index fadf76b..b86d364 100644
+ for vendor_list in self.consume(ActiveVendorList):
+ active_vendors.extend(vendor_list.data)
+
+ pes_json_suffix = "_pes.json"
+ if os.path.isdir(VENDORS_DIR):
+ vendor_pesfiles = list(filter(lambda vfile: ".json" in vfile, os.listdir(VENDORS_DIR)))
+ vendor_pesfiles = list(filter(lambda vfile: pes_json_suffix in vfile, os.listdir(VENDORS_DIR)))
+
+ for pesfile in vendor_pesfiles:
+ if pesfile[:-5] in active_vendors:
+ if pesfile[:-len(pes_json_suffix)] in active_vendors:
+ pes_events_scanner(VENDORS_DIR, pesfile)
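A short sketch of the file-name convention the block above relies on: vendor PES files are expected to end in "_pes.json", and stripping that suffix yields the vendor name checked against the ActiveVendorList data (the directory listing below is hypothetical):

    listing = ["mariadb_pes.json", "postgresql_pes.json", "mariadb_map.json"]
    active_vendors = ["mariadb"]

    pes_json_suffix = "_pes.json"
    vendor_pesfiles = [vfile for vfile in listing if pes_json_suffix in vfile]

    for pesfile in vendor_pesfiles:
        vendor = pesfile[:-len(pes_json_suffix)]  # "mariadb_pes.json" -> "mariadb"
        if vendor in active_vendors:
            print("would scan", pesfile)  # the actor calls pes_events_scanner(VENDORS_DIR, pesfile)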
diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py
index 1be2caa..8e5ca07 100644
index 1be2caa..072de17 100644
--- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py
+++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner.py
@@ -324,7 +324,7 @@ def parse_pes_events(json_data):
@@ -138,19 +138,26 @@ def _get_repositories_mapping(target_pesids):
:return: Dictionary with all repositories mapped.
"""
- repositories_map_msgs = api.consume(RepositoriesMapping)
- repositories_map_msg = next(repositories_map_msgs, None)
- if list(repositories_map_msgs):
- api.current_logger().warning('Unexpectedly received more than one RepositoriesMapping message.')
- if not repositories_map_msg:
- raise StopActorExecutionError(
- 'Cannot parse RepositoriesMapping data properly',
- details={'Problem': 'Did not receive a message with mapped repositories'}
- )
+ composite_mapping = []
+ composite_repos = []
+
+ for repomap_msg in api.consume(RepositoriesMapping):
+ if not repomap_msg:
+ raise StopActorExecutionError(
+ 'Cannot parse RepositoriesMapping data properly',
+ details={'Problem': 'Received a blank message with mapped repositories'}
+ )
+ composite_mapping.extend(repomap_msg.mapping)
+ composite_repos.extend(repomap_msg.repositories)
+
+ composite_map_msg = RepositoriesMapping(
+ mapping=composite_mapping,
+ repositories=composite_repos
+ )
rhui_info = next(api.consume(RHUIInfo), RHUIInfo(provider=''))
- repomap = peseventsscanner_repomap.RepoMapDataHandler(repositories_map_msg, cloud_provider=rhui_info.provider)
+ repomap = peseventsscanner_repomap.RepoMapDataHandler(composite_map_msg, cloud_provider=rhui_info.provider)
# NOTE: We have to calculate expected target repositories
# like in the setuptargetrepos actor. It's planned to handle this in different
# way in future...
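In plain terms, the hunk above replaces the single-message consumption with a concatenation of every RepositoriesMapping message on the bus, e.g. one from the distribution repomap file plus one per vendor mapping file. A rough sketch with made-up payloads:

    incoming_messages = [
        {"mapping": ["rhel7-base -> rhel8-baseos"], "repositories": ["rhel7-base", "rhel8-baseos"]},
        {"mapping": ["vendor7 -> vendor8"], "repositories": ["vendor7-el7", "vendor8-el8"]},
    ]

    composite_mapping = []
    composite_repos = []
    for msg in incoming_messages:
        composite_mapping.extend(msg["mapping"])
        composite_repos.extend(msg["repositories"])

    composite_map_msg = {"mapping": composite_mapping, "repositories": composite_repos}
    print(composite_map_msg)  # the combined mapping handed to RepoMapDataHandler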
@@ -324,7 +331,7 @@ def parse_pes_events(json_data):
:return: List of Event tuples, where each event contains event type and input/output pkgs
"""
data = json.loads(json_data)
@@ -507,6 +543,186 @@ index 01f6df3..4ba05f0 100644
self.produce(signed_pkgs)
self.produce(unsigned_pkgs)
diff --git a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py
index b2d00f3..e9458c5 100644
--- a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py
+++ b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py
@@ -1,12 +1,9 @@
-from collections import defaultdict
-import json
import os
-from leapp.exceptions import StopActorExecutionError
from leapp.libraries.common.config.version import get_target_major_version, get_source_major_version
-from leapp.libraries.common.fetch import read_or_fetch
+from leapp.libraries.common.repomaputils import RepoMapData, read_repofile, inhibit_upgrade
from leapp.libraries.stdlib import api
-from leapp.models import RepositoriesMapping, PESIDRepositoryEntry, RepoMapEntry
+from leapp.models import RepositoriesMapping
from leapp.models.fields import ModelViolationError
OLD_REPOMAP_FILE = 'repomap.csv'
@@ -16,144 +13,9 @@ REPOMAP_FILE = 'repomap.json'
"""The name of the new repository mapping file."""
-class RepoMapData(object):
- VERSION_FORMAT = '1.0.0'
-
- def __init__(self):
- self.repositories = []
- self.mapping = {}
-
- def add_repository(self, data, pesid):
- """
- Add new PESIDRepositoryEntry with given pesid from the provided dictionary.
-
- :param data: A dict containing the data of the added repository. The dictionary structure corresponds
- to the repositories entries in the repository mapping JSON schema.
- :type data: Dict[str, str]
- :param pesid: PES id of the repository family that the newly added repository belongs to.
- :type pesid: str
- """
- self.repositories.append(PESIDRepositoryEntry(
- repoid=data['repoid'],
- channel=data['channel'],
- rhui=data.get('rhui', ''),
- repo_type=data['repo_type'],
- arch=data['arch'],
- major_version=data['major_version'],
- pesid=pesid
- ))
-
- def get_repositories(self, valid_major_versions):
- """
- Return the list of PESIDRepositoryEntry object matching the specified major versions.
- """
- return [repo for repo in self.repositories if repo.major_version in valid_major_versions]
-
- def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid):
- """
- Add a new mapping entry that is mapping the source pesid to the destination pesid(s),
- relevant in an IPU from the supplied source major version to the supplied target
- major version.
-
- :param str source_major_version: Specifies the major version of the source system
- for which the added mapping applies.
- :param str target_major_version: Specifies the major version of the target system
- for which the added mapping applies.
- :param str source_pesid: PESID of the source repository.
- :param Union[str|List[str]] target_pesid: A single target PESID or a list of target
- PESIDs of the added mapping.
- """
- # NOTE: it could be more simple, but I prefer to be sure the input data
- # contains just one map per source PESID.
- key = '{}:{}'.format(source_major_version, target_major_version)
- rmap = self.mapping.get(key, defaultdict(set))
- self.mapping[key] = rmap
- if isinstance(target_pesid, list):
- rmap[source_pesid].update(target_pesid)
- else:
- rmap[source_pesid].add(target_pesid)
-
- def get_mappings(self, src_major_version, dst_major_version):
- """
- Return the list of RepoMapEntry objects for the specified upgrade path.
-
- IOW, the whole mapping for specified IPU.
- """
- key = '{}:{}'.format(src_major_version, dst_major_version)
- rmap = self.mapping.get(key, None)
- if not rmap:
- return None
- map_list = []
- for src_pesid in sorted(rmap.keys()):
- map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid])))
- return map_list
-
- @staticmethod
- def load_from_dict(data):
- if data['version_format'] != RepoMapData.VERSION_FORMAT:
- raise ValueError(
- 'The obtained repomap data has unsupported version of format.'
- ' Get {} required {}'
- .format(data['version_format'], RepoMapData.VERSION_FORMAT)
- )
-
- repomap = RepoMapData()
-
- # Load reposiories
- existing_pesids = set()
- for repo_family in data['repositories']:
- existing_pesids.add(repo_family['pesid'])
- for repo in repo_family['entries']:
- repomap.add_repository(repo, repo_family['pesid'])
-
- # Load mappings
- for mapping in data['mapping']:
- for entry in mapping['entries']:
- if not isinstance(entry['target'], list):
- raise ValueError(
- 'The target field of a mapping entry is not a list: {}'
- .format(entry)
- )
-
- for pesid in [entry['source']] + entry['target']:
- if pesid not in existing_pesids:
- raise ValueError(
- 'The {} pesid is not related to any repository.'
- .format(pesid)
- )
- repomap.add_mapping(
- source_major_version=mapping['source_major_version'],
- target_major_version=mapping['target_major_version'],
- source_pesid=entry['source'],
- target_pesid=entry['target'],
- )
- return repomap
-
-
-def _inhibit_upgrade(msg):
- raise StopActorExecutionError(
- msg,
- details={'hint': ('Read documentation at the following link for more'
- ' information about how to retrieve the valid file:'
- ' https://access.redhat.com/articles/3664871')})
-
-
-def _read_repofile(repofile):
- # NOTE: what about catch StopActorExecution error when the file cannot be
- # obtained -> then check whether old_repomap file exists and in such a case
- # inform user they have to provde the new repomap.json file (we have the
- # warning now only which could be potentially overlooked)
- try:
- return json.loads(read_or_fetch(repofile))
- except ValueError:
- # The data does not contain a valid json
- _inhibit_upgrade('The repository mapping file is invalid: file does not contain a valid JSON object.')
- return None # Avoids inconsistent-return-statements warning
-
-
-def scan_repositories(read_repofile_func=_read_repofile):
+def scan_repositories(read_repofile_func=read_repofile):
"""
- Scan the repository mapping file and produce RepositoriesMap msg.
+ Scan the repository mapping file and produce RepositoriesMapping msg.
See the description of the actor for more details.
"""
@@ -185,10 +47,10 @@ def scan_repositories(read_repofile_func=_read_repofile):
'the JSON does not match required schema (wrong field type/value): {}'
.format(err)
)
- _inhibit_upgrade(err_message)
+ inhibit_upgrade(err_message)
except KeyError as err:
- _inhibit_upgrade(
+ inhibit_upgrade(
'The repository mapping file is invalid: the JSON is missing a required field: {}'.format(err))
except ValueError as err:
# The error should contain enough information, so we do not need to clarify it further
- _inhibit_upgrade('The repository mapping file is invalid: {}'.format(err))
+ inhibit_upgrade('The repository mapping file is invalid: {}'.format(err))
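For context, the JSON document that scan_repositories() hands to the shared RepoMapData.load_from_dict (defined in repomaputils.py further below) must carry version_format '1.0.0', a list of repository families keyed by pesid, and mapping entries whose target field is a list; anything else ends in inhibit_upgrade(). A minimal, made-up example of a valid document:

    minimal_repomap = {
        "version_format": "1.0.0",
        "repositories": [
            {"pesid": "example7-base", "entries": [
                {"repoid": "example-7-rpms", "channel": "ga", "repo_type": "rpm",
                 "arch": "x86_64", "major_version": "7"},
            ]},
            {"pesid": "example8-base", "entries": [
                {"repoid": "example-8-rpms", "channel": "ga", "repo_type": "rpm",
                 "arch": "x86_64", "major_version": "8"},
            ]},
        ],
        "mapping": [
            {"source_major_version": "7", "target_major_version": "8",
             "entries": [{"source": "example7-base", "target": ["example8-base"]}]},
        ],
    }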
diff --git a/repos/system_upgrade/common/actors/repositoriesmapping/tests/unit_test_repositoriesmapping.py b/repos/system_upgrade/common/actors/repositoriesmapping/tests/unit_test_repositoriesmapping.py
index 3c0b04b..3480432 100644
--- a/repos/system_upgrade/common/actors/repositoriesmapping/tests/unit_test_repositoriesmapping.py
@@ -871,13 +1087,14 @@ index 0000000..f74de27
+ )
diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py
new file mode 100644
index 0000000..156d78c
index 0000000..1325647
--- /dev/null
+++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py
@@ -0,0 +1,22 @@
@@ -0,0 +1,19 @@
+from leapp.actors import Actor
+from leapp.libraries.common.repomaputils import scan_vendor_repomaps, VENDOR_REPOMAP_DIR
+from leapp.models import VendorRepositoriesMapCollection, RepositoriesMap
+# from leapp.libraries.common.repomaputils import scan_vendor_repomaps, VENDOR_REPOMAP_DIR
+from leapp.libraries.actor.vendorrepositoriesmapping import scan_vendor_repomaps
+from leapp.models import VendorSourceRepos, RepositoriesMapping
+from leapp.tags import FactsPhaseTag, IPUWorkflowTag
+
+
@@ -888,15 +1105,83 @@ index 0000000..156d78c
+
+ name = "vendor_repositories_mapping"
+ consumes = ()
+ produces = (RepositoriesMap, VendorRepositoriesMapCollection,)
+ produces = (RepositoriesMapping, VendorSourceRepos,)
+ tags = (IPUWorkflowTag, FactsPhaseTag.Before)
+
+ def process(self):
+ vendor_repomap_collection = scan_vendor_repomaps(VENDOR_REPOMAP_DIR)
+ if vendor_repomap_collection:
+ self.produce(vendor_repomap_collection)
+ for repomap in vendor_repomap_collection.maps:
+ self.produce(repomap)
+ scan_vendor_repomaps()
diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py
new file mode 100644
index 0000000..204d0dc
--- /dev/null
+++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py
@@ -0,0 +1,66 @@
+import os
+
+from leapp.libraries.common.config.version import get_target_major_version, get_source_major_version
+from leapp.libraries.common.repomaputils import RepoMapData, read_repofile, inhibit_upgrade
+from leapp.libraries.stdlib import api
+from leapp.models import VendorSourceRepos, RepositoriesMapping
+from leapp.models.fields import ModelViolationError
+
+
+VENDORS_DIR = "/etc/leapp/files/vendors.d"
+"""The folder containing the vendor repository mapping files."""
+
+
+def read_repomap_file(repomap_file, read_repofile_func, vendor_name):
+ json_data = read_repofile_func(repomap_file, VENDORS_DIR)
+ try:
+ repomap_data = RepoMapData.load_from_dict(json_data)
+
+ api.produce(VendorSourceRepos(
+ vendor=vendor_name,
+ source_repoids=repomap_data.get_version_repoids(get_source_major_version())
+ ))
+
+ mapping = repomap_data.get_mappings(get_source_major_version(), get_target_major_version())
+ valid_major_versions = [get_source_major_version(), get_target_major_version()]
+ api.produce(RepositoriesMapping(
+ mapping=mapping,
+ repositories=repomap_data.get_repositories(valid_major_versions)
+ ))
+ except ModelViolationError as err:
+ err_message = (
+ 'The repository mapping file is invalid: '
+ 'the JSON does not match required schema (wrong field type/value): {}'
+ .format(err)
+ )
+ inhibit_upgrade(err_message)
+ except KeyError as err:
+ inhibit_upgrade(
+ 'The repository mapping file is invalid: the JSON is missing a required field: {}'.format(err))
+ except ValueError as err:
+ # The error should contain enough information, so we do not need to clarify it further
+ inhibit_upgrade('The repository mapping file is invalid: {}'.format(err))
+
+
+def scan_vendor_repomaps(read_repofile_func=read_repofile):
+ """
+ Scan the repository mapping file and produce RepositoriesMapping msg.
+
+ See the description of the actor for more details.
+ """
+
+ map_json_suffix = "_map.json"
+ if os.path.isdir(VENDORS_DIR):
+ vendor_mapfiles = list(filter(lambda vfile: map_json_suffix in vfile, os.listdir(VENDORS_DIR)))
+
+ for mapfile in vendor_mapfiles:
+ read_repomap_file(mapfile, read_repofile_func, mapfile[:-len(map_json_suffix)])
+ else:
+ api.current_logger().debug(
+ "The {} directory doesn't exist. Nothing to do.".format(VENDORS_DIR)
+ )
+ # vendor_repomap_collection = scan_vendor_repomaps(VENDOR_REPOMAP_DIR)
+ # if vendor_repomap_collection:
+ # self.produce(vendor_repomap_collection)
+ # for repomap in vendor_repomap_collection.maps:
+ # self.produce(repomap)
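As a rough illustration, for one hypothetical vendor file "mariadb_map.json" on a 7-to-8 upgrade, read_repomap_file() above produces two messages whose payloads look roughly like this (all names invented):

    vendor_source_repos = {
        "vendor": "mariadb",                # taken from the "<vendor>_map.json" file name
        "source_repoids": ["mariadb-el7"],  # repoids whose major_version equals the source version
    }
    repositories_mapping = {
        "mapping": [{"source": "mariadb7-pesid", "target": ["mariadb8-pesid"]}],
        "repositories": ["mariadb-el7", "mariadb-el8"],  # entries for both major versions
    }
    print(vendor_source_repos, repositories_mapping)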
diff --git a/repos/system_upgrade/common/libraries/config/version.py b/repos/system_upgrade/common/libraries/config/version.py
index 03f3cd4..783075d 100644
--- a/repos/system_upgrade/common/libraries/config/version.py
@@ -955,164 +1240,157 @@ index 1c58148..37313b6 100644
_raise_error(local_path, "File {lp} exists but couldn't be read".format(lp=local_path))
diff --git a/repos/system_upgrade/common/libraries/repomaputils.py b/repos/system_upgrade/common/libraries/repomaputils.py
new file mode 100644
index 0000000..7ca63d2
index 0000000..5c41620
--- /dev/null
+++ b/repos/system_upgrade/common/libraries/repomaputils.py
@@ -0,0 +1,154 @@
+import os
+import io # Python2/Python3 compatible IO (open etc.)
@@ -0,0 +1,147 @@
+import json
+from collections import defaultdict
+
+from leapp.exceptions import StopActorExecutionError
+from leapp.libraries.common import config
+from leapp.libraries.common.fetch import read_or_fetch
+from leapp.libraries.stdlib import api
+from leapp.models import RepositoriesMap, RepositoryMap, VendorRepositoriesMapCollection
+from leapp.models.fields import ModelViolationError
+
+REPOMAP_FILE = "repomap.csv"
+"""Path to the repository mapping file."""
+BASE_REPOMAP_DIR = "/etc/leapp/files"
+VENDOR_REPOMAP_DIR = "/etc/leapp/files/vendors.d"
+from leapp.models import PESIDRepositoryEntry, RepoMapEntry
+
+
+def _raise_error(msg, details):
+def inhibit_upgrade(msg):
+ raise StopActorExecutionError(
+ msg,
+ details={
+ "details": details,
+ "hint": (
+ "Read documentation at the following link for more"
+ " information about how to retrieve the valid file:"
+ " https://access.redhat.com/articles/3664871"
+ ),
+ },
+ )
+ details={'hint': ('Read documentation at the following link for more'
+ ' information about how to retrieve the valid file:'
+ ' https://access.redhat.com/articles/3664871')})
+
+
+def read_local(
+ filename,
+ directory=BASE_REPOMAP_DIR,
+ allow_empty=False,
+ encoding="utf-8",
+):
+ logger = api.current_logger()
+ local_path = os.path.join(directory, filename)
+def read_repofile(repofile, directory="/etc/leapp/files"):
+ # NOTE: what about catch StopActorExecution error when the file cannot be
+ # obtained -> then check whether old_repomap file exists and in such a case
+ # inform user they have to provide the new repomap.json file (we have the
+ # warning now only which could be potentially overlooked)
+ try:
+ with io.open(local_path, encoding=encoding) as f:
+ data = f.read()
+ if not allow_empty and not data:
+ _raise_error(
+ local_path, "File {} exists but is empty".format(local_path)
+ )
+ logger.warning(
+ "File {lp} successfully read ({l} bytes)".format(
+ lp=local_path, l=len(data)
+ )
+ )
+ return [line.strip() for line in data.splitlines()]
+ except EnvironmentError:
+ _raise_error(
+ local_path, "File {} exists but couldn't be read".format(local_path)
+ )
+ except Exception as e:
+ raise e
+ return json.loads(read_or_fetch(repofile, directory))
+ except ValueError:
+ # The data does not contain a valid json
+ inhibit_upgrade('The repository mapping file is invalid: file does not contain a valid JSON object.')
+ return None # Avoids inconsistent-return-statements warning
+
+
+def read_or_fetch_repofile(repofile, directory):
+ contents = read_or_fetch(repofile, directory)
+ return [line.strip() for line in contents.splitlines()]
+class RepoMapData(object):
+ VERSION_FORMAT = '1.0.0'
+
+ def __init__(self):
+ self.repositories = []
+ self.mapping = {}
+
+def scan_repomaps(repomap_file, repomap_dir, read_repofile_func=read_or_fetch_repofile):
+ """
+ Scan the repository mapping file and produce RepositoriesMap msg.
+ def add_repository(self, data, pesid):
+ """
+ Add new PESIDRepositoryEntry with given pesid from the provided dictionary.
+
+ See the description of the actor for more details.
+ """
+ _exp_src_prod_type = config.get_product_type("source")
+ _exp_dst_prod_type = config.get_product_type("target")
+ :param data: A dict containing the data of the added repository. The dictionary structure corresponds
+ to the repositories entries in the repository mapping JSON schema.
+ :type data: Dict[str, str]
+ :param pesid: PES id of the repository family that the newly added repository belongs to.
+ :type pesid: str
+ """
+ self.repositories.append(PESIDRepositoryEntry(
+ repoid=data['repoid'],
+ channel=data['channel'],
+ rhui=data.get('rhui', ''),
+ repo_type=data['repo_type'],
+ arch=data['arch'],
+ major_version=data['major_version'],
+ pesid=pesid
+ ))
+
+ repositories = []
+ line_num = 0
+ for line in read_repofile_func(repomap_file, repomap_dir)[1:]:
+ line_num += 1
+ def get_repositories(self, valid_major_versions):
+ """
+ Return the list of PESIDRepositoryEntry object matching the specified major versions.
+ """
+ return [repo for repo in self.repositories if repo.major_version in valid_major_versions]
+
+ api.current_logger().debug("Grabbing line {} of file {}: \"{}\"".format(line_num, repomap_file, line))
+ def get_version_repoids(self, major_version):
+ """
+ Return the list of repository ID strings for repositories matching the specified major version.
+ """
+ return [repo.repoid for repo in self.repositories if repo.major_version == major_version]
+
+ # skip empty lines and comments
+ if not line or line.startswith("#"):
+ api.current_logger().debug("Line skipped")
+ continue
+ def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid):
+ """
+ Add a new mapping entry that is mapping the source pesid to the destination pesid(s),
+ relevant in an IPU from the supplied source major version to the supplied target
+ major version.
+
+ try:
+ (
+ from_repoid,
+ to_repoid,
+ to_pes_repo,
+ from_minor_version,
+ to_minor_version,
+ arch,
+ repo_type,
+ src_prod_type,
+ dst_prod_type,
+ ) = line.split(",")
+ :param str source_major_version: Specifies the major version of the source system
+ for which the added mapping applies.
+ :param str target_major_version: Specifies the major version of the target system
+ for which the added mapping applies.
+ :param str source_pesid: PESID of the source repository.
+ :param Union[str|List[str]] target_pesid: A single target PESID or a list of target
+ PESIDs of the added mapping.
+ """
+ # NOTE: it could be more simple, but I prefer to be sure the input data
+ # contains just one map per source PESID.
+ key = '{}:{}'.format(source_major_version, target_major_version)
+ rmap = self.mapping.get(key, defaultdict(set))
+ self.mapping[key] = rmap
+ if isinstance(target_pesid, list):
+ rmap[source_pesid].update(target_pesid)
+ else:
+ rmap[source_pesid].add(target_pesid)
+
+ # filter out records irrelevant for this run
+ if (
+ arch != api.current_actor().configuration.architecture
+ or _exp_src_prod_type != src_prod_type
+ or _exp_dst_prod_type != dst_prod_type
+ ):
+ api.current_logger().debug("Line filtered out")
+ continue
+ def get_mappings(self, src_major_version, dst_major_version):
+ """
+ Return the list of RepoMapEntry objects for the specified upgrade path.
+
+ new_repo_map = RepositoryMap(
+ from_repoid=from_repoid,
+ to_repoid=to_repoid,
+ to_pes_repo=to_pes_repo,
+ from_minor_version=from_minor_version,
+ to_minor_version=to_minor_version,
+ arch=arch,
+ repo_type=repo_type,
+ )
+ IOW, the whole mapping for specified IPU.
+ """
+ key = '{}:{}'.format(src_major_version, dst_major_version)
+ rmap = self.mapping.get(key, None)
+ if not rmap:
+ return None
+ map_list = []
+ for src_pesid in sorted(rmap.keys()):
+ map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid])))
+ return map_list
+
+ api.current_logger().debug("Map added: {}".format(new_repo_map.dump()))
+ repositories.append(new_repo_map)
+
+ except (ModelViolationError, ValueError) as err:
+ _raise_error(
+ "The repository mapping file is invalid. It is possible the file is out of date.",
+ "Offending line number: {} ({}).".format(line_num, err),
+ @staticmethod
+ def load_from_dict(data):
+ if data['version_format'] != RepoMapData.VERSION_FORMAT:
+ raise ValueError(
+ 'The obtained repomap data has unsupported version of format.'
+ ' Get {} required {}'
+ .format(data['version_format'], RepoMapData.VERSION_FORMAT)
+ )
+
+ if not repositories:
+ _raise_error(
+ "The repository mapping file is invalid. Could not find any repository mapping record.",
+ "",
+ )
+ repomap = RepoMapData()
+
+ return RepositoriesMap(file=repomap_file, repositories=repositories)
+ # Load repositories
+ existing_pesids = set()
+ for repo_family in data['repositories']:
+ existing_pesids.add(repo_family['pesid'])
+ for repo in repo_family['entries']:
+ repomap.add_repository(repo, repo_family['pesid'])
+
+ # Load mappings
+ for mapping in data['mapping']:
+ for entry in mapping['entries']:
+ if not isinstance(entry['target'], list):
+ raise ValueError(
+ 'The target field of a mapping entry is not a list: {}'
+ .format(entry)
+ )
+
+def scan_vendor_repomaps(repomap_dir):
+ if not os.path.isdir(repomap_dir):
+ api.current_logger().debug(
+ "The {} directory doesn't exist. Nothing to do.".format(repomap_dir)
+ )
+ return None
+
+ vendor_maps = []
+
+ for repomap_name in os.listdir(repomap_dir):
+ # Only scan the .csv files, those are the maps.
+ if not repomap_name.endswith(".csv"):
+ continue
+ scanned_map = scan_repomaps(
+ repomap_name, repomap_dir, read_repofile_func=read_local
+ )
+ vendor_maps.append(scanned_map)
+
+ return VendorRepositoriesMapCollection(maps=vendor_maps)
+ for pesid in [entry['source']] + entry['target']:
+ if pesid not in existing_pesids:
+ raise ValueError(
+ 'The {} pesid is not related to any repository.'
+ .format(pesid)
+ )
+ repomap.add_mapping(
+ source_major_version=mapping['source_major_version'],
+ target_major_version=mapping['target_major_version'],
+ source_pesid=entry['source'],
+ target_pesid=entry['target'],
+ )
+ return repomap
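A small standalone sketch of the mapping bookkeeping RepoMapData keeps internally: mappings are keyed by "<source_major>:<target_major>" and each source pesid maps to a set of target pesids (plain dicts here instead of RepoMapEntry models, with invented pesid names):

    from collections import defaultdict

    mapping = {}

    def add_mapping(source_major, target_major, source_pesid, target_pesid):
        key = "{}:{}".format(source_major, target_major)
        rmap = mapping.setdefault(key, defaultdict(set))
        if isinstance(target_pesid, list):
            rmap[source_pesid].update(target_pesid)
        else:
            rmap[source_pesid].add(target_pesid)

    add_mapping("7", "8", "vendor7-base", ["vendor8-base", "vendor8-extras"])
    add_mapping("7", "8", "vendor7-base", "vendor8-appstream")
    print(sorted(mapping["7:8"]["vendor7-base"]))
    # ['vendor8-appstream', 'vendor8-base', 'vendor8-extras']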
diff --git a/repos/system_upgrade/common/libraries/rhsm.py b/repos/system_upgrade/common/libraries/rhsm.py
index b7e4b21..dc038bf 100644
--- a/repos/system_upgrade/common/libraries/rhsm.py
@@ -1140,20 +1418,14 @@ index 0000000..de4056f
+ topic = VendorTopic
+ data = fields.List(fields.String())
diff --git a/repos/system_upgrade/common/models/repositoriesmap.py b/repos/system_upgrade/common/models/repositoriesmap.py
index c187333..f5f23f4 100644
index c187333..a068a70 100644
--- a/repos/system_upgrade/common/models/repositoriesmap.py
+++ b/repos/system_upgrade/common/models/repositoriesmap.py
@@ -92,3 +92,10 @@ class RepositoriesMapping(Model):
@@ -92,3 +92,4 @@ class RepositoriesMapping(Model):
mapping = fields.List(fields.Model(RepoMapEntry), default=[])
repositories = fields.List(fields.Model(PESIDRepositoryEntry), default=[])
+ file = fields.String(default="repomap.csv")
+
+
+class VendorRepositoriesMapCollection(Model):
+ topic = TransactionTopic
+
+ maps = fields.List(fields.Model(RepositoriesMapping))
diff --git a/repos/system_upgrade/common/models/vendorsignatures.py b/repos/system_upgrade/common/models/vendorsignatures.py
new file mode 100644
index 0000000..f456aec
@@ -1168,6 +1440,24 @@ index 0000000..f456aec
+ topic = VendorTopic
+ vendor = fields.String()
+ sigs = fields.List(fields.String())
diff --git a/repos/system_upgrade/common/models/vendorsourcerepos.py b/repos/system_upgrade/common/models/vendorsourcerepos.py
new file mode 100644
index 0000000..b7a219b
--- /dev/null
+++ b/repos/system_upgrade/common/models/vendorsourcerepos.py
@@ -0,0 +1,12 @@
+from leapp.models import Model, fields
+from leapp.topics import VendorTopic
+
+
+class VendorSourceRepos(Model):
+ """
+ This model contains the data on all source repositories associated with a specific vendor.
+ Its data is used to determine whether the vendor should be included into the upgrade process.
+ """
+ topic = VendorTopic
+ vendor = fields.String()
+ source_repoids = fields.List(fields.String())
diff --git a/repos/system_upgrade/common/topics/vendortopic.py b/repos/system_upgrade/common/topics/vendortopic.py
new file mode 100644
index 0000000..014b7af