import UBI sos-4.6.1-1.el8

parent 157c2d14d2
commit cce1af82c4

.gitignore (vendored), 2 lines changed
@@ -1,2 +1,2 @@
-SOURCES/sos-4.6.0.tar.gz
+SOURCES/sos-4.6.1.tar.gz
 SOURCES/sos-audit-0.3.tgz
(source checksum list), 2 lines changed
@@ -1,2 +1,2 @@
-90d8b664a4e0593d60357342bb5f73af9908e29d SOURCES/sos-4.6.0.tar.gz
+b6999d34ade3b3d0b88390ab525d31c6a8dc2950 SOURCES/sos-4.6.1.tar.gz
 9d478b9f0085da9178af103078bbf2fd77b0175a SOURCES/sos-audit-0.3.tgz
SOURCES/sos-RHEL-13697-aap-passwords.patch (deleted, 98 lines)
@@ -1,98 +0,0 @@
From c6ab24eb8e2bf02c75d0ffa8447032543eb4ea43 Mon Sep 17 00:00:00 2001
From: "Dr. Jason Breitweg" <jason@breitweg.com>
Date: Tue, 10 Oct 2023 09:50:29 +0200
Subject: [PATCH] Fix dynaconf obfuscation and add AUTH_LDAP_BIND_PASSWORD

Signed-off-by: Dr. Jason Breitweg <jason@breitweg.com>

Fixed style issues

Signed-off-by: Jason Breitweg jbreitwe@redhat.com

Signed-off-by: Dr. Jason Breitweg <jason@breitweg.com>

Fixed yet more linting errors

Signed-off-by: Jason Breitweg jbreitwe@redhat.com

Signed-off-by: Dr. Jason Breitweg <jason@breitweg.com>
---
sos/report/plugins/pulp.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/sos/report/plugins/pulp.py b/sos/report/plugins/pulp.py
index df007168a..f5c762f48 100644
--- a/sos/report/plugins/pulp.py
+++ b/sos/report/plugins/pulp.py
@@ -170,10 +170,13 @@ def postproc(self):
repl = r"\1********"
self.do_path_regex_sub("/etc/pulp(.*)(.json$)", jreg, repl)

- # obfuscate SECRET_KEY = .. and 'PASSWORD': .. in dynaconf list output
- # and also in settings.py
+ # obfuscate SECRET_KEY = .., 'PASSWORD': ..,
+ # and AUTH_LDAP_BIND_PASSWORD = ..
+ # in dynaconf list output and also in settings.py
# count with option that PASSWORD is with(out) quotes or in capitals
- key_pass_re = r"(SECRET_KEY\s*=|(password|PASSWORD)(\"|'|:)+)\s*(\S*)"
+ key_pass_re = r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)" \
+ r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)" \
+ r"(\"|'|:)+)\s*(\S*)"
repl = r"\1 ********"
self.do_path_regex_sub("/etc/pulp/settings.py", key_pass_re, repl)
self.do_cmd_output_sub("dynaconf list", key_pass_re, repl)
From 866abe6119e846e243d586b1e353a6585ed83899 Mon Sep 17 00:00:00 2001
From: Pavel Moravec <pmoravec@redhat.com>
Date: Wed, 18 Oct 2023 13:38:29 +0200
Subject: [PATCH] [pulpcore] Scrub AUTH_LDAP_BIND_PASSWORD value

Likewise in #3379, scrub the password also in pulpcore plugin.

Resolves: #3389

Signed-off-by: Pavel Moravec <pmoravec@redhat.com>
---
sos/report/plugins/pulpcore.py | 27 ++++++++-------------------
1 file changed, 8 insertions(+), 19 deletions(-)

diff --git a/sos/report/plugins/pulpcore.py b/sos/report/plugins/pulpcore.py
index 04efae9f8..649626ada 100644
--- a/sos/report/plugins/pulpcore.py
+++ b/sos/report/plugins/pulpcore.py
@@ -144,29 +144,18 @@ def build_query_cmd(self, query, csv=False):
return _dbcmd % (self.dbhost, self.dbport, self.dbname, quote(query))

def postproc(self):
- # TODO obfuscate from /etc/pulp/settings.py :
+ # obfuscate from /etc/pulp/settings.py and "dynaconf list":
# SECRET_KEY = "eKfeDkTnvss7p5WFqYdGPWxXfHnsbDBx"
# 'PASSWORD': 'tGrag2DmtLqKLTWTQ6U68f6MAhbqZVQj',
+ # AUTH_LDAP_BIND_PASSWORD = 'ouch-a-secret'
# the PASSWORD can be also in an one-liner list, so detect its value
# in non-greedy manner till first ',' or '}'
- self.do_path_regex_sub(
- "/etc/pulp/settings.py",
- r"(SECRET_KEY\s*=\s*)(.*)",
- r"\1********")
- self.do_path_regex_sub(
- "/etc/pulp/settings.py",
- r"(PASSWORD\S*\s*:\s*)(.*?)(,|\})",
- r"\1********\3")
- # apply the same for "dynaconf list" output that prints settings.py
- # in a pythonic format
- self.do_cmd_output_sub(
- "dynaconf list",
- r"(SECRET_KEY<str>\s*)'(.*)'",
- r"\1********")
- self.do_cmd_output_sub(
- "dynaconf list",
- r"(PASSWORD\S*\s*:\s*)(.*)",
- r"\1********")
+ key_pass_re = r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)" \
+ r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)" \
+ r"(\"|'|:)+)\s*(\S*)"
+ repl = r"\1 ********"
+ self.do_path_regex_sub("/etc/pulp/settings.py", key_pass_re, repl)
+ self.do_cmd_output_sub("dynaconf list", key_pass_re, repl)


# vim: set et ts=4 sw=4 :
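For reference, a quick standalone illustration (not part of either patch) of what the shared key_pass_re expression scrubs; the sample secrets are the made-up values quoted in the pulpcore comments above:

import re

key_pass_re = r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)" \
              r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)" \
              r"(\"|'|:)+)\s*(\S*)"
repl = r"\1 ********"

samples = [
    'SECRET_KEY = "eKfeDkTnvss7p5WFqYdGPWxXfHnsbDBx"',
    "'PASSWORD': 'tGrag2DmtLqKLTWTQ6U68f6MAhbqZVQj',",
    "AUTH_LDAP_BIND_PASSWORD = 'ouch-a-secret'",
]
for line in samples:
    # prints: SECRET_KEY = ********
    #         'PASSWORD': ********
    #         AUTH_LDAP_BIND_PASSWORD = ********
    print(re.sub(key_pass_re, repl, line))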
SOURCES/sos-RHEL-21177-device-auth.patch (new file, 502 lines)
@@ -0,0 +1,502 @@
From c1a08482f9f724395102be22d94382cbda14dbce Mon Sep 17 00:00:00 2001
From: Jose Castillo <jcastillo@redhat.com>
Date: Mon, 9 Oct 2023 16:28:15 +0100
Subject: [PATCH] [redhat] Change authentication method for RHEL

The authentication method for RHEL uploads to the
customer portal is changing in 2024 to Device Auth
tokens, from user/password basic authorization.
To accomplish this, one new class is created:
DeviceAuth (deviceauth.py), that takes care of
managing OID token authentication.

Closes: RH: SUPDEV-63

Signed-off-by: Jose Castillo <jcastillo@redhat.com>
---
sos/policies/auth/__init__.py | 210 +++++++++++++++++++++++++++++++++
sos/policies/distros/redhat.py | 121 ++++++++++++++-----
2 files changed, 300 insertions(+), 31 deletions(-)
create mode 100644 sos/policies/auth/__init__.py

diff --git a/sos/policies/auth/__init__.py b/sos/policies/auth/__init__.py
new file mode 100644
index 000000000..5b62a4953
--- /dev/null
+++ b/sos/policies/auth/__init__.py
@@ -0,0 +1,210 @@
+# Copyright (C) 2023 Red Hat, Inc., Jose Castillo <jcastillo@redhat.com>
+
+# This file is part of the sos project: https://github.com/sosreport/sos
+#
+# This copyrighted material is made available to anyone wishing to use,
+# modify, copy, or redistribute it subject to the terms and conditions of
+# version 2 of the GNU General Public License.
+#
+# See the LICENSE file in the source distribution for further information.
+
+import logging
+try:
+ import requests
+ REQUESTS_LOADED = True
+except ImportError:
+ REQUESTS_LOADED = False
+import time
+from datetime import datetime, timedelta
+
+DEVICE_AUTH_CLIENT_ID = "sos-tools"
+GRANT_TYPE_DEVICE_CODE = "urn:ietf:params:oauth:grant-type:device_code"
+
+logger = logging.getLogger("sos")
+
+
+class DeviceAuthorizationClass:
+ """
+ Device Authorization Class
+ """
+
+ def __init__(self, client_identifier_url, token_endpoint):
+
+ self._access_token = None
+ self._access_expires_at = None
+ self.__device_code = None
+
+ self.client_identifier_url = client_identifier_url
+ self.token_endpoint = token_endpoint
+ self._use_device_code_grant()
+
+ def _use_device_code_grant(self):
+ """
+ Start the device auth flow. In the future we will
+ store the tokens in an in-memory keyring.
+
+ """
+
+ self._request_device_code()
+ print(
+ "Please visit the following URL to authenticate this"
+ f" device: {self._verification_uri_complete}"
+ )
+ self.poll_for_auth_completion()
+
+ def _request_device_code(self):
+ """
+ Initialize new Device Authorization Grant attempt by
+ requesting a new device code.
+
+ """
+ data = "client_id={}".format(DEVICE_AUTH_CLIENT_ID)
+ headers = {'content-type': 'application/x-www-form-urlencoded'}
+ if not REQUESTS_LOADED:
+ raise Exception("python3-requests is not installed and is required"
+ " for obtaining device auth token.")
+ try:
+ res = requests.post(
+ self.client_identifier_url,
+ data=data,
+ headers=headers)
+ res.raise_for_status()
+ response = res.json()
+ self._user_code = response.get("user_code")
+ self._verification_uri = response.get("verification_uri")
+ self._interval = response.get("interval")
+ self.__device_code = response.get("device_code")
+ self._verification_uri_complete = response.get(
+ "verification_uri_complete")
+ except requests.HTTPError as e:
+ raise requests.HTTPError("HTTP request failed "
+ "while attempting to acquire the tokens."
+ f"Error returned was {res.status_code} "
+ f"{e}")
+
+ def poll_for_auth_completion(self):
+ """
+ Continuously poll OIDC token endpoint until the user is successfully
+ authenticated or an error occurs.
+
+ """
+ token_data = {'grant_type': GRANT_TYPE_DEVICE_CODE,
+ 'client_id': DEVICE_AUTH_CLIENT_ID,
+ 'device_code': self.__device_code}
+
+ if not REQUESTS_LOADED:
+ raise Exception("python3-requests is not installed and is required"
+ " for obtaining device auth token.")
+ while self._access_token is None:
+ time.sleep(self._interval)
+ try:
+ check_auth_completion = requests.post(self.token_endpoint,
+ data=token_data)
+
+ status_code = check_auth_completion.status_code
+
+ if status_code == 200:
+ logger.info("The SSO authentication is successful")
+ self._set_token_data(check_auth_completion.json())
+ if status_code not in [200, 400]:
+ raise Exception(status_code, check_auth_completion.text)
+ if status_code == 400 and \
+ check_auth_completion.json()['error'] not in \
+ ("authorization_pending", "slow_down"):
+ raise Exception(status_code, check_auth_completion.text)
+ except requests.exceptions.RequestException as e:
+ logger.error(f"Error was found while posting a request: {e}")
+
+ def _set_token_data(self, token_data):
+ """
+ Set the class attributes as per the input token_data received.
+ In the future we will persist the token data in a local,
+ in-memory keyring, to avoid visting the browser frequently.
+ :param token_data: Token data containing access_token, refresh_token
+ and their expiry etc.
+ """
+ self._access_token = token_data.get("access_token")
+ self._access_expires_at = datetime.utcnow() + \
+ timedelta(seconds=token_data.get("expires_in"))
+ self._refresh_token = token_data.get("refresh_token")
+ self._refresh_expires_in = token_data.get("refresh_expires_in")
+ if self._refresh_expires_in == 0:
+ self._refresh_expires_at = datetime.max
+ else:
+ self._refresh_expires_at = datetime.utcnow() + \
+ timedelta(seconds=self._refresh_expires_in)
+
+ def get_access_token(self):
+ """
+ Get the valid access_token at any given time.
+ :return: Access_token
+ :rtype: string
+ """
+ if self.is_access_token_valid():
+ return self._access_token
+ else:
+ if self.is_refresh_token_valid():
+ self._use_refresh_token_grant()
+ return self._access_token
+ else:
+ self._use_device_code_grant()
+ return self._access_token
+
+ def is_access_token_valid(self):
+ """
+ Check the validity of access_token. We are considering it invalid 180
+ sec. prior to it's exact expiry time.
+ :return: True/False
+
+ """
+ return self._access_token and self._access_expires_at and \
+ self._access_expires_at - timedelta(seconds=180) > \
+ datetime.utcnow()
+
+ def is_refresh_token_valid(self):
+ """
+ Check the validity of refresh_token. We are considering it invalid
+ 180 sec. prior to it's exact expiry time.
+
+ :return: True/False
+
+ """
+ return self._refresh_token and self._refresh_expires_at and \
+ self._refresh_expires_at - timedelta(seconds=180) > \
+ datetime.utcnow()
+
+ def _use_refresh_token_grant(self, refresh_token=None):
+ """
+ Fetch the new access_token and refresh_token using the existing
+ refresh_token and persist it.
+ :param refresh_token: optional param for refresh_token
+
+ """
+ if not REQUESTS_LOADED:
+ raise Exception("python3-requests is not installed and is required"
+ " for obtaining device auth token.")
+ refresh_token_data = {'client_id': DEVICE_AUTH_CLIENT_ID,
+ 'grant_type': 'refresh_token',
+ 'refresh_token': self._refresh_token if not
+ refresh_token else refresh_token}
+
+ refresh_token_res = requests.post(self.token_endpoint,
+ data=refresh_token_data)
+
+ if refresh_token_res.status_code == 200:
+ self._set_token_data(refresh_token_res.json())
+
+ elif refresh_token_res.status_code == 400 and 'invalid' in\
+ refresh_token_res.json()['error']:
+ logger.warning("Problem while fetching the new tokens from refresh"
+ " token grant - {} {}."
+ " New Device code will be requested !".format
+ (refresh_token_res.status_code,
+ refresh_token_res.json()['error']))
+ self._use_device_code_grant()
+ else:
+ raise Exception(
+ "Something went wrong while using the "
+ "Refresh token grant for fetching tokens:"
+ f" Returned status code {refresh_token_res.status_code}"
+ f" and error {refresh_token_res.json()['error']}")
diff --git a/sos/policies/distros/redhat.py b/sos/policies/distros/redhat.py
index bdbe8f952..02cc4cc2f 100644
--- a/sos/policies/distros/redhat.py
+++ b/sos/policies/distros/redhat.py
@@ -12,6 +12,7 @@
import os
import sys
import re
+from sos.policies.auth import DeviceAuthorizationClass

from sos.report.plugins import RedHatPlugin
from sos.presets.redhat import (RHEL_PRESETS, ATOMIC_PRESETS, RHV, RHEL,
@@ -51,6 +52,10 @@ class RedHatPolicy(LinuxPolicy):
default_container_runtime = 'podman'
sos_pkg_name = 'sos'
sos_bin_path = '/usr/sbin'
+ client_identifier_url = "https://sso.redhat.com/auth/"\
+ "realms/redhat-external/protocol/openid-connect/auth/device"
+ token_endpoint = "https://sso.redhat.com/auth/realms/"\
+ "redhat-external/protocol/openid-connect/token"

def __init__(self, sysroot=None, init=None, probe_runtime=True,
remote_exec=None):
@@ -228,6 +233,7 @@ class RHELPolicy(RedHatPolicy):
""" + disclaimer_text + "%(vendor_text)s\n")
_upload_url = RH_SFTP_HOST
_upload_method = 'post'
+ _device_token = None

def __init__(self, sysroot=None, init=None, probe_runtime=True,
remote_exec=None):
@@ -266,24 +272,23 @@ def check(cls, remote=''):

def prompt_for_upload_user(self):
if self.commons['cmdlineopts'].upload_user:
- return
- # Not using the default, so don't call this prompt for RHCP
- if self.commons['cmdlineopts'].upload_url:
- super(RHELPolicy, self).prompt_for_upload_user()
- return
- if not self.get_upload_user():
- if self.case_id:
- self.upload_user = input(_(
- "Enter your Red Hat Customer Portal username for "
- "uploading [empty for anonymous SFTP]: ")
- )
- else: # no case id provided => failover to SFTP
- self.upload_url = RH_SFTP_HOST
- self.ui_log.info("No case id provided, uploading to SFTP")
- self.upload_user = input(_(
- "Enter your Red Hat Customer Portal username for "
- "uploading to SFTP [empty for anonymous]: ")
- )
+ self.ui_log.info(
+ _("The option --upload-user has been deprecated in favour"
+ " of device authorization in RHEL")
+ )
+ if not self.case_id:
+ # no case id provided => failover to SFTP
+ self.upload_url = RH_SFTP_HOST
+ self.ui_log.info("No case id provided, uploading to SFTP")
+
+ def prompt_for_upload_password(self):
+ # With OIDC we don't ask for user/pass anymore
+ if self.commons['cmdlineopts'].upload_pass:
+ self.ui_log.info(
+ _("The option --upload-pass has been deprecated in favour"
+ " of device authorization in RHEL")
+ )
+ return

def get_upload_url(self):
if self.upload_url:
@@ -292,10 +297,42 @@ def get_upload_url(self):
return self.commons['cmdlineopts'].upload_url
elif self.commons['cmdlineopts'].upload_protocol == 'sftp':
return RH_SFTP_HOST
+ elif not self.commons['cmdlineopts'].case_id:
+ self.ui_log.info("No case id provided, uploading to SFTP")
+ return RH_SFTP_HOST
else:
rh_case_api = "/support/v1/cases/%s/attachments"
return RH_API_HOST + rh_case_api % self.case_id

+ def _get_upload_https_auth(self):
+ str_auth = "Bearer {}".format(self._device_token)
+ return {'Authorization': str_auth}
+
+ def _upload_https_post(self, archive, verify=True):
+ """If upload_https() needs to use requests.post(), use this method.
+
+ Policies should override this method instead of the base upload_https()
+
+ :param archive: The open archive file object
+ """
+ files = {
+ 'file': (archive.name.split('/')[-1], archive,
+ self._get_upload_headers())
+ }
+ # Get the access token at this point. With this,
+ # we cover the cases where report generation takes
+ # longer than the token timeout
+ RHELAuth = DeviceAuthorizationClass(
+ self.client_identifier_url,
+ self.token_endpoint
+ )
+ self._device_token = RHELAuth.get_access_token()
+ self.ui_log.info("Device authorized correctly. Uploading file to "
+ f"{self.get_upload_url_string()}")
+ return requests.post(self.get_upload_url(), files=files,
+ headers=self._get_upload_https_auth(),
+ verify=verify)
+
def _get_upload_headers(self):
if self.get_upload_url().startswith(RH_API_HOST):
return {'isPrivate': 'false', 'cache-control': 'no-cache'}
@@ -332,15 +369,38 @@ def upload_sftp(self):
" for obtaining SFTP auth token.")
_token = None
_user = None
+
+ # We may have a device token already if we attempted
+ # to upload via http but the upload failed. So
+ # lets check first if there isn't one.
+ if not self._device_token:
+ try:
+ RHELAuth = DeviceAuthorizationClass(
+ self.client_identifier_url,
+ self.token_endpoint
+ )
+ except Exception as e:
+ # We end up here if the user cancels the device
+ # authentication in the web interface
+ if "end user denied" in str(e):
+ self.ui_log.info(
+ "Device token authorization "
+ "has been cancelled by the user."
+ )
+ else:
+ self._device_token = RHELAuth.get_access_token()
+ if self._device_token:
+ self.ui_log.info("Device authorized correctly. Uploading file to"
+ f" {self.get_upload_url_string()}")
+
url = RH_API_HOST + '/support/v2/sftp/token'
- # we have a username and password, but we need to reset the password
- # to be the token returned from the auth endpoint
- if self.get_upload_user() and self.get_upload_password():
- auth = self.get_upload_https_auth()
- ret = requests.post(url, auth=auth, timeout=10)
+ ret = None
+ if self._device_token:
+ headers = self._get_upload_https_auth()
+ ret = requests.post(url, headers=headers, timeout=10)
if ret.status_code == 200:
# credentials are valid
- _user = self.get_upload_user()
+ _user = json.loads(ret.text)['username']
_token = json.loads(ret.text)['token']
else:
self.ui_log.debug(
@@ -351,8 +411,7 @@ def upload_sftp(self):
"Unable to retrieve Red Hat auth token using provided "
"credentials. Will try anonymous."
)
- # we either do not have a username or password/token, or both
- if not _token:
+ else:
adata = {"isAnonymous": True}
anon = requests.post(url, data=json.dumps(adata), timeout=10)
if anon.status_code == 200:
@@ -368,7 +427,6 @@ def upload_sftp(self):
f"DEBUG: anonymous request failed (status: "
f"{anon.status_code}): {anon.json()}"
)
-
if _user and _token:
return super(RHELPolicy, self).upload_sftp(user=_user,
password=_token)
@@ -380,17 +438,18 @@ def upload_archive(self, archive):
"""
try:
if self.upload_url and self.upload_url.startswith(RH_API_HOST) and\
- (not self.get_upload_user() or not self.get_upload_password()):
+ (not self.get_upload_user() or
+ not self.get_upload_password()):
self.upload_url = RH_SFTP_HOST
uploaded = super(RHELPolicy, self).upload_archive(archive)
- except Exception:
+ except Exception as e:
uploaded = False
if not self.upload_url.startswith(RH_API_HOST):
raise
else:
self.ui_log.error(
- _(f"Upload to Red Hat Customer Portal failed. Trying "
- f"{RH_SFTP_HOST}")
+ _(f"Upload to Red Hat Customer Portal failed due to "
+ f"{e}. Trying {RH_SFTP_HOST}")
)
self.upload_url = RH_SFTP_HOST
uploaded = super(RHELPolicy, self).upload_archive(archive)
From d338a232cd7c829ca8ca5e5febef51035d1f7da5 Mon Sep 17 00:00:00 2001
From: Pavel Moravec <pmoravec@redhat.com>
Date: Wed, 10 Jan 2024 16:47:44 +0100
Subject: [PATCH] [build] Bump version to 4.6.1

Signed-off-by: Pavel Moravec <pmoravec@redhat.com>
---
docs/conf.py | 4 ++--
sos.spec | 5 ++++-
sos/__init__.py | 2 +-
3 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 5f105373e..57d1b9297 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '4.6.0'
+version = '4.6.1'
# The full version, including alpha/beta/rc tags.
-release = '4.6.0'
+release = '4.6.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/sos.spec b/sos.spec
index b575b5232..a08e2857b 100644
--- a/sos.spec
+++ b/sos.spec
@@ -1,6 +1,6 @@
Summary: A set of tools to gather troubleshooting information from a system
Name: sos
-Version: 4.6.0
+Version: 4.6.1
Release: 1%{?dist}
Source0: https://github.com/sosreport/sos/archive/%{name}-%{version}.tar.gz
License: GPL-2.0-or-later
@@ -90,6 +90,9 @@ rm -rf %{buildroot}/usr/config/
%config(noreplace) %{_sysconfdir}/sos/sos.conf

%changelog
+* Wed Jan 10 2024 Pavel Moravec <pmoravec@redhat.com> = 4.6.1
+- New upstream release
+
* Thu Aug 17 2023 Jake Hunsaker <jacob.r.hunsaker@gmail.com> = 4.6.0
- New upstream release

diff --git a/sos/__init__.py b/sos/__init__.py
index 78e452676..18d18c4c7 100644
--- a/sos/__init__.py
+++ b/sos/__init__.py
@@ -14,7 +14,7 @@
This module houses the i18n setup and message function. The default is to use
gettext to internationalize messages.
"""
-__version__ = "4.6.0"
+__version__ = "4.6.1"

import os
import sys
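A minimal sketch (illustrative only, not part of the patch) of how the new class is meant to be driven; the endpoint values mirror the class attributes added to RedHatPolicy above:

from sos.policies.auth import DeviceAuthorizationClass

client_identifier_url = ("https://sso.redhat.com/auth/realms/redhat-external"
                         "/protocol/openid-connect/auth/device")
token_endpoint = ("https://sso.redhat.com/auth/realms/redhat-external"
                  "/protocol/openid-connect/token")

# Instantiating the class starts the device flow: it prints a verification
# URL for the user and then polls the token endpoint until approval.
auth = DeviceAuthorizationClass(client_identifier_url, token_endpoint)

# get_access_token() refreshes or restarts the flow as needed, so it can be
# called again later (e.g. after a long report run) to obtain a valid token.
headers = {'Authorization': "Bearer {}".format(auth.get_access_token())}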
SOURCES/sos-SUPDEV145-ovnkube-logs.patch (deleted, 126 lines)
@@ -1,126 +0,0 @@
From 43714aa5aeb3dcb0dec17dd026ca5c394cc06afd Mon Sep 17 00:00:00 2001
From: Periyasamy Palanisamy <pepalani@redhat.com>
Date: Fri, 11 Aug 2023 14:30:42 +0200
Subject: [PATCH] Collect additional ovnkube node logs

With Interconnect support in latest OVN-Kubernetes, ovnkube-nodes
logs grew large. This commit adds the ability to collect those
additional logs.

Signed-off-by: Periyasamy Palanisamy <pepalani@redhat.com>
---
sos/report/plugins/openshift_ovn.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py
index d81fc97aa..2d804e9ae 100644
--- a/sos/report/plugins/openshift_ovn.py
+++ b/sos/report/plugins/openshift_ovn.py
@@ -30,7 +30,8 @@ def setup(self):
# Collect ovn interconnect specific files if exists.
self.add_copy_spec([
"/var/lib/ovn-ic/etc/ovnnb_db.db",
- "/var/lib/ovn-ic/etc/ovnsb_db.db"
+ "/var/lib/ovn-ic/etc/ovnsb_db.db",
+ "/var/lib/ovn-ic/etc/libovsdb*log*"
])

# The ovn cluster/status is not valid anymore for interconnect setup.
From e11a594f942f9ae98aeb644c573293b391050657 Mon Sep 17 00:00:00 2001
From: Periyasamy Palanisamy <pepalani@redhat.com>
Date: Tue, 15 Aug 2023 11:47:20 +0200
Subject: [PATCH] Collect ovn logs as much as possible

The sosreport limits to collect logs at maximum of 25 MB in a given
collection passed into add_copy_spec method. so this may lead into
logs wouldn't have fully collected when user collected sos report
without --all-logs option.
Hence this commit ensures logs and dbs collected as much as possible
when --all-logs option is not specified.

Signed-off-by: Periyasamy Palanisamy <pepalani@redhat.com>
---
sos/report/plugins/openshift_ovn.py | 25 +++++++++++++++++--------
1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py
index 2d804e9ae..347b15eea 100644
--- a/sos/report/plugins/openshift_ovn.py
+++ b/sos/report/plugins/openshift_ovn.py
@@ -20,19 +20,28 @@ class OpenshiftOVN(Plugin, RedHatPlugin):
profiles = ('openshift',)

def setup(self):
+ all_logs = self.get_option("all_logs")
+
self.add_copy_spec([
"/var/lib/ovn/etc/ovnnb_db.db",
"/var/lib/ovn/etc/ovnsb_db.db",
- "/var/lib/openvswitch/etc/keys",
- "/var/log/openvswitch/libreswan.log",
- "/var/log/openvswitch/ovs-monitor-ipsec.log"
- ])
- # Collect ovn interconnect specific files if exists.
+ "/var/lib/openvswitch/etc/keys"
+ ], sizelimit=300)
+
+ # Collect ovn interconnect specific db files if exists.
self.add_copy_spec([
"/var/lib/ovn-ic/etc/ovnnb_db.db",
- "/var/lib/ovn-ic/etc/ovnsb_db.db",
- "/var/lib/ovn-ic/etc/libovsdb*log*"
- ])
+ "/var/lib/ovn-ic/etc/ovnsb_db.db"
+ ], sizelimit=300)
+
+ # Collect libovsdb logs in case of ovn interconnect setup.
+ if not all_logs:
+ self.add_copy_spec([
+ "/var/lib/ovn-ic/etc/libovsdb.log",
+ "/var/lib/ovn-ic/etc/libovsdb*log.gz"
+ ], sizelimit=100)
+ else:
+ self.add_copy_spec("/var/lib/ovn-ic/etc/libovsdb*log*")

# The ovn cluster/status is not valid anymore for interconnect setup.
self.add_cmd_output([
From 7cd6f61fd15ae7fc93d62cca927204351cdc1322 Mon Sep 17 00:00:00 2001
From: Periyasamy Palanisamy <pepalani@redhat.com>
Date: Wed, 30 Aug 2023 09:56:40 +0200
Subject: [PATCH] Collect logs from ovnkube-controller container

This enables ovn sos report plugin to collect logs ovnkube-controller
container because ovn-kubernetes now provides option to run both
ovnkube-node and ovnkube-controller in same container with this
PR https://github.com/ovn-org/ovn-kubernetes/pull/3807.

Signed-off-by: Periyasamy Palanisamy <pepalani@redhat.com>
---
sos/report/plugins/openshift_ovn.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py
index 347b15eea..cb48057d3 100644
--- a/sos/report/plugins/openshift_ovn.py
+++ b/sos/report/plugins/openshift_ovn.py
@@ -16,7 +16,8 @@ class OpenshiftOVN(Plugin, RedHatPlugin):
"""
short_desc = 'Openshift OVN'
plugin_name = "openshift_ovn"
- containers = ('ovnkube-master', 'ovnkube-node', 'ovn-ipsec')
+ containers = ('ovnkube-master', 'ovnkube-node', 'ovn-ipsec',
+ 'ovnkube-controller')
profiles = ('openshift',)

def setup(self):
@@ -54,6 +55,10 @@ def setup(self):
'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' +
'ct-zone-list'],
container='ovnkube-node')
+ self.add_cmd_output([
+ 'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' +
+ 'ct-zone-list'],
+ container='ovnkube-controller')
# Collect ovs ct-zone-list directly on host for interconnect setup.
self.add_cmd_output([
'ovs-appctl -t /var/run/ovn-ic/ovn-controller.*.ctl ' +
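For context, a hedged sketch (plugin name and paths are hypothetical, not from the sos tree) of the pattern the second patch above relies on: per-spec sizelimit values on add_copy_spec bound the collected size when --all-logs is not requested:

from sos.report.plugins import Plugin, RedHatPlugin

class ExampleLogs(Plugin, RedHatPlugin):
    # hypothetical plugin, for illustration only
    short_desc = 'example bounded log collection'
    plugin_name = 'example_logs'

    def setup(self):
        if not self.get_option("all_logs"):
            # cap this collection at 100 MiB instead of the default limit
            self.add_copy_spec(["/var/log/example/example.log",
                                "/var/log/example/example*log.gz"],
                               sizelimit=100)
        else:
            # with --all-logs, collect everything matching the glob
            self.add_copy_spec("/var/log/example/example*log*")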
SOURCES/sos-SUPDEV148-microshift-greenboot.patch (deleted, 108 lines)
@@ -1,108 +0,0 @@
From 6526985ea2464944c5cf4cd87c2d981a77363077 Mon Sep 17 00:00:00 2001
From: Pablo Acevedo Montserrat <pacevedo@redhat.com>
Date: Tue, 12 Sep 2023 10:24:38 +0200
Subject: [PATCH] [microshift] Add microshift-etcd.scope service

Signed-off-by: Pablo Acevedo Montserrat <pacevedo@redhat.com>
---
sos/report/plugins/microshift.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py
index 1b932d648..2cfafef04 100644
--- a/sos/report/plugins/microshift.py
+++ b/sos/report/plugins/microshift.py
@@ -28,7 +28,7 @@ class Microshift(Plugin, RedHatPlugin):
plugin_timeout = 900
packages = ('microshift', 'microshift-selinux', 'microshift-networking',
'microshift-greenboot')
- services = (plugin_name, 'greenboot-healthcheck',
+ services = (plugin_name, 'microshift-etcd.scope', 'greenboot-healthcheck',
'greenboot-task-runner', 'redboot-task-runner')
profiles = (plugin_name,)
localhost_kubeconfig = '/var/lib/microshift/resources/kubeadmin/kubeconfig'
From 765ac8f3cc8e8413278afbf2579eaac7c0419f72 Mon Sep 17 00:00:00 2001
From: Evgeny Slutsky <eslutsky@redhat.com>
Date: Thu, 7 Sep 2023 10:54:12 +0300
Subject: [PATCH] [greenboot] seperate logs to a standalone plugin.

Signed-off-by: Evgeny Slutsky <eslutsky@redhat.com>
---
sos/report/plugins/greenboot.py | 26 ++++++++++++++++++++++++++
sos/report/plugins/microshift.py | 6 ++----
2 files changed, 28 insertions(+), 4 deletions(-)
create mode 100644 sos/report/plugins/greenboot.py

diff --git a/sos/report/plugins/greenboot.py b/sos/report/plugins/greenboot.py
new file mode 100644
index 000000000..69b6607b0
--- /dev/null
+++ b/sos/report/plugins/greenboot.py
@@ -0,0 +1,26 @@
+# Copyright 2023 Red Hat, Inc. Evgeny Slutsky <eslutsky@redhat.com>
+# This file is part of the sos project: https://github.com/sosreport/sos
+#
+# This copyrighted material is made available to anyone wishing to use,
+# modify, copy, or redistribute it subject to the terms and conditions of
+# version 2 of the GNU General Public License.
+#
+# See the LICENSE file in the source distribution for further information.
+
+from sos.report.plugins import Plugin, RedHatPlugin
+
+
+class Greenboot(Plugin, RedHatPlugin):
+ """The greenboot plugin collects systemd service logs and configuration.
+ """
+
+ short_desc = 'Greenboot'
+ plugin_name = 'greenboot'
+ services = (plugin_name, 'greenboot-healthcheck',
+ 'greenboot-task-runner', 'redboot-task-runner',)
+ profiles = ('system',)
+
+ def setup(self):
+ self.add_copy_spec([
+ "/etc/greenboot/greenboot.conf",
+ ])
diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py
index 2cfafef04..669f4c021 100644
--- a/sos/report/plugins/microshift.py
+++ b/sos/report/plugins/microshift.py
@@ -26,10 +26,8 @@ class Microshift(Plugin, RedHatPlugin):
short_desc = 'Microshift'
plugin_name = 'microshift'
plugin_timeout = 900
- packages = ('microshift', 'microshift-selinux', 'microshift-networking',
- 'microshift-greenboot')
- services = (plugin_name, 'microshift-etcd.scope', 'greenboot-healthcheck',
- 'greenboot-task-runner', 'redboot-task-runner')
+ packages = ('microshift', 'microshift-selinux', 'microshift-networking',)
+ services = (plugin_name, 'microshift-etcd.scope',)
profiles = (plugin_name,)
localhost_kubeconfig = '/var/lib/microshift/resources/kubeadmin/kubeconfig'

From 0b72a1f07a5f46e22cb926d129bd8eb63ba20a9a Mon Sep 17 00:00:00 2001
From: Pablo Acevedo Montserrat <pacevedo@redhat.com>
Date: Tue, 19 Sep 2023 12:18:42 +0200
Subject: [PATCH] [microshift] Add /etc/microshift file copy spec

Signed-off-by: Pablo Acevedo Montserrat <pacevedo@redhat.com>
---
sos/report/plugins/microshift.py | 3 +++
1 file changed, 3 insertions(+)

diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py
index 669f4c021..8fe39ab29 100644
--- a/sos/report/plugins/microshift.py
+++ b/sos/report/plugins/microshift.py
@@ -146,6 +146,9 @@ def setup(self):
Output format for this function is based on `oc adm inspect` command,
which is used to retrieve all API resources from the cluster.
"""
+
+ self.add_copy_spec('/etc/microshift')
+
if self.path_exists('/var/lib/microshift-backups'):
self.add_copy_spec(['/var/lib/microshift-backups/*/version',
'/var/lib/microshift-backups/*.json'])
sos.spec
@@ -4,8 +4,8 @@
 Summary: A set of tools to gather troubleshooting information from a system
 Name: sos
-Version: 4.6.0
-Release: 5%{?dist}
+Version: 4.6.1
+Release: 1%{?dist}
 Group: Applications/System
 Source0: https://github.com/sosreport/sos/archive/%{version}/sos-%{version}.tar.gz
 Source1: sos-audit-%{auditversion}.tgz
@@ -22,9 +22,7 @@ Recommends: python3-pexpect
 Recommends: python3-pyyaml
 Conflicts: vdsm < 4.40
 Obsoletes: sos-collector
-Patch1: sos-SUPDEV145-ovnkube-logs.patch
-Patch2: sos-SUPDEV148-microshift-greenboot.patch
-Patch3: sos-RHEL-13697-aap-passwords.patch
+Patch1: sos-RHEL-21177-device-auth.patch

 %description
 Sos is a set of tools that gathers information about system
@@ -36,8 +34,6 @@ support technicians and developers.
 %setup -qn %{name}-%{version}
 %setup -T -D -a1 -q
 %patch1 -p1
-%patch2 -p1
-%patch3 -p1


 %build
@@ -111,6 +107,12 @@ of the system. Currently storage and filesystem commands are audited.
 %ghost /etc/audit/rules.d/40-sos-storage.rules

 %changelog
+* Thu Jan 11 2024 Pavel Moravec <pmoravec@redhat.com> = 4.6.1-1
+- rebase to upstream 4.6.1
+Resolves: RHEL-21173
+- [redhat] Change authentication method for RHEL
+Resolves: RHEL-21177
+
 * Wed Oct 18 2023 Pavel Moravec <pmoravec@redhat.com> = 4.6.0-5
 [pulpcore] Scrub AUTH_LDAP_BIND_PASSWORD value
 Resolves: RHEL-13697