import OL fence-agents-4.10.0-86.el9_6.4
This commit is contained in:
parent
961e4cc6df
commit
c40a5e9197
@ -1,23 +1,20 @@
|
||||
e1b766b2b1601fde67b3b19ed2f13b9746bb1cca SOURCES/MarkupSafe-2.0.1.tar.gz
|
||||
e1fb5dc6f95a85e7d1f93c6701b331201e8b5479 SOURCES/PyJWT-2.1.0-py3-none-any.whl
|
||||
53fc16036940089ceadd4127381e40fd6106a7ed SOURCES/PyYAML-5.1.tar.gz
|
||||
b53cba68215f389dffaa51ee2afe10c943278646 SOURCES/adal-1.2.7-py2.py3-none-any.whl
|
||||
0c47ce98be5a519023c16e10027ed1268c489fcc SOURCES/adal-1.2.7.tar.gz
|
||||
f524069cc0d499c78034b66cf0e8e45344e89791 SOURCES/aliyun-cli-3.0.198.tar.gz
|
||||
c6a6dd2c97adb5a22856ce78ad9e3fdc1cbb9760 SOURCES/aliyun-cli-go-vendor.tar.gz
|
||||
87ad43ef7b7e1cbcba1d743541b3118cffda63f8 SOURCES/aliyun-openapi-meta-5cf98b660.tar.gz
|
||||
bda476965c380701795849179ed91e9d8134ec7c SOURCES/aliyun-python-sdk-core-2.11.5.tar.gz
|
||||
2a3e6ee5713a3cfb47e63a739cc9a05580d95dd6 SOURCES/aliyun_python_sdk_ecs-4.24.7-py2.py3-none-any.whl
|
||||
459383a3bcb16956030e302008a0f140a5fec0fb SOURCES/awscli-2.2.15.tar.gz
|
||||
a08c16f613cb0b9a9f8ce7cae782ea20c40ce2af SOURCES/awscrt-0.11.13-cp39-cp39-manylinux2014_x86_64.whl
|
||||
46d2d17d958ae305ced32fdd6aa847b0cdf31989 SOURCES/azure-identity-1.10.0.zip
|
||||
70a8e279429feaa0ed39bf2da123e5d53c37b0e6 SOURCES/azure_common-1.1.27-py2.py3-none-any.whl
|
||||
63fee5e61283d1c972abd3786ed609da6fd7ea5b SOURCES/azure_core-1.15.0-py2.py3-none-any.whl
|
||||
584ff1897b56673bee12e77aec07e68ebe9f789b SOURCES/azure_mgmt_compute-21.0.0-py2.py3-none-any.whl
|
||||
e20df6c9635f1db9a3c891b9239b4319d88b1747 SOURCES/azure_mgmt_core-1.2.2-py2.py3-none-any.whl
|
||||
3bb341e87329c7507863f0ec53f8cb6e16d698d6 SOURCES/azure_mgmt_network-19.0.0-py2.py3-none-any.whl
|
||||
1f493a02d15374027ae2bcb2ea4daf5b907c528b SOURCES/azure-common-1.1.28.zip
|
||||
8f9ddd1b38ad9c712b17b4eb47f79faf92ab8fb6 SOURCES/azure_core-1.32.0.tar.gz
|
||||
5f357dc1a8a3794a9aa2c7ffbedbce73e0c84f26 SOURCES/azure_identity-1.19.0.tar.gz
|
||||
838eaf93a9cd4dfd3c87a342ee7691cf799b2df2 SOURCES/azure_mgmt_compute-34.0.0.tar.gz
|
||||
adc0e3e2d5b126174273efcc40a5e6fc8fe8a8ff SOURCES/azure_mgmt_core-1.5.0.tar.gz
|
||||
f70832bb6367a31808c22849ae2932dae9b17df0 SOURCES/azure_mgmt_network-28.1.0.tar.gz
|
||||
6591882be0937b2b900eab4ec70feadf34f71738 SOURCES/boto3-1.17.102-py2.py3-none-any.whl
|
||||
22507f43314ccb2dd203337e87ffafb385ba3627 SOURCES/botocore-1.20.102-py2.py3-none-any.whl
|
||||
6ef53a76455b377b02b4774c32a04e241cdb24eb SOURCES/botocore-2.0.0dev123.zip
|
||||
c953dcd6e69587e5b182d77255ed836172fea70a SOURCES/cachetools-4.2.2-py3-none-any.whl
|
||||
0d12f48faa727f0979e9ad5c4c80dfa32b73caff SOURCES/cachetools-4.2.4.tar.gz
|
||||
ec7e8dd8ef95edfdb83a1ea040b8b88507b47615 SOURCES/certifi-2023.7.22.tar.gz
|
||||
@ -26,10 +23,7 @@ ec7e8dd8ef95edfdb83a1ea040b8b88507b47615 SOURCES/certifi-2023.7.22.tar.gz
|
||||
e9eb83c71c09b3c8249bd7d6d2619b65fff03874 SOURCES/chardet-4.0.0-py2.py3-none-any.whl
|
||||
865df92e66e5dc7b940144cbad8115c07dc8784f SOURCES/charset-normalizer-2.0.7.tar.gz
|
||||
a8ee91adf4644bbdccfc73ead88f4cd0df7e3552 SOURCES/colorama-0.3.3.tar.gz
|
||||
444d5ea320f95cd6205535a1be2805598847191b SOURCES/colorama-0.4.3-py2.py3-none-any.whl
|
||||
536a57d70d505e4de8595650603d7e2ecc58b34b SOURCES/cryptography-3.3.2-cp36-abi3-manylinux2010_x86_64.whl
|
||||
4b50bebad4c5036c030a78cbb869d039bc91c4ec SOURCES/distro-1.5.0-py2.py3-none-any.whl
|
||||
aafeddc912b74557754b2aaece3f1364be8e9f6a SOURCES/docutils-0.15.2-py3-none-any.whl
|
||||
1ec823f807b73a377cdd47d12e2e34f046bfc889 SOURCES/fence-agents-4.10.0.tar.gz
|
||||
05d6d7a3df5bdbd9df1b37a65662e5dbe94f23fd SOURCES/flit_core-3.9.0.tar.gz
|
||||
f4e578dc0ed68d6667d7b36cdfc2647d55e9858f SOURCES/google-auth-2.3.0.tar.gz
|
||||
@ -39,16 +33,15 @@ ac160113ba8b78b0688edda9f9a088c0b4b5ded2 SOURCES/google_api_core-1.30.0-py2.py3-
|
||||
9e513ce4e7b36b8e81c607be440e0d6e6afe9833 SOURCES/google_auth_httplib2-0.1.0-py2.py3-none-any.whl
|
||||
dc553afa7a3f23b92ee9ecd27d0b15153c0e9f75 SOURCES/googleapis_common_protos-1.53.0-py2.py3-none-any.whl
|
||||
74ec77d2e2ef6b2ef8503e6e398faa6f3ba298ae SOURCES/httplib2-0.19.1-py3-none-any.whl
|
||||
999b6718b4d789d8ca0d2ddf7c07826154291825 SOURCES/idna-2.10-py2.py3-none-any.whl
|
||||
08c0449533fc94462f78652dea209099754d9ee4 SOURCES/idna-3.3.tar.gz
|
||||
240cc4206740fafacb74bbf0d0c4ff70e41c8a85 SOURCES/isodate-0.6.0-py2.py3-none-any.whl
|
||||
ea36ce1c780dd44f01225dca7f9995a6685a60cc SOURCES/isodate-0.6.1.tar.gz
|
||||
41fdca818f95b8f0d35298eaab42f4e714dedf19 SOURCES/jinja2-3.1.6.tar.gz
|
||||
68904717c48e95adb47d815178fff8d80f39b2ab SOURCES/jmespath-0.7.1-py2.py3-none-any.whl
|
||||
d06a9547b1a87e9c51b0a7c708189d993f2e3d89 SOURCES/kubernetes-12.0.1.tar.gz
|
||||
ecd73099139d222059443ad19dfeee3f715e1ab0 SOURCES/msal-1.18.0.tar.gz
|
||||
04e016bd1fa4ed6ddb852095a45d4f8c81a5b54a SOURCES/msal-extensions-1.0.0.tar.gz
|
||||
ba59fbd147307e7ef92a1fad259e7dc0b07e79e0 SOURCES/msrest-0.6.21-py2.py3-none-any.whl
|
||||
3d65a50b68e3aa506b6af42be485ed2710afa9da SOURCES/msrestazure-0.6.4-py2.py3-none-any.whl
|
||||
346d0213ff6527435a2a04c07c21a80accd7fdf7 SOURCES/msal-1.31.1.tar.gz
|
||||
e7db7d42807fb756777b01b1569075343c6122e9 SOURCES/msal_extensions-1.2.0.tar.gz
|
||||
00c5509205e59ebae09e5d3fe068ab61588e9b4a SOURCES/msrest-0.7.1.zip
|
||||
b21ec03f79d2a7ef4396d909f78130a92455c3c9 SOURCES/msrestazure-0.6.4.post1.tar.gz
|
||||
7e2f8f4cebf309ef6aaf740ee9073276d6937802 SOURCES/oauthlib-3.2.2.tar.gz
|
||||
570d69d8c108ebb8aee562389d13b07dfb61ce25 SOURCES/openshift-0.12.1.tar.gz
|
||||
2b10cb7681bc678ba4ff3be524b28d783e4095ce SOURCES/packaging-20.9-py2.py3-none-any.whl
|
||||
@ -56,7 +49,6 @@ bccbc1bf76a9db46998eb8e1ffa2f2a2baf9237a SOURCES/packaging-21.2-py3-none-any.whl
|
||||
0c3fc83ca045abeec9ce82bb7ee3e77f0390bca4 SOURCES/pexpect-4.8.0-py2.py3-none-any.whl
|
||||
18659a0dea5600df33eab90dec1b597e2437aebd SOURCES/poetry-core-1.0.7.tar.gz
|
||||
8fd43e96c5d0ad701cf3e332fb80c7e92e9ab883 SOURCES/portalocker-2.5.1.tar.gz
|
||||
b09c4655a4c8bd24c54a078e960750ec9e8688d6 SOURCES/prompt_toolkit-2.0.10-py3-none-any.whl
|
||||
5a90b79a9630873c7f2db79544c46146bb6af5e8 SOURCES/protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl
|
||||
0827aaa6fdc3dc4256e06fa1c3991fb4ed20a693 SOURCES/ptyprocess-0.7.0-py2.py3-none-any.whl
|
||||
c3c9f195dc89eb6d04828b881314743b548318d0 SOURCES/pyasn1-0.4.8-py2.py3-none-any.whl
|
||||
@ -79,25 +71,21 @@ c2ba10c775b7a52a4b57cac4d4110a0c0f812a82 SOURCES/python-dateutil-2.8.2.tar.gz
|
||||
1dc2fa004aa6517f1620e55d8a7b8e68a9cf2a47 SOURCES/python-string-utils-1.0.0.tar.gz
|
||||
3005ff67df93ee276fb8631e17c677df852254ad SOURCES/python_dateutil-2.8.1-py2.py3-none-any.whl
|
||||
b97c6966fb8fd273f0bc8533cf0dee4978b9f373 SOURCES/pytz-2021.1-py2.py3-none-any.whl
|
||||
b1009d9fd6acadc64e1a3cecb6f0083fe047e753 SOURCES/requests-2.25.1-py2.py3-none-any.whl
|
||||
8c7a89d183d3e9b70bf91ba5b75eccf7111b9d8d SOURCES/requests-2.26.0.tar.gz
|
||||
f139aed770519b6a095b8fdc888d03955cbe9d8e SOURCES/requests-oauthlib-1.3.0.tar.gz
|
||||
25d5667d7a61586f5ddaac7e08cc3053db3d8661 SOURCES/requests_oauthlib-1.3.0-py2.py3-none-any.whl
|
||||
c0d5be2edf538122d35fe06c29d2d7a5f22f3117 SOURCES/rsa-4.7.2-py3-none-any.whl
|
||||
e8a53067e03fe1b6682fd99a40a7359396a06daa SOURCES/rsa-4.7.2.tar.gz
|
||||
abf532a0cc31ea224e3895f60025e7466b16d7ae SOURCES/ruamel.yaml-0.15.100.tar.gz
|
||||
d1011ff44cd5a045de0460c1b79ec65592e86860 SOURCES/ruamel.yaml-0.17.16.tar.gz
|
||||
27de97227bbbde5a9f571f9fad223578d7bdf7cc SOURCES/ruamel.yaml.clib-0.2.6.tar.gz
|
||||
d7eb0ced8b9f12005ad6f13035689d5611f8a81a SOURCES/s3transfer-0.4.2-py2.py3-none-any.whl
|
||||
54a0d57b77b6030aaca8992282189f4b7b627bc0 SOURCES/setuptools-71.1.0.tar.gz
|
||||
749fa3bff5be78c80ac6d6d313c38c73d679542c SOURCES/setuptools_scm-8.1.0.tar.gz
|
||||
79e6f2e4f9e24898f1896df379871b9c9922f147 SOURCES/six-1.16.0-py2.py3-none-any.whl
|
||||
06fa0bb50f2a4e2917fd14c21e9d2d5508ce0163 SOURCES/six-1.16.0.tar.gz
|
||||
47a980b20875d1a1714e921552b5bb0eda190f37 SOURCES/suds_community-0.8.5-py3-none-any.whl
|
||||
3eae299137c59d6d6d30be01417366c57dd1adf5 SOURCES/tomli-2.0.1.tar.gz
|
||||
1389615a3f0bd216e7db4440985d51fb3f2ea2c6 SOURCES/typing_extensions-4.12.2.tar.gz
|
||||
83be56610e5f824bb05ff7a5618d6d4df9b6cc08 SOURCES/uritemplate-3.0.1-py2.py3-none-any.whl
|
||||
84e2852d8da1655373f7ce5e7d5d3e256b62b4e4 SOURCES/urllib3-1.26.18.tar.gz
|
||||
7126323614cada181bc8b06436e80ef372ff8656 SOURCES/wcwidth-0.1.9-py2.py3-none-any.whl
|
||||
540f083782c584989c1a0f69ffd69ba7aae07db6 SOURCES/websocket-client-1.2.1.tar.gz
|
||||
b6c48d8714e043524be7a869d1db0adcd8441cd4 SOURCES/wheel-0.37.0-py2.py3-none-any.whl
|
||||
|
||||
36
.gitignore
vendored
36
.gitignore
vendored
@ -1,23 +1,20 @@
|
||||
SOURCES/MarkupSafe-2.0.1.tar.gz
|
||||
SOURCES/PyJWT-2.1.0-py3-none-any.whl
|
||||
SOURCES/PyYAML-5.1.tar.gz
|
||||
SOURCES/adal-1.2.7-py2.py3-none-any.whl
|
||||
SOURCES/adal-1.2.7.tar.gz
|
||||
SOURCES/aliyun-cli-3.0.198.tar.gz
|
||||
SOURCES/aliyun-cli-go-vendor.tar.gz
|
||||
SOURCES/aliyun-openapi-meta-5cf98b660.tar.gz
|
||||
SOURCES/aliyun-python-sdk-core-2.11.5.tar.gz
|
||||
SOURCES/aliyun_python_sdk_ecs-4.24.7-py2.py3-none-any.whl
|
||||
SOURCES/awscli-2.2.15.tar.gz
|
||||
SOURCES/awscrt-0.11.13-cp39-cp39-manylinux2014_x86_64.whl
|
||||
SOURCES/azure-identity-1.10.0.zip
|
||||
SOURCES/azure_common-1.1.27-py2.py3-none-any.whl
|
||||
SOURCES/azure_core-1.15.0-py2.py3-none-any.whl
|
||||
SOURCES/azure_mgmt_compute-21.0.0-py2.py3-none-any.whl
|
||||
SOURCES/azure_mgmt_core-1.2.2-py2.py3-none-any.whl
|
||||
SOURCES/azure_mgmt_network-19.0.0-py2.py3-none-any.whl
|
||||
SOURCES/azure-common-1.1.28.zip
|
||||
SOURCES/azure_core-1.32.0.tar.gz
|
||||
SOURCES/azure_identity-1.19.0.tar.gz
|
||||
SOURCES/azure_mgmt_compute-34.0.0.tar.gz
|
||||
SOURCES/azure_mgmt_core-1.5.0.tar.gz
|
||||
SOURCES/azure_mgmt_network-28.1.0.tar.gz
|
||||
SOURCES/boto3-1.17.102-py2.py3-none-any.whl
|
||||
SOURCES/botocore-1.20.102-py2.py3-none-any.whl
|
||||
SOURCES/botocore-2.0.0dev123.zip
|
||||
SOURCES/cachetools-4.2.2-py3-none-any.whl
|
||||
SOURCES/cachetools-4.2.4.tar.gz
|
||||
SOURCES/certifi-2023.7.22.tar.gz
|
||||
@ -26,10 +23,7 @@ SOURCES/chardet-3.0.4-py2.py3-none-any.whl
|
||||
SOURCES/chardet-4.0.0-py2.py3-none-any.whl
|
||||
SOURCES/charset-normalizer-2.0.7.tar.gz
|
||||
SOURCES/colorama-0.3.3.tar.gz
|
||||
SOURCES/colorama-0.4.3-py2.py3-none-any.whl
|
||||
SOURCES/cryptography-3.3.2-cp36-abi3-manylinux2010_x86_64.whl
|
||||
SOURCES/distro-1.5.0-py2.py3-none-any.whl
|
||||
SOURCES/docutils-0.15.2-py3-none-any.whl
|
||||
SOURCES/fence-agents-4.10.0.tar.gz
|
||||
SOURCES/flit_core-3.9.0.tar.gz
|
||||
SOURCES/google-auth-2.3.0.tar.gz
|
||||
@ -39,16 +33,15 @@ SOURCES/google_auth-1.32.0-py2.py3-none-any.whl
|
||||
SOURCES/google_auth_httplib2-0.1.0-py2.py3-none-any.whl
|
||||
SOURCES/googleapis_common_protos-1.53.0-py2.py3-none-any.whl
|
||||
SOURCES/httplib2-0.19.1-py3-none-any.whl
|
||||
SOURCES/idna-2.10-py2.py3-none-any.whl
|
||||
SOURCES/idna-3.3.tar.gz
|
||||
SOURCES/isodate-0.6.0-py2.py3-none-any.whl
|
||||
SOURCES/isodate-0.6.1.tar.gz
|
||||
SOURCES/jinja2-3.1.6.tar.gz
|
||||
SOURCES/jmespath-0.7.1-py2.py3-none-any.whl
|
||||
SOURCES/kubernetes-12.0.1.tar.gz
|
||||
SOURCES/msal-1.18.0.tar.gz
|
||||
SOURCES/msal-extensions-1.0.0.tar.gz
|
||||
SOURCES/msrest-0.6.21-py2.py3-none-any.whl
|
||||
SOURCES/msrestazure-0.6.4-py2.py3-none-any.whl
|
||||
SOURCES/msal-1.31.1.tar.gz
|
||||
SOURCES/msal_extensions-1.2.0.tar.gz
|
||||
SOURCES/msrest-0.7.1.zip
|
||||
SOURCES/msrestazure-0.6.4.post1.tar.gz
|
||||
SOURCES/oauthlib-3.2.2.tar.gz
|
||||
SOURCES/openshift-0.12.1.tar.gz
|
||||
SOURCES/packaging-20.9-py2.py3-none-any.whl
|
||||
@ -56,7 +49,6 @@ SOURCES/packaging-21.2-py3-none-any.whl
|
||||
SOURCES/pexpect-4.8.0-py2.py3-none-any.whl
|
||||
SOURCES/poetry-core-1.0.7.tar.gz
|
||||
SOURCES/portalocker-2.5.1.tar.gz
|
||||
SOURCES/prompt_toolkit-2.0.10-py3-none-any.whl
|
||||
SOURCES/protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl
|
||||
SOURCES/ptyprocess-0.7.0-py2.py3-none-any.whl
|
||||
SOURCES/pyasn1-0.4.8-py2.py3-none-any.whl
|
||||
@ -79,25 +71,21 @@ SOURCES/python-dateutil-2.8.2.tar.gz
|
||||
SOURCES/python-string-utils-1.0.0.tar.gz
|
||||
SOURCES/python_dateutil-2.8.1-py2.py3-none-any.whl
|
||||
SOURCES/pytz-2021.1-py2.py3-none-any.whl
|
||||
SOURCES/requests-2.25.1-py2.py3-none-any.whl
|
||||
SOURCES/requests-2.26.0.tar.gz
|
||||
SOURCES/requests-oauthlib-1.3.0.tar.gz
|
||||
SOURCES/requests_oauthlib-1.3.0-py2.py3-none-any.whl
|
||||
SOURCES/rsa-4.7.2-py3-none-any.whl
|
||||
SOURCES/rsa-4.7.2.tar.gz
|
||||
SOURCES/ruamel.yaml-0.15.100.tar.gz
|
||||
SOURCES/ruamel.yaml-0.17.16.tar.gz
|
||||
SOURCES/ruamel.yaml.clib-0.2.6.tar.gz
|
||||
SOURCES/s3transfer-0.4.2-py2.py3-none-any.whl
|
||||
SOURCES/setuptools-71.1.0.tar.gz
|
||||
SOURCES/setuptools_scm-8.1.0.tar.gz
|
||||
SOURCES/six-1.16.0-py2.py3-none-any.whl
|
||||
SOURCES/six-1.16.0.tar.gz
|
||||
SOURCES/suds_community-0.8.5-py3-none-any.whl
|
||||
SOURCES/tomli-2.0.1.tar.gz
|
||||
SOURCES/typing_extensions-4.12.2.tar.gz
|
||||
SOURCES/uritemplate-3.0.1-py2.py3-none-any.whl
|
||||
SOURCES/urllib3-1.26.18.tar.gz
|
||||
SOURCES/wcwidth-0.1.9-py2.py3-none-any.whl
|
||||
SOURCES/websocket-client-1.2.1.tar.gz
|
||||
SOURCES/wheel-0.37.0-py2.py3-none-any.whl
|
||||
|
||||
@ -15,22 +15,6 @@ Subject: [PATCH] Merge pull request from GHSA-34jh-p97f-mpxf
|
||||
test/with_dummyserver/test_poolmanager.py | 27 ++++++++++++++++++++---
|
||||
4 files changed, 37 insertions(+), 5 deletions(-)
|
||||
|
||||
diff --git a/awscli/urllib3/util/retry.py b/awscli/urllib3/util/retry.py
|
||||
index 7a76a4a6ad..0456cceba4 100644
|
||||
--- a/awscli/urllib3/util/retry.py
|
||||
+++ b/awscli/urllib3/util/retry.py
|
||||
@@ -189,7 +189,9 @@ class Retry:
|
||||
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
|
||||
|
||||
#: Default headers to be used for ``remove_headers_on_redirect``
|
||||
- DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"])
|
||||
+ DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(
|
||||
+ ["Cookie", "Authorization", "Proxy-Authorization"]
|
||||
+ )
|
||||
|
||||
#: Default maximum backoff time.
|
||||
DEFAULT_BACKOFF_MAX = 120
|
||||
|
||||
diff --git a/aws/urllib3/util/retry.py b/aws/urllib3/util/retry.py
|
||||
index 7a76a4a6ad..0456cceba4 100644
|
||||
--- a/aws/urllib3/util/retry.py
|
||||
|
||||
@ -0,0 +1,10 @@
|
||||
--- a/agents/mpath/fence_mpath.py 2024-10-09 14:56:07.688665678 +0200
|
||||
+++ b/agents/mpath/fence_mpath.py 2024-10-09 14:56:12.639780798 +0200
|
||||
@@ -323,6 +323,7 @@
|
||||
fail_usage("Failed: No devices found")
|
||||
|
||||
options["devices"] = [d for d in re.split("\s*,\s*|\s+", options["--devices"].strip()) if d]
|
||||
+ options["--plug"] = re.sub(r"^0x0*|^0+", "", options["--plug"])
|
||||
# Input control END
|
||||
|
||||
result = fence_action(None, options, set_status, get_status)
|
||||
@ -0,0 +1,44 @@
|
||||
From 99c5c3289b23064441f998949808a0b3569c2e3b Mon Sep 17 00:00:00 2001
|
||||
From: Oyvind Albrigtsen <oalbrigt@redhat.com>
|
||||
Date: Thu, 10 Oct 2024 09:55:16 +0200
|
||||
Subject: [PATCH] fence_mpath: fix 0x-format patch causing unfencing issue, and
|
||||
use re.MULTILINE to avoid duplicating device dev/key lines in
|
||||
/run/cluster/mpath.devices
|
||||
|
||||
---
|
||||
agents/mpath/fence_mpath.py | 7 ++++---
|
||||
1 file changed, 4 insertions(+), 3 deletions(-)
|
||||
|
||||
diff --git a/agents/mpath/fence_mpath.py b/agents/mpath/fence_mpath.py
|
||||
index 2d8601497..2a4a58b36 100644
|
||||
--- a/agents/mpath/fence_mpath.py
|
||||
+++ b/agents/mpath/fence_mpath.py
|
||||
@@ -40,7 +40,7 @@ def set_status(conn, options):
|
||||
if options["--plug"] not in get_registration_keys(options, dev):
|
||||
count += 1
|
||||
logging.debug("Failed to register key "\
|
||||
- + options["--plug"] + "on device " + dev + "\n")
|
||||
+ + options["--plug"] + " on device " + dev + "\n")
|
||||
continue
|
||||
dev_write(options, dev)
|
||||
|
||||
@@ -147,8 +147,9 @@ def dev_write(options, dev):
|
||||
store_fh = open(file_path, "a+")
|
||||
except IOError:
|
||||
fail_usage("Failed: Cannot open file \""+ file_path + "\"")
|
||||
+ store_fh.seek(0)
|
||||
out = store_fh.read()
|
||||
- if not re.search(r"^" + dev + r"\s+", out):
|
||||
+ if not re.search(r"^{}\s+{}$".format(dev, options["--plug"]), out, flags=re.MULTILINE):
|
||||
store_fh.write(dev + "\t" + options["--plug"] + "\n")
|
||||
store_fh.close()
|
||||
|
||||
@@ -332,7 +333,7 @@ def main():
|
||||
fail_usage("Failed: No devices found")
|
||||
|
||||
options["devices"] = [d for d in re.split("\s*,\s*|\s+", options["--devices"].strip()) if d]
|
||||
- options["--plug"] = re.sub(r"^0x0*|^0+", "", options["--plug"])
|
||||
+ options["--plug"] = re.sub(r"^0x0*|^0+", "", options.get("--plug", ""))
|
||||
# Input control END
|
||||
|
||||
result = fence_action(None, options, set_status, get_status)
|
||||
@ -0,0 +1,120 @@
|
||||
--- a/agents/ibm_powervs/fence_ibm_powervs.py 2024-10-18 10:30:40.651200620 +0200
|
||||
+++ b/agents/ibm_powervs/fence_ibm_powervs.py 2024-10-18 10:30:35.157070713 +0200
|
||||
@@ -1,13 +1,14 @@
|
||||
#!@PYTHON@ -tt
|
||||
|
||||
import sys
|
||||
-import pycurl, io, json
|
||||
+import pycurl
|
||||
+import io
|
||||
+import json
|
||||
import logging
|
||||
import atexit
|
||||
-import time
|
||||
+
|
||||
sys.path.append("@FENCEAGENTSLIBDIR@")
|
||||
-from fencing import *
|
||||
-from fencing import fail, run_delay, EC_LOGIN_DENIED, EC_STATUS
|
||||
+from fencing import all_opt, atexit_handler, check_input, process_input, show_docs, fence_action, fail, run_delay, EC_STATUS
|
||||
|
||||
state = {
|
||||
"ACTIVE": "on",
|
||||
@@ -18,15 +19,35 @@
|
||||
}
|
||||
|
||||
def get_token(conn, options):
|
||||
- try:
|
||||
- command = "identity/token"
|
||||
- action = "grant_type=urn%3Aibm%3Aparams%3Aoauth%3Agrant-type%3Aapikey&apikey={}".format(options["--token"])
|
||||
- res = send_command(conn, command, "POST", action, printResult=False)
|
||||
- except Exception as e:
|
||||
- logging.debug("Failed: {}".format(e))
|
||||
- return "TOKEN_IS_MISSING_OR_WRONG"
|
||||
-
|
||||
- return res["access_token"]
|
||||
+ try:
|
||||
+ if options["--token"][0] == '@':
|
||||
+ key_file = options["--token"][1:]
|
||||
+ try:
|
||||
+ # read the API key from a file
|
||||
+ with open(key_file, "r") as f:
|
||||
+ try:
|
||||
+ keys = json.loads(f.read())
|
||||
+ # data seems to be in json format
|
||||
+ # return the value of the item with the key 'Apikey'
|
||||
+ api_key = keys.get("Apikey", "")
|
||||
+ if not api_key:
|
||||
+ # backward compatibility: former key name was 'apikey'
|
||||
+ api_key = keys.get("apikey", "")
|
||||
+ # data is text, return as is
|
||||
+ except ValueError:
|
||||
+ api_key = f.read().strip()
|
||||
+ except FileNotFoundError:
|
||||
+ logging.debug("Failed: Cannot open file {}".format(key_file))
|
||||
+ return "TOKEN_IS_MISSING_OR_WRONG"
|
||||
+ else:
|
||||
+ api_key = options["--token"]
|
||||
+ command = "identity/token"
|
||||
+ action = "grant_type=urn%3Aibm%3Aparams%3Aoauth%3Agrant-type%3Aapikey&apikey={}".format(api_key)
|
||||
+ res = send_command(conn, command, "POST", action, printResult=False)
|
||||
+ except Exception as e:
|
||||
+ logging.debug("Failed: {}".format(e))
|
||||
+ return "TOKEN_IS_MISSING_OR_WRONG"
|
||||
+ return res["access_token"]
|
||||
|
||||
def get_list(conn, options):
|
||||
outlets = {}
|
||||
@@ -41,7 +62,7 @@
|
||||
for r in res["pvmInstances"]:
|
||||
if options["--verbose-level"] > 1:
|
||||
logging.debug(json.dumps(r, indent=2))
|
||||
- outlets[r["pvmInstanceID"]] = (r["serverName"], state[r["status"]])
|
||||
+ outlets[r["pvmInstanceID"]] = (r["serverName"], state.get(r["status"], "unknown"))
|
||||
|
||||
return outlets
|
||||
|
||||
@@ -97,7 +118,7 @@
|
||||
else:
|
||||
logging.debug("Failed: Unable to cycle with {} for {}".format(options["--action"], e))
|
||||
fail(EC_STATUS)
|
||||
- return True
|
||||
+ return True
|
||||
|
||||
def connect(opt, token):
|
||||
conn = pycurl.Curl()
|
||||
@@ -130,7 +151,10 @@
|
||||
conn = pycurl.Curl()
|
||||
|
||||
# setup correct URL
|
||||
- conn.base_url = "https://iam.cloud.ibm.com/"
|
||||
+ if opt["--api-type"] == "private":
|
||||
+ conn.base_url = "https://private.iam.cloud.ibm.com/"
|
||||
+ else:
|
||||
+ conn.base_url = "https://iam.cloud.ibm.com/"
|
||||
|
||||
if opt["--verbose-level"] > 1:
|
||||
conn.setopt(pycurl.VERBOSE, 1)
|
||||
@@ -265,9 +289,9 @@
|
||||
define_new_opts()
|
||||
|
||||
all_opt["shell_timeout"]["default"] = "500"
|
||||
- all_opt["power_timeout"]["default"] = "30"
|
||||
- all_opt["power_wait"]["default"] = "1"
|
||||
- all_opt["stonith_status_sleep"]["default"] = "2"
|
||||
+ all_opt["power_timeout"]["default"] = "120"
|
||||
+ all_opt["power_wait"]["default"] = "15"
|
||||
+ all_opt["stonith_status_sleep"]["default"] = "10"
|
||||
all_opt["api-type"]["default"] = "private"
|
||||
all_opt["proxy"]["default"] = ""
|
||||
|
||||
@@ -275,8 +299,8 @@
|
||||
|
||||
docs = {}
|
||||
docs["shortdesc"] = "Fence agent for IBM PowerVS"
|
||||
- docs["longdesc"] = """fence_ibm_powervs is a Power Fencing agent which can be \
|
||||
-used with IBM PowerVS to fence virtual machines."""
|
||||
+ docs["longdesc"] = """fence_ibm_powervs is a power fencing agent for \
|
||||
+IBM Power Virtual Server (IBM PowerVS) to fence virtual server instances."""
|
||||
docs["vendorurl"] = "https://www.ibm.com"
|
||||
show_docs(options, docs)
|
||||
|
||||
472
SOURCES/RHEL-76493-fence_azure_arm-use-azure-identity.patch
Normal file
472
SOURCES/RHEL-76493-fence_azure_arm-use-azure-identity.patch
Normal file
@ -0,0 +1,472 @@
|
||||
--- a/lib/azure_fence.py.py 2025-01-30 14:59:48.211366580 +0100
|
||||
+++ b/lib/azure_fence.py.py 2025-01-30 12:06:10.847889534 +0100
|
||||
@@ -1,9 +1,6 @@
|
||||
import logging, re, time
|
||||
from fencing import fail_usage
|
||||
|
||||
-import sys
|
||||
-sys.path.insert(0, '/usr/lib/fence-agents/support/azure')
|
||||
-
|
||||
FENCE_SUBNET_NAME = "fence-subnet"
|
||||
FENCE_INBOUND_RULE_NAME = "FENCE_DENY_ALL_INBOUND"
|
||||
FENCE_INBOUND_RULE_DIRECTION = "Inbound"
|
||||
@@ -17,6 +14,9 @@
|
||||
IP_TYPE_DYNAMIC = "Dynamic"
|
||||
MAX_RETRY = 10
|
||||
RETRY_WAIT = 5
|
||||
+NETWORK_MGMT_CLIENT_API_VERSION = "2021-05-01"
|
||||
+AZURE_RHEL8_COMPUTE_VERSION = "27.2.0"
|
||||
+AZURE_COMPUTE_VERSION_5 = "5.0.0"
|
||||
|
||||
class AzureSubResource:
|
||||
Type = None
|
||||
@@ -52,7 +52,7 @@
|
||||
return None
|
||||
|
||||
def get_azure_resource(id):
|
||||
- match = re.match('(/subscriptions/([^/]*)/resourceGroups/([^/]*))(/providers/([^/]*/[^/]*)/([^/]*))?((/([^/]*)/([^/]*))*)', id)
|
||||
+ match = re.match(r'(/subscriptions/([^/]*)/resourceGroups/([^/]*))(/providers/([^/]*/[^/]*)/([^/]*))?((/([^/]*)/([^/]*))*)', id)
|
||||
if not match:
|
||||
fail_usage("{get_azure_resource} cannot parse resource id %s" % id)
|
||||
|
||||
@@ -89,6 +89,59 @@
|
||||
|
||||
return resource
|
||||
|
||||
+def azure_dep_versions(v):
|
||||
+ return tuple(map(int, (v.split("."))))
|
||||
+
|
||||
+# Do azure API call to list all virtual machines in a resource group
|
||||
+def get_vm_list(compute_client,rgName):
|
||||
+ return compute_client.virtual_machines.list(rgName)
|
||||
+
|
||||
+# Do azue API call to shutdown a virtual machine
|
||||
+def do_vm_power_off(compute_client,rgName,vmName, skipShutdown):
|
||||
+ try:
|
||||
+ # Version is not available in azure-mgmt-compute version 14.0.0 until 27.2.0
|
||||
+ from azure.mgmt.compute import __version__
|
||||
+ except ImportError:
|
||||
+ __version__ = "0.0.0"
|
||||
+
|
||||
+ # use different implementation call based on used version
|
||||
+ if (azure_dep_versions(__version__) == azure_dep_versions(AZURE_COMPUTE_VERSION_5)):
|
||||
+ logging.debug("{do_vm_power_off} azure.mgtm.compute version is to old to use 'begin_power_off' use 'power_off' function")
|
||||
+ compute_client.virtual_machines.power_off(rgName, vmName, skip_shutdown=skipShutdown)
|
||||
+ return
|
||||
+
|
||||
+ compute_client.virtual_machines.begin_power_off(rgName, vmName, skip_shutdown=skipShutdown)
|
||||
+
|
||||
+# Do azure API call to start a virtual machine
|
||||
+def do_vm_start(compute_client,rgName,vmName):
|
||||
+ try:
|
||||
+ # Version is not available in azure-mgmt-compute version 14.0.0 until 27.2.0
|
||||
+ from azure.mgmt.compute import __version__
|
||||
+ except ImportError:
|
||||
+ __version__ = "0.0.0"
|
||||
+
|
||||
+ # use different implementation call based on used version
|
||||
+ if (azure_dep_versions(__version__) == azure_dep_versions(AZURE_COMPUTE_VERSION_5)):
|
||||
+ logging.debug("{do_vm_start} azure.mgtm.compute version is to old to use 'begin_start' use 'start' function")
|
||||
+ compute_client.virtual_machines.start(rgName, vmName)
|
||||
+ return
|
||||
+
|
||||
+ compute_client.virtual_machines.begin_start(rgName, vmName)
|
||||
+
|
||||
+def get_vm_resource(compute_client, rgName, vmName):
|
||||
+ try:
|
||||
+ # Version is not available in azure-mgmt-compute version 14.0.0 until 27.2.0
|
||||
+ from azure.mgmt.compute import __version__
|
||||
+ except ImportError:
|
||||
+ __version__ = "0.0.0"
|
||||
+
|
||||
+ # use different implementation call based on used version
|
||||
+ if (azure_dep_versions(__version__) <= azure_dep_versions(AZURE_RHEL8_COMPUTE_VERSION)):
|
||||
+ return compute_client.virtual_machines.get(rgName, vmName, "instanceView")
|
||||
+
|
||||
+ return compute_client.virtual_machines.get(resource_group_name=rgName, vm_name=vmName,expand="instanceView")
|
||||
+
|
||||
+
|
||||
def get_fence_subnet_for_config(ipConfig, network_client):
|
||||
subnetResource = get_azure_resource(ipConfig.subnet.id)
|
||||
logging.debug("{get_fence_subnet_for_config} testing virtual network %s in resource group %s for a fence subnet" %(subnetResource.ResourceName, subnetResource.ResourceGroupName))
|
||||
@@ -155,7 +208,7 @@
|
||||
result = FENCE_STATE_ON
|
||||
|
||||
try:
|
||||
- vm = compute_client.virtual_machines.get(rgName, vmName, "instanceView")
|
||||
+ vm = get_vm_resource(compute_client, rgName, vmName)
|
||||
|
||||
allNICOK = True
|
||||
for nicRef in vm.network_profile.network_interfaces:
|
||||
@@ -182,7 +235,7 @@
|
||||
import msrestazure.azure_exceptions
|
||||
logging.info("{set_network_state} Setting state %s for %s in resource group %s" % (operation, vmName, rgName))
|
||||
|
||||
- vm = compute_client.virtual_machines.get(rgName, vmName, "instanceView")
|
||||
+ vm = get_vm_resource(compute_client,rgName, vmName)
|
||||
|
||||
operations = []
|
||||
for nicRef in vm.network_profile.network_interfaces:
|
||||
@@ -271,10 +324,72 @@
|
||||
|
||||
return config
|
||||
|
||||
+# Function to fetch endpoints from metadata endpoint for azure_stack
|
||||
+def get_cloud_from_arm_metadata_endpoint(arm_endpoint):
|
||||
+ try:
|
||||
+ import requests
|
||||
+ session = requests.Session()
|
||||
+ metadata_endpoint = arm_endpoint + "/metadata/endpoints?api-version=2015-01-01"
|
||||
+ response = session.get(metadata_endpoint)
|
||||
+ if response.status_code == 200:
|
||||
+ metadata = response.json()
|
||||
+ return {
|
||||
+ "resource_manager": arm_endpoint,
|
||||
+ "credential_scopes": [metadata.get("graphEndpoint") + "/.default"],
|
||||
+ "authority_hosts": metadata['authentication'].get('loginEndpoint').replace("https://","")
|
||||
+ }
|
||||
+ else:
|
||||
+ fail_usage("Failed to get cloud from metadata endpoint: %s - %s" % arm_endpoint, e)
|
||||
+ except Exception as e:
|
||||
+ fail_usage("Failed to get cloud from metadata endpoint: %s - %s" % arm_endpoint, e)
|
||||
+
|
||||
+def get_azure_arm_endpoints(cloudName, authority):
|
||||
+ cloudEnvironment = {
|
||||
+ "authority_hosts": authority
|
||||
+ }
|
||||
+
|
||||
+ if cloudName == "AZURE_CHINA_CLOUD":
|
||||
+ cloudEnvironment["resource_manager"] = "https://management.chinacloudapi.cn/"
|
||||
+ cloudEnvironment["credential_scopes"] = ["https://management.chinacloudapi.cn/.default"]
|
||||
+ return cloudEnvironment
|
||||
+
|
||||
+ if cloudName == "AZURE_US_GOV_CLOUD":
|
||||
+ cloudEnvironment["resource_manager"] = "https://management.usgovcloudapi.net/"
|
||||
+ cloudEnvironment["credential_scopes"] = ["https://management.core.usgovcloudapi.net/.default"]
|
||||
+ return cloudEnvironment
|
||||
+
|
||||
+ if cloudName == "AZURE_PUBLIC_CLOUD":
|
||||
+ cloudEnvironment["resource_manager"] = "https://management.azure.com/"
|
||||
+ cloudEnvironment["credential_scopes"] = ["https://management.azure.com/.default"]
|
||||
+ return cloudEnvironment
|
||||
+
|
||||
+
|
||||
def get_azure_cloud_environment(config):
|
||||
- cloud_environment = None
|
||||
- if config.Cloud:
|
||||
+ if (config.Cloud is None):
|
||||
+ config.Cloud = "public"
|
||||
+
|
||||
+ try:
|
||||
+ from azure.identity import AzureAuthorityHosts
|
||||
+
|
||||
+ azureCloudName = "AZURE_PUBLIC_CLOUD"
|
||||
+ authorityHosts = AzureAuthorityHosts.AZURE_PUBLIC_CLOUD
|
||||
if (config.Cloud.lower() == "china"):
|
||||
+ azureCloudName = "AZURE_CHINA_CLOUD"
|
||||
+ authorityHosts = AzureAuthorityHosts.AZURE_CHINA
|
||||
+ elif (config.Cloud.lower() == "usgov"):
|
||||
+ azureCloudName = "AZURE_US_GOV_CLOUD"
|
||||
+ authorityHosts = AzureAuthorityHosts.AZURE_GOVERNMENT
|
||||
+ elif (config.Cloud.lower() == "stack"):
|
||||
+ # use custom function to call the azuer stack metadata endpoint to get required configuration.
|
||||
+ return get_cloud_from_arm_metadata_endpoint(config.MetadataEndpoint)
|
||||
+
|
||||
+ return get_azure_arm_endpoints(azureCloudName, authorityHosts)
|
||||
+
|
||||
+ except ImportError:
|
||||
+ if (config.Cloud.lower() == "public"):
|
||||
+ from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD
|
||||
+ cloud_environment = AZURE_PUBLIC_CLOUD
|
||||
+ elif (config.Cloud.lower() == "china"):
|
||||
from msrestazure.azure_cloud import AZURE_CHINA_CLOUD
|
||||
cloud_environment = AZURE_CHINA_CLOUD
|
||||
elif (config.Cloud.lower() == "germany"):
|
||||
@@ -287,61 +402,44 @@
|
||||
from msrestazure.azure_cloud import get_cloud_from_metadata_endpoint
|
||||
cloud_environment = get_cloud_from_metadata_endpoint(config.MetadataEndpoint)
|
||||
|
||||
- return cloud_environment
|
||||
+ authority_hosts = cloud_environment.endpoints.active_directory.replace("http://","")
|
||||
+ return {
|
||||
+ "resource_manager": cloud_environment.endpoints.resource_manager,
|
||||
+ "credential_scopes": [cloud_environment.endpoints.active_directory_resource_id + "/.default"],
|
||||
+ "authority_hosts": authority_hosts,
|
||||
+ "cloud_environment": cloud_environment,
|
||||
+ }
|
||||
|
||||
def get_azure_credentials(config):
|
||||
credentials = None
|
||||
cloud_environment = get_azure_cloud_environment(config)
|
||||
- if config.UseMSI and cloud_environment:
|
||||
- try:
|
||||
- from azure.identity import ManagedIdentityCredential
|
||||
- credentials = ManagedIdentityCredential(cloud_environment=cloud_environment)
|
||||
- except ImportError:
|
||||
- from msrestazure.azure_active_directory import MSIAuthentication
|
||||
- credentials = MSIAuthentication(cloud_environment=cloud_environment)
|
||||
- elif config.UseMSI:
|
||||
+ if config.UseMSI:
|
||||
try:
|
||||
from azure.identity import ManagedIdentityCredential
|
||||
- credentials = ManagedIdentityCredential()
|
||||
+ credentials = ManagedIdentityCredential(authority=cloud_environment["authority_hosts"])
|
||||
except ImportError:
|
||||
from msrestazure.azure_active_directory import MSIAuthentication
|
||||
- credentials = MSIAuthentication()
|
||||
- elif cloud_environment:
|
||||
- try:
|
||||
- # try to use new libraries ClientSecretCredential (azure.identity, based on azure.core)
|
||||
- from azure.identity import ClientSecretCredential
|
||||
- credentials = ClientSecretCredential(
|
||||
- client_id = config.ApplicationId,
|
||||
- client_secret = config.ApplicationKey,
|
||||
- tenant_id = config.Tenantid,
|
||||
- cloud_environment=cloud_environment
|
||||
- )
|
||||
- except ImportError:
|
||||
- # use old libraries ServicePrincipalCredentials (azure.common) if new one is not available
|
||||
- from azure.common.credentials import ServicePrincipalCredentials
|
||||
- credentials = ServicePrincipalCredentials(
|
||||
- client_id = config.ApplicationId,
|
||||
- secret = config.ApplicationKey,
|
||||
- tenant = config.Tenantid,
|
||||
- cloud_environment=cloud_environment
|
||||
- )
|
||||
- else:
|
||||
- try:
|
||||
- # try to use new libraries ClientSecretCredential (azure.identity, based on azure.core)
|
||||
- from azure.identity import ClientSecretCredential
|
||||
- credentials = ClientSecretCredential(
|
||||
- client_id = config.ApplicationId,
|
||||
- client_secret = config.ApplicationKey,
|
||||
- tenant_id = config.Tenantid
|
||||
- )
|
||||
- except ImportError:
|
||||
- # use old libraries ServicePrincipalCredentials (azure.common) if new one is not available
|
||||
- from azure.common.credentials import ServicePrincipalCredentials
|
||||
- credentials = ServicePrincipalCredentials(
|
||||
- client_id = config.ApplicationId,
|
||||
- secret = config.ApplicationKey,
|
||||
- tenant = config.Tenantid
|
||||
- )
|
||||
+ credentials = MSIAuthentication(cloud_environment=cloud_environment["cloud_environment"])
|
||||
+ return credentials
|
||||
+
|
||||
+ try:
|
||||
+ # try to use new libraries ClientSecretCredential (azure.identity, based on azure.core)
|
||||
+ from azure.identity import ClientSecretCredential
|
||||
+ credentials = ClientSecretCredential(
|
||||
+ client_id = config.ApplicationId,
|
||||
+ client_secret = config.ApplicationKey,
|
||||
+ tenant_id = config.Tenantid,
|
||||
+ authority=cloud_environment["authority_hosts"]
|
||||
+ )
|
||||
+ except ImportError:
|
||||
+ # use old libraries ServicePrincipalCredentials (azure.common) if new one is not available
|
||||
+ from azure.common.credentials import ServicePrincipalCredentials
|
||||
+ credentials = ServicePrincipalCredentials(
|
||||
+ client_id = config.ApplicationId,
|
||||
+ secret = config.ApplicationKey,
|
||||
+ tenant = config.Tenantid,
|
||||
+ cloud_environment=cloud_environment["cloud_environment"]
|
||||
+ )
|
||||
|
||||
return credentials
|
||||
|
||||
@@ -351,36 +449,40 @@
|
||||
cloud_environment = get_azure_cloud_environment(config)
|
||||
credentials = get_azure_credentials(config)
|
||||
|
||||
- if cloud_environment:
|
||||
- if (config.Cloud.lower() == "stack") and not config.MetadataEndpoint:
|
||||
- fail_usage("metadata-endpoint not specified")
|
||||
+ # Try to read the default used api version from the installed package.
|
||||
+ try:
|
||||
+ compute_api_version = ComputeManagementClient.LATEST_PROFILE.get_profile_dict()["azure.mgmt.compute.ComputeManagementClient"]["virtual_machines"]
|
||||
+ except Exception as e:
|
||||
+ compute_api_version = ComputeManagementClient.DEFAULT_API_VERSION
|
||||
+ logging.debug("{get_azure_compute_client} Failed to get the latest profile: %s using the default api version %s" % (e, compute_api_version))
|
||||
|
||||
- try:
|
||||
- from azure.profiles import KnownProfiles
|
||||
- if (config.Cloud.lower() == "stack"):
|
||||
- client_profile = KnownProfiles.v2020_09_01_hybrid
|
||||
- credential_scope = cloud_environment.endpoints.active_directory_resource_id + "/.default"
|
||||
- else:
|
||||
- client_profile = KnownProfiles.default
|
||||
- credential_scope = cloud_environment.endpoints.resource_manager + "/.default"
|
||||
- compute_client = ComputeManagementClient(
|
||||
- credentials,
|
||||
- config.SubscriptionId,
|
||||
- base_url=cloud_environment.endpoints.resource_manager,
|
||||
- profile=client_profile,
|
||||
- credential_scopes=[credential_scope],
|
||||
- )
|
||||
- except TypeError:
|
||||
- compute_client = ComputeManagementClient(
|
||||
- credentials,
|
||||
- config.SubscriptionId,
|
||||
- base_url=cloud_environment.endpoints.resource_manager
|
||||
- )
|
||||
- else:
|
||||
+ logging.debug("{get_azure_compute_client} use virtual_machine api version: %s" %(compute_api_version))
|
||||
+
|
||||
+ if (config.Cloud.lower() == "stack") and not config.MetadataEndpoint:
|
||||
+ fail_usage("metadata-endpoint not specified")
|
||||
+
|
||||
+ try:
|
||||
+ from azure.profiles import KnownProfiles
|
||||
+ if (config.Cloud.lower() == "stack"):
|
||||
+ client_profile = KnownProfiles.v2020_09_01_hybrid
|
||||
+ else:
|
||||
+ client_profile = KnownProfiles.default
|
||||
compute_client = ComputeManagementClient(
|
||||
credentials,
|
||||
- config.SubscriptionId
|
||||
+ config.SubscriptionId,
|
||||
+ base_url=cloud_environment["resource_manager"],
|
||||
+ profile=client_profile,
|
||||
+ credential_scopes=cloud_environment["credential_scopes"],
|
||||
+ api_version=compute_api_version
|
||||
)
|
||||
+ except TypeError:
|
||||
+ compute_client = ComputeManagementClient(
|
||||
+ credentials,
|
||||
+ config.SubscriptionId,
|
||||
+ base_url=cloud_environment["resource_manager"],
|
||||
+ api_version=compute_api_version
|
||||
+ )
|
||||
+
|
||||
return compute_client
|
||||
|
||||
def get_azure_network_client(config):
|
||||
@@ -389,34 +491,31 @@
|
||||
cloud_environment = get_azure_cloud_environment(config)
|
||||
credentials = get_azure_credentials(config)
|
||||
|
||||
- if cloud_environment:
|
||||
- if (config.Cloud.lower() == "stack") and not config.MetadataEndpoint:
|
||||
- fail_usage("metadata-endpoint not specified")
|
||||
+ if (config.Cloud.lower() == "stack") and not config.MetadataEndpoint:
|
||||
+ fail_usage("metadata-endpoint not specified")
|
||||
|
||||
- try:
|
||||
- from azure.profiles import KnownProfiles
|
||||
- if (config.Cloud.lower() == "stack"):
|
||||
- client_profile = KnownProfiles.v2020_09_01_hybrid
|
||||
- credential_scope = cloud_environment.endpoints.active_directory_resource_id + "/.default"
|
||||
- else:
|
||||
- client_profile = KnownProfiles.default
|
||||
- credential_scope = cloud_environment.endpoints.resource_manager + "/.default"
|
||||
- network_client = NetworkManagementClient(
|
||||
- credentials,
|
||||
- config.SubscriptionId,
|
||||
- base_url=cloud_environment.endpoints.resource_manager,
|
||||
- profile=client_profile,
|
||||
- credential_scopes=[credential_scope],
|
||||
- )
|
||||
- except TypeError:
|
||||
- network_client = NetworkManagementClient(
|
||||
- credentials,
|
||||
- config.SubscriptionId,
|
||||
- base_url=cloud_environment.endpoints.resource_manager
|
||||
- )
|
||||
+
|
||||
+ from azure.profiles import KnownProfiles
|
||||
+
|
||||
+ if (config.Cloud.lower() == "stack"):
|
||||
+ client_profile = KnownProfiles.v2020_09_01_hybrid
|
||||
else:
|
||||
+ client_profile = KnownProfiles.default
|
||||
+
|
||||
+ try:
|
||||
+ network_client = NetworkManagementClient(
|
||||
+ credentials,
|
||||
+ config.SubscriptionId,
|
||||
+ base_url=cloud_environment["resource_manager"],
|
||||
+ profile=client_profile,
|
||||
+ credential_scopes=cloud_environment["credential_scopes"],
|
||||
+ api_version=NETWORK_MGMT_CLIENT_API_VERSION
|
||||
+ )
|
||||
+ except TypeError:
|
||||
network_client = NetworkManagementClient(
|
||||
credentials,
|
||||
- config.SubscriptionId
|
||||
+ config.SubscriptionId,
|
||||
+ base_url=cloud_environment["resource_manager"],
|
||||
+ api_version=NETWORK_MGMT_CLIENT_API_VERSION
|
||||
)
|
||||
return network_client
|
||||
--- a/agents/azure_arm/fence_azure_arm.py 2025-01-30 14:59:48.218366743 +0100
|
||||
+++ b/agents/azure_arm/fence_azure_arm.py 2025-01-28 13:12:00.758567141 +0100
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import sys, re
|
||||
sys.path.insert(0, '/usr/lib/fence-agents/support/common')
|
||||
+sys.path.insert(1, '/usr/lib/fence-agents/support/azure')
|
||||
try:
|
||||
import pexpect
|
||||
except:
|
||||
@@ -20,7 +21,7 @@
|
||||
if clients:
|
||||
compute_client = clients[0]
|
||||
rgName = options["--resourceGroup"]
|
||||
- vms = compute_client.virtual_machines.list(rgName)
|
||||
+ vms = azure_fence.get_vm_list(compute_client,rgName)
|
||||
try:
|
||||
for vm in vms:
|
||||
result[vm.name] = ("", None)
|
||||
@@ -36,7 +37,7 @@
|
||||
rgName = options["--resourceGroup"]
|
||||
|
||||
try:
|
||||
- vms = compute_client.virtual_machines.list(rgName)
|
||||
+ vms = azure_fence.get_vm_list(compute_client,rgName)
|
||||
except Exception as e:
|
||||
fail_usage("Failed: %s" % e)
|
||||
|
||||
@@ -77,7 +78,7 @@
|
||||
|
||||
powerState = "unknown"
|
||||
try:
|
||||
- vmStatus = compute_client.virtual_machines.get(rgName, vmName, "instanceView")
|
||||
+ vmStatus = azure_fence.get_vm_resource(compute_client, rgName, vmName)
|
||||
except Exception as e:
|
||||
fail_usage("Failed: %s" % e)
|
||||
|
||||
@@ -120,23 +121,10 @@
|
||||
|
||||
if (options["--action"]=="off"):
|
||||
logging.info("Poweroff " + vmName + " in resource group " + rgName)
|
||||
- try:
|
||||
- # try new API version first
|
||||
- compute_client.virtual_machines.begin_power_off(rgName, vmName, skip_shutdown=True)
|
||||
- except AttributeError:
|
||||
- # use older API verson if it fails
|
||||
- logging.debug("Poweroff " + vmName + " did not work via 'virtual_machines.begin_power_off. Trying virtual_machines.power_off'.")
|
||||
- compute_client.virtual_machines.power_off(rgName, vmName, skip_shutdown=True)
|
||||
+ azure_fence.do_vm_power_off(compute_client, rgName, vmName, True)
|
||||
elif (options["--action"]=="on"):
|
||||
logging.info("Starting " + vmName + " in resource group " + rgName)
|
||||
- try:
|
||||
- # try new API version first
|
||||
- compute_client.virtual_machines.begin_start(rgName, vmName)
|
||||
- except AttributeError:
|
||||
- # use older API verson if it fails
|
||||
- logging.debug("Starting " + vmName + " did not work via 'virtual_machines.begin_start. Trying virtual_machines.start'.")
|
||||
- compute_client.virtual_machines.start(rgName, vmName)
|
||||
-
|
||||
+ azure_fence.do_vm_start(compute_client, rgName, vmName)
|
||||
|
||||
def define_new_opts():
|
||||
all_opt["resourceGroup"] = {
|
||||
@@ -256,7 +244,7 @@
|
||||
except ImportError:
|
||||
fail_usage("Azure Resource Manager Python SDK not found or not accessible")
|
||||
except Exception as e:
|
||||
- fail_usage("Failed: %s" % re.sub("^, ", "", str(e)))
|
||||
+ fail_usage("Failed: %s" % re.sub(r"^, ", r"", str(e)))
|
||||
|
||||
if "--network-fencing" in options:
|
||||
# use off-action to quickly return off once network is fenced instead of
|
||||
296
SOURCES/RHEL-92695-1-fence_sbd-improve-error-handling.patch
Normal file
296
SOURCES/RHEL-92695-1-fence_sbd-improve-error-handling.patch
Normal file
@ -0,0 +1,296 @@
|
||||
From ab1de07902d9f380c10405d6ddac3aeb43838c86 Mon Sep 17 00:00:00 2001
|
||||
From: Klaus Wenninger <klaus.wenninger@aon.at>
|
||||
Date: Thu, 28 Jul 2022 15:33:12 +0200
|
||||
Subject: [PATCH] fence_sbd: improve error handling
|
||||
|
||||
basically when using 3 disks be happy with 2 answers
|
||||
but give it 5s at least to collect all answers
|
||||
increase default power-timeout to 30s so that waiting
|
||||
those 5s still allows us to get done sending the
|
||||
reboot
|
||||
|
||||
RHBZ#2033671
|
||||
---
|
||||
agents/sbd/fence_sbd.py | 77 +++++++++++++--------
|
||||
lib/fencing.py.py | 109 ++++++++++++++++++++++++++++++
|
||||
tests/data/metadata/fence_sbd.xml | 2 +-
|
||||
3 files changed, 158 insertions(+), 30 deletions(-)
|
||||
|
||||
diff --git a/agents/sbd/fence_sbd.py b/agents/sbd/fence_sbd.py
|
||||
index 0c876b16e..2b0127d55 100644
|
||||
--- a/agents/sbd/fence_sbd.py
|
||||
+++ b/agents/sbd/fence_sbd.py
|
||||
@@ -5,7 +5,7 @@
|
||||
import os
|
||||
import atexit
|
||||
sys.path.append("@FENCEAGENTSLIBDIR@")
|
||||
-from fencing import fail_usage, run_command, fence_action, all_opt
|
||||
+from fencing import fail_usage, run_commands, fence_action, all_opt
|
||||
from fencing import atexit_handler, check_input, process_input, show_docs
|
||||
from fencing import run_delay
|
||||
import itertools
|
||||
@@ -81,7 +81,7 @@ def check_sbd_device(options, device_path):
|
||||
|
||||
cmd = "%s -d %s dump" % (options["--sbd-path"], device_path)
|
||||
|
||||
- (return_code, out, err) = run_command(options, cmd)
|
||||
+ (return_code, out, err) = run_commands(options, [ cmd ])
|
||||
|
||||
for line in itertools.chain(out.split("\n"), err.split("\n")):
|
||||
if len(line) == 0:
|
||||
@@ -94,21 +94,35 @@ def check_sbd_device(options, device_path):
|
||||
|
||||
return DEVICE_INIT
|
||||
|
||||
+
|
||||
def generate_sbd_command(options, command, arguments=None):
|
||||
"""Generates a sbd command based on given arguments.
|
||||
|
||||
Return Value:
|
||||
- generated sbd command (string)
|
||||
+ generated list of sbd commands (strings) depending
|
||||
+ on command multiple commands with a device each
|
||||
+ or a single command with multiple devices
|
||||
"""
|
||||
- cmd = options["--sbd-path"]
|
||||
+ cmds = []
|
||||
+
|
||||
+ if not command in ["list", "dump"]:
|
||||
+ cmd = options["--sbd-path"]
|
||||
|
||||
- # add "-d" for each sbd device
|
||||
- for device in parse_sbd_devices(options):
|
||||
- cmd += " -d %s" % device
|
||||
+ # add "-d" for each sbd device
|
||||
+ for device in parse_sbd_devices(options):
|
||||
+ cmd += " -d %s" % device
|
||||
|
||||
- cmd += " %s %s" % (command, arguments)
|
||||
+ cmd += " %s %s" % (command, arguments)
|
||||
+ cmds.append(cmd)
|
||||
+
|
||||
+ else:
|
||||
+ for device in parse_sbd_devices(options):
|
||||
+ cmd = options["--sbd-path"]
|
||||
+ cmd += " -d %s" % device
|
||||
+ cmd += " %s %s" % (command, arguments)
|
||||
+ cmds.append(cmd)
|
||||
|
||||
- return cmd
|
||||
+ return cmds
|
||||
|
||||
def send_sbd_message(conn, options, plug, message):
|
||||
"""Sends a message to all sbd devices.
|
||||
@@ -128,7 +142,7 @@ def send_sbd_message(conn, options, plug, message):
|
||||
arguments = "%s %s" % (plug, message)
|
||||
cmd = generate_sbd_command(options, "message", arguments)
|
||||
|
||||
- (return_code, out, err) = run_command(options, cmd)
|
||||
+ (return_code, out, err) = run_commands(options, cmd)
|
||||
|
||||
return (return_code, out, err)
|
||||
|
||||
@@ -147,7 +161,7 @@ def get_msg_timeout(options):
|
||||
|
||||
cmd = generate_sbd_command(options, "dump")
|
||||
|
||||
- (return_code, out, err) = run_command(options, cmd)
|
||||
+ (return_code, out, err) = run_commands(options, cmd)
|
||||
|
||||
for line in itertools.chain(out.split("\n"), err.split("\n")):
|
||||
if len(line) == 0:
|
||||
@@ -288,7 +302,7 @@ def get_node_list(conn, options):
|
||||
|
||||
cmd = generate_sbd_command(options, "list")
|
||||
|
||||
- (return_code, out, err) = run_command(options, cmd)
|
||||
+ (return_code, out, err) = run_commands(options, cmd)
|
||||
|
||||
for line in out.split("\n"):
|
||||
if len(line) == 0:
|
||||
@@ -356,6 +370,7 @@ def main():
|
||||
|
||||
all_opt["method"]["default"] = "cycle"
|
||||
all_opt["method"]["help"] = "-m, --method=[method] Method to fence (onoff|cycle) (Default: cycle)"
|
||||
+ all_opt["power_timeout"]["default"] = "30"
|
||||
|
||||
options = check_input(device_opt, process_input(device_opt))
|
||||
|
||||
@@ -376,23 +391,27 @@ def main():
|
||||
|
||||
# We need to check if the provided sbd_devices exists. We need to do
|
||||
# that for every given device.
|
||||
- for device_path in parse_sbd_devices(options):
|
||||
- logging.debug("check device \"%s\"", device_path)
|
||||
-
|
||||
- return_code = check_sbd_device(options, device_path)
|
||||
- if PATH_NOT_EXISTS == return_code:
|
||||
- logging.error("\"%s\" does not exist", device_path)
|
||||
- elif PATH_NOT_BLOCK == return_code:
|
||||
- logging.error("\"%s\" is not a valid block device", device_path)
|
||||
- elif DEVICE_NOT_INIT == return_code:
|
||||
- logging.error("\"%s\" is not initialized", device_path)
|
||||
- elif DEVICE_INIT != return_code:
|
||||
- logging.error("UNKNOWN error while checking \"%s\"", device_path)
|
||||
-
|
||||
- # If we get any error while checking the device we need to exit at this
|
||||
- # point.
|
||||
- if DEVICE_INIT != return_code:
|
||||
- exit(return_code)
|
||||
+ # Just for the case we are really rebooting / powering off a device
|
||||
+ # (pacemaker as well uses the list command to generate a dynamic list)
|
||||
+ # we leave it to sbd to try and decide if it was successful
|
||||
+ if not options["--action"] in ["reboot", "off", "list"]:
|
||||
+ for device_path in parse_sbd_devices(options):
|
||||
+ logging.debug("check device \"%s\"", device_path)
|
||||
+
|
||||
+ return_code = check_sbd_device(options, device_path)
|
||||
+ if PATH_NOT_EXISTS == return_code:
|
||||
+ logging.error("\"%s\" does not exist", device_path)
|
||||
+ elif PATH_NOT_BLOCK == return_code:
|
||||
+ logging.error("\"%s\" is not a valid block device", device_path)
|
||||
+ elif DEVICE_NOT_INIT == return_code:
|
||||
+ logging.error("\"%s\" is not initialized", device_path)
|
||||
+ elif DEVICE_INIT != return_code:
|
||||
+ logging.error("UNKNOWN error while checking \"%s\"", device_path)
|
||||
+
|
||||
+ # If we get any error while checking the device we need to exit at this
|
||||
+ # point.
|
||||
+ if DEVICE_INIT != return_code:
|
||||
+ exit(return_code)
|
||||
|
||||
# we check against the defined timeouts. If the pacemaker timeout is smaller
|
||||
# then that defined within sbd we should report this.
|
||||
diff --git a/lib/fencing.py.py b/lib/fencing.py.py
|
||||
index b746ede8b..fc3679e33 100644
|
||||
--- a/lib/fencing.py.py
|
||||
+++ b/lib/fencing.py.py
|
||||
@@ -1088,6 +1088,115 @@ def is_executable(path):
|
||||
return True
|
||||
return False
|
||||
|
||||
+def run_commands(options, commands, timeout=None, env=None, log_command=None):
|
||||
+ # inspired by psutils.wait_procs (BSD License)
|
||||
+ def check_gone(proc, timeout):
|
||||
+ try:
|
||||
+ returncode = proc.wait(timeout=timeout)
|
||||
+ except subprocess.TimeoutExpired:
|
||||
+ pass
|
||||
+ else:
|
||||
+ if returncode is not None or not proc.is_running():
|
||||
+ proc.returncode = returncode
|
||||
+ gone.add(proc)
|
||||
+
|
||||
+ if timeout is None and "--power-timeout" in options:
|
||||
+ timeout = options["--power-timeout"]
|
||||
+ if timeout == 0:
|
||||
+ timeout = None
|
||||
+ if timeout is not None:
|
||||
+ timeout = float(timeout)
|
||||
+
|
||||
+ time_start = time.time()
|
||||
+ procs = []
|
||||
+ status = None
|
||||
+ pipe_stdout = ""
|
||||
+ pipe_stderr = ""
|
||||
+
|
||||
+ for command in commands:
|
||||
+ logging.info("Executing: %s\n", log_command or command)
|
||||
+
|
||||
+ try:
|
||||
+ process = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env,
|
||||
+ # decodes newlines and in python3 also converts bytes to str
|
||||
+ universal_newlines=(sys.version_info[0] > 2))
|
||||
+ except OSError:
|
||||
+ fail_usage("Unable to run %s\n" % command)
|
||||
+
|
||||
+ procs.append(process)
|
||||
+
|
||||
+ gone = set()
|
||||
+ alive = set(procs)
|
||||
+
|
||||
+ while True:
|
||||
+ if alive:
|
||||
+ max_timeout = 2.0 / len(alive)
|
||||
+ for proc in alive:
|
||||
+ if timeout is not None:
|
||||
+ if time.time()-time_start >= timeout:
|
||||
+ # quickly go over the rest
|
||||
+ max_timeout = 0
|
||||
+ check_gone(proc, max_timeout)
|
||||
+ alive = alive - gone
|
||||
+
|
||||
+ if not alive:
|
||||
+ break
|
||||
+
|
||||
+ if time.time()-time_start < 5.0:
|
||||
+ # give it at least 5s to get a complete answer
|
||||
+ # afterwards we're OK with a quorate answer
|
||||
+ continue
|
||||
+
|
||||
+ if len(gone) > len(alive):
|
||||
+ good_cnt = 0
|
||||
+ for proc in gone:
|
||||
+ if proc.returncode == 0:
|
||||
+ good_cnt += 1
|
||||
+ # a positive result from more than half is fine
|
||||
+ if good_cnt > len(procs)/2:
|
||||
+ break
|
||||
+
|
||||
+ if timeout is not None:
|
||||
+ if time.time() - time_start >= timeout:
|
||||
+ logging.debug("Stop waiting after %s\n", str(timeout))
|
||||
+ break
|
||||
+
|
||||
+ logging.debug("Done: %d gone, %d alive\n", len(gone), len(alive))
|
||||
+
|
||||
+ for proc in gone:
|
||||
+ if (status != 0):
|
||||
+ status = proc.returncode
|
||||
+ # hand over the best status we have
|
||||
+ # but still collect as much stdout/stderr feedback
|
||||
+ # avoid communicate as we know already process
|
||||
+ # is gone and it seems to block when there
|
||||
+ # are D state children we don't get rid off
|
||||
+ os.set_blocking(proc.stdout.fileno(), False)
|
||||
+ os.set_blocking(proc.stderr.fileno(), False)
|
||||
+ try:
|
||||
+ pipe_stdout += proc.stdout.read()
|
||||
+ except:
|
||||
+ pass
|
||||
+ try:
|
||||
+ pipe_stderr += proc.stderr.read()
|
||||
+ except:
|
||||
+ pass
|
||||
+ proc.stdout.close()
|
||||
+ proc.stderr.close()
|
||||
+
|
||||
+ for proc in alive:
|
||||
+ proc.kill()
|
||||
+
|
||||
+ if status is None:
|
||||
+ fail(EC_TIMED_OUT, stop=(int(options.get("retry", 0)) < 1))
|
||||
+ status = EC_TIMED_OUT
|
||||
+ pipe_stdout = ""
|
||||
+ pipe_stderr = "timed out"
|
||||
+
|
||||
+ logging.debug("%s %s %s\n", str(status), str(pipe_stdout), str(pipe_stderr))
|
||||
+
|
||||
+ return (status, pipe_stdout, pipe_stderr)
|
||||
+
|
||||
def run_command(options, command, timeout=None, env=None, log_command=None):
|
||||
if timeout is None and "--power-timeout" in options:
|
||||
timeout = options["--power-timeout"]
|
||||
diff --git a/tests/data/metadata/fence_sbd.xml b/tests/data/metadata/fence_sbd.xml
|
||||
index 516370c40..7248b864a 100644
|
||||
--- a/tests/data/metadata/fence_sbd.xml
|
||||
+++ b/tests/data/metadata/fence_sbd.xml
|
||||
@@ -87,7 +87,7 @@
|
||||
</parameter>
|
||||
<parameter name="power_timeout" unique="0" required="0">
|
||||
<getopt mixed="--power-timeout=[seconds]" />
|
||||
- <content type="second" default="20" />
|
||||
+ <content type="second" default="30" />
|
||||
<shortdesc lang="en">Test X seconds for status change after ON/OFF</shortdesc>
|
||||
</parameter>
|
||||
<parameter name="power_wait" unique="0" required="0">
|
||||
@ -0,0 +1,160 @@
|
||||
From f73b6b4465de1bf2b2887efd3b9767d3f36abd24 Mon Sep 17 00:00:00 2001
|
||||
From: xin liang <xliang@suse.com>
|
||||
Date: Fri, 26 Jul 2024 10:49:55 +0800
|
||||
Subject: [PATCH 1/3] fence_sbd: if sbd devices are not specified with option,
|
||||
read SBD_DEVICE
|
||||
|
||||
from environment
|
||||
---
|
||||
agents/sbd/fence_sbd.py | 12 ++++++++----
|
||||
1 file changed, 8 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/agents/sbd/fence_sbd.py b/agents/sbd/fence_sbd.py
|
||||
index bf95bb72e..c36220295 100644
|
||||
--- a/agents/sbd/fence_sbd.py
|
||||
+++ b/agents/sbd/fence_sbd.py
|
||||
@@ -342,7 +342,7 @@ def define_new_opts():
|
||||
"longopt" : "devices",
|
||||
"help":"--devices=[device_a,device_b] \
|
||||
Comma separated list of sbd devices",
|
||||
- "required" : "1",
|
||||
+ "required" : "0",
|
||||
"shortdesc" : "SBD Device",
|
||||
"order": 1
|
||||
}
|
||||
@@ -382,10 +382,14 @@ def main():
|
||||
docs["vendorurl"] = ""
|
||||
show_docs(options, docs)
|
||||
|
||||
- # We need to check if --devices is given and not empty.
|
||||
+ # If not specified then read SBD_DEVICE from environment
|
||||
if "--devices" not in options:
|
||||
- fail_usage("No SBD devices specified. \
|
||||
- At least one SBD device is required.")
|
||||
+ dev_list = os.getenv("SBD_DEVICE")
|
||||
+ if dev_list:
|
||||
+ options["--devices"] = ",".join(dev_list.split(";"))
|
||||
+ else:
|
||||
+ fail_usage("No SBD devices specified. \
|
||||
+ At least one SBD device is required.")
|
||||
|
||||
run_delay(options)
|
||||
|
||||
|
||||
From 744d534225b51db26058660b753df2991b9356a0 Mon Sep 17 00:00:00 2001
|
||||
From: xin liang <xliang@suse.com>
|
||||
Date: Fri, 26 Jul 2024 17:45:07 +0800
|
||||
Subject: [PATCH 2/3] fence_sbd: Update fence_sbd.xml
|
||||
|
||||
---
|
||||
tests/data/metadata/fence_sbd.xml | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/tests/data/metadata/fence_sbd.xml b/tests/data/metadata/fence_sbd.xml
|
||||
index 82ded25b9..c2daf0c54 100644
|
||||
--- a/tests/data/metadata/fence_sbd.xml
|
||||
+++ b/tests/data/metadata/fence_sbd.xml
|
||||
@@ -8,7 +8,7 @@
|
||||
<content type="string" default="reboot" />
|
||||
<shortdesc lang="en">Fencing action</shortdesc>
|
||||
</parameter>
|
||||
- <parameter name="devices" unique="0" required="1">
|
||||
+ <parameter name="devices" unique="0" required="0">
|
||||
<getopt mixed="--devices=[device_a,device_b]" />
|
||||
<content type="string" />
|
||||
<shortdesc lang="en">SBD Device</shortdesc>
|
||||
|
||||
From 06457f95a4d89d4b6a856ae14ccbcda4d357bef6 Mon Sep 17 00:00:00 2001
From: xin liang <xliang@suse.com>
Date: Tue, 10 Dec 2024 10:00:00 +0800
Subject: [PATCH 3/3] fence_sbd: Check if the sbd daemon is running before
using SBD_DEVICE environment variable

And add @SBDPID_PATH@ for the sbd daemon pid file path
---
agents/sbd/fence_sbd.py | 31 ++++++++++++++++++++++++++++++-
configure.ac | 2 ++
make/fencebuild.mk | 1 +
3 files changed, 33 insertions(+), 1 deletion(-)

diff --git a/agents/sbd/fence_sbd.py b/agents/sbd/fence_sbd.py
index c36220295..bebc7fae1 100644
--- a/agents/sbd/fence_sbd.py
+++ b/agents/sbd/fence_sbd.py
@@ -14,6 +14,7 @@
DEVICE_NOT_INIT = -3
PATH_NOT_EXISTS = -1
PATH_NOT_BLOCK = -2
+SBD_PID_FILE = "@SBDPID_PATH@"

def is_block_device(filename):
"""Checks if a given path is a valid block device
@@ -356,6 +357,34 @@ def define_new_opts():
"order": 200
}

+
+def sbd_daemon_is_running():
+ """Check if the sbd daemon is running
+ """
+ if not os.path.exists(SBD_PID_FILE):
+ logging.info("SBD PID file %s does not exist", SBD_PID_FILE)
+ return False
+
+ try:
+ with open(SBD_PID_FILE, "r") as pid_file:
+ pid = int(pid_file.read().strip())
+ except Exception as e:
+ logging.error("Failed to read PID file %s: %s", SBD_PID_FILE, e)
+ return False
+
+ try:
+ # send signal 0 to check if the process is running
+ os.kill(pid, 0)
+ except ProcessLookupError:
+ logging.info("SBD daemon is not running")
+ return False
+ except Exception as e:
+ logging.error("Failed to send signal 0 to PID %d: %s", pid, e)
+ return False
+
+ return True
+
+
def main():
"""Main function
"""
@@ -385,7 +414,7 @@ def main():
# If not specified then read SBD_DEVICE from environment
if "--devices" not in options:
dev_list = os.getenv("SBD_DEVICE")
- if dev_list:
+ if dev_list and sbd_daemon_is_running():
options["--devices"] = ",".join(dev_list.split(";"))
else:
fail_usage("No SBD devices specified. \
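The new check relies on the standard "signal 0" probe: os.kill(pid, 0) delivers nothing, it only verifies that the PID exists and that the caller may signal it. A minimal standalone sketch of that technique (the helper name and the PermissionError handling are illustrative, not part of the patch):

import os

def pid_is_alive(pid):
    # Signal 0 performs the existence/permission checks without sending a signal.
    try:
        os.kill(pid, 0)
    except ProcessLookupError:
        return False          # no such process
    except PermissionError:
        return True           # process exists but is owned by another user
    return True

# Example:
#   pid_is_alive(os.getpid())  -> True
#   pid_is_alive(99999999)     -> normally False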
diff --git a/configure.ac b/configure.ac
index 6b7322419..0425a9d21 100644
--- a/configure.ac
+++ b/configure.ac
@@ -145,6 +145,8 @@ eval FENCETMPDIR="`eval echo ${FENCETMPDIR}`"
AC_DEFINE_UNQUOTED(FENCETMPDIR,"$FENCETMPDIR", Where Fence agents keep state files)
AC_SUBST(FENCETMPDIR)

+SBDPID_PATH=${localstatedir}/run/sbd.pid
+AC_SUBST(SBDPID_PATH)

if test "x$AGENTS_LIST" = x; then
AC_ERROR([No agents selected])
diff --git a/make/fencebuild.mk b/make/fencebuild.mk
index 9a3c6d6dd..bc9259190 100644
--- a/make/fencebuild.mk
+++ b/make/fencebuild.mk
@@ -9,6 +9,7 @@ define gen_agent_from_py
-e 's#@''SBINDIR@#${sbindir}#g' \
-e 's#@''LIBEXECDIR@#${libexecdir}#g' \
-e 's#@''FENCETMPDIR@#${FENCETMPDIR}#g' \
+ -e 's#@''SBDPID_PATH@#${SBDPID_PATH}#g' \
-e 's#@''IPMITOOL_PATH@#${IPMITOOL_PATH}#g' \
-e 's#@''OPENSTACK_PATH@#${OPENSTACK_PATH}#g' \
-e 's#@''AMTTOOL_PATH@#${AMTTOOL_PATH}#g' \
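Taken together, the configure.ac and fencebuild.mk hunks make the literal placeholder in the Python source resolve to a real path when the agent is generated. A rough illustration, assuming the default localstatedir of /var (the actual value depends on how configure is invoked):

# In the source tree (agents/sbd/fence_sbd.py):
SBD_PID_FILE = "@SBDPID_PATH@"
# In the generated agent, after the sed rule above has run:
SBD_PID_FILE = "/var/run/sbd.pid"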
@@ -15,23 +15,6 @@

_run_zic(zonedir, filepaths)

--- a/awscli/dateutil/zoneinfo/rebuild.py 2023-01-26 16:29:30.000000000 +0100
+++ b/awscli/dateutil/zoneinfo/rebuild.py 2023-07-19 10:12:42.277559948 +0200
@@ -21,7 +21,12 @@
try:
with TarFile.open(filename) as tf:
for name in zonegroups:
- tf.extract(name, tmpdir)
+ if hasattr(tarfile, 'data_filter'):
+ # Python with CVE-2007-4559 mitigation (PEP 706)
+ tf.extract(name, tmpdir, filter='data')
+ else:
+ # Fallback to a possibly dangerous extraction (before PEP 706)
+ tf.extract(name, tmpdir)
filepaths = [os.path.join(tmpdir, n) for n in zonegroups]

_run_zic(zonedir, filepaths)

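The extraction change above follows the usual PEP 706 pattern: probe for tarfile.data_filter and pass filter='data' when it is available, otherwise fall back to the legacy behaviour. A small self-contained sketch of the same idea (the function name and paths are only examples):

import tarfile

def safe_extract_all(archive_path, dest_dir):
    # Use the PEP 706 'data' filter when this Python provides it;
    # older interpreters fall back to unfiltered extraction.
    with tarfile.open(archive_path) as tf:
        if hasattr(tarfile, "data_filter"):
            tf.extractall(dest_dir, filter="data")
        else:
            tf.extractall(dest_dir)

# Example (hypothetical archive):
#   safe_extract_all("zonefiles.tar.gz", "/tmp/zoneinfo")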
--- a/azure/dateutil/zoneinfo/rebuild.py 2023-01-26 16:29:30.000000000 +0100
+++ b/azure/dateutil/zoneinfo/rebuild.py 2023-07-19 10:12:42.277559948 +0200
@@ -21,7 +21,12 @@
SOURCES/requirements-ibm.txt (new file)
@@ -0,0 +1,2 @@
requests
urllib3
@@ -57,21 +57,18 @@
Name: fence-agents
Summary: Set of unified programs capable of host isolation ("fencing")
Version: 4.10.0
Release: 76%{?alphatag:.%{alphatag}}%{?dist}.6
Release: 86%{?alphatag:.%{alphatag}}%{?dist}.4
License: GPLv2+ and LGPLv2+
URL: https://github.com/ClusterLabs/fence-agents
Source0: https://fedorahosted.org/releases/f/e/fence-agents/%{name}-%{version}.tar.gz
### HA support requirements-*.txt ###
Source100: requirements-aliyun.txt
Source101: requirements-aws.txt
Source102: requirements-azure.txt
Source103: requirements-google.txt
Source104: requirements-common.txt
Source100: requirements-common.txt
Source101: requirements-aliyun.txt
Source102: requirements-aws.txt
Source103: requirements-azure.txt
Source104: requirements-google.txt
Source105: requirements-ibm.txt
### HA support libs/utils ###
# awscli 2+ is only available from github (and needs to be renamed from aws-cli... to awscli)
Source900: awscli-2.2.15.tar.gz
# From awscli's requirements.txt: https://github.com/boto/botocore/zipball/v2#egg=botocore
Source901: botocore-2.0.0dev123.zip
# update with ./update-ha-support.sh and replace lines below with output
### BEGIN ###
# aliyun
@@ -92,108 +89,97 @@ Source2000: aliyun-cli-3.0.198.tar.gz
Source2001: aliyun-openapi-meta-5cf98b660.tar.gz
## go mod vendor
Source2002: aliyun-cli-go-vendor.tar.gz
# awscli
Source1008: awscrt-0.11.13-cp39-cp39-manylinux2014_x86_64.whl
Source1009: colorama-0.4.3-py2.py3-none-any.whl
Source1010: cryptography-3.3.2-cp36-abi3-manylinux2010_x86_64.whl
Source1011: distro-1.5.0-py2.py3-none-any.whl
Source1012: docutils-0.15.2-py3-none-any.whl
Source1013: prompt_toolkit-2.0.10-py3-none-any.whl
Source1014: ruamel.yaml-0.15.100.tar.gz
Source1015: six-1.16.0-py2.py3-none-any.whl
Source1016: wcwidth-0.1.9-py2.py3-none-any.whl
# aws
Source1017: boto3-1.17.102-py2.py3-none-any.whl
Source1018: botocore-1.20.102-py2.py3-none-any.whl
Source1019: python_dateutil-2.8.1-py2.py3-none-any.whl
Source1020: s3transfer-0.4.2-py2.py3-none-any.whl
Source1021: urllib3-1.26.18.tar.gz
Source1007: boto3-1.17.102-py2.py3-none-any.whl
Source1008: botocore-1.20.102-py2.py3-none-any.whl
Source1009: python_dateutil-2.8.1-py2.py3-none-any.whl
Source1010: s3transfer-0.4.2-py2.py3-none-any.whl
Source1011: %{urllib3}-%{urllib3_version}.tar.gz
# azure
Source1022: adal-1.2.7-py2.py3-none-any.whl
Source1023: azure_common-1.1.27-py2.py3-none-any.whl
Source1024: azure_core-1.15.0-py2.py3-none-any.whl
Source1025: azure_mgmt_compute-21.0.0-py2.py3-none-any.whl
Source1026: azure_mgmt_core-1.2.2-py2.py3-none-any.whl
Source1027: azure_mgmt_network-19.0.0-py2.py3-none-any.whl
Source1028: azure-identity-1.10.0.zip
Source1029: chardet-4.0.0-py2.py3-none-any.whl
Source1030: idna-2.10-py2.py3-none-any.whl
Source1031: isodate-0.6.0-py2.py3-none-any.whl
Source1032: msrest-0.6.21-py2.py3-none-any.whl
Source1033: msrestazure-0.6.4-py2.py3-none-any.whl
Source1034: %{oauthlib}-%{oauthlib_version}.tar.gz
Source1035: PyJWT-2.1.0-py3-none-any.whl
Source1036: requests-2.25.1-py2.py3-none-any.whl
Source1037: requests_oauthlib-1.3.0-py2.py3-none-any.whl
Source1038: msal-1.18.0.tar.gz
Source1039: msal-extensions-1.0.0.tar.gz
Source1040: portalocker-2.5.1.tar.gz
Source1012: adal-1.2.7.tar.gz
Source1013: azure-common-1.1.28.zip
Source1014: azure_core-1.32.0.tar.gz
Source1015: azure_mgmt_compute-34.0.0.tar.gz
Source1016: azure_mgmt_core-1.5.0.tar.gz
Source1017: azure_mgmt_network-28.1.0.tar.gz
Source1018: azure_identity-1.19.0.tar.gz
Source1019: chardet-4.0.0-py2.py3-none-any.whl
Source1020: isodate-0.6.1.tar.gz
Source1021: msrest-0.7.1.zip
Source1022: msrestazure-0.6.4.post1.tar.gz
Source1023: %{oauthlib}-%{oauthlib_version}.tar.gz
Source1024: PyJWT-2.1.0-py3-none-any.whl
Source1025: requests_oauthlib-1.3.0-py2.py3-none-any.whl
Source1026: msal-1.31.1.tar.gz
Source1027: msal_extensions-1.2.0.tar.gz
Source1028: portalocker-2.5.1.tar.gz
Source1029: cryptography-3.3.2-cp36-abi3-manylinux2010_x86_64.whl
Source1030: typing_extensions-4.12.2.tar.gz
# google
Source1041: cachetools-4.2.2-py3-none-any.whl
Source1042: chardet-3.0.4-py2.py3-none-any.whl
Source1043: google_api_core-1.30.0-py2.py3-none-any.whl
Source1044: google_api_python_client-1.12.8-py2.py3-none-any.whl
Source1045: googleapis_common_protos-1.53.0-py2.py3-none-any.whl
Source1046: google_auth-1.32.0-py2.py3-none-any.whl
Source1047: google_auth_httplib2-0.1.0-py2.py3-none-any.whl
Source1048: httplib2-0.19.1-py3-none-any.whl
Source1049: packaging-20.9-py2.py3-none-any.whl
Source1050: protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl
Source1051: pyasn1-0.4.8-py2.py3-none-any.whl
Source1052: pyasn1_modules-0.2.8-py2.py3-none-any.whl
Source1053: pyparsing-2.4.7-py2.py3-none-any.whl
Source1054: pyroute2-0.7.12.tar.gz
Source1055: pyroute2.core-0.6.13.tar.gz
Source1056: pyroute2.ethtool-0.6.13.tar.gz
Source1057: pyroute2.ipdb-0.6.13.tar.gz
Source1058: pyroute2.ipset-0.6.13.tar.gz
Source1059: pyroute2.ndb-0.6.13.tar.gz
Source1060: pyroute2.nftables-0.6.13.tar.gz
Source1061: pyroute2.nslink-0.6.13.tar.gz
Source1062: pytz-2021.1-py2.py3-none-any.whl
Source1063: rsa-4.7.2-py3-none-any.whl
Source1064: setuptools-71.1.0.tar.gz
Source1065: uritemplate-3.0.1-py2.py3-none-any.whl
Source1031: cachetools-4.2.2-py3-none-any.whl
Source1032: chardet-3.0.4-py2.py3-none-any.whl
Source1033: google_api_core-1.30.0-py2.py3-none-any.whl
Source1034: google_api_python_client-1.12.8-py2.py3-none-any.whl
Source1035: googleapis_common_protos-1.53.0-py2.py3-none-any.whl
Source1036: google_auth-1.32.0-py2.py3-none-any.whl
Source1037: google_auth_httplib2-0.1.0-py2.py3-none-any.whl
Source1038: httplib2-0.19.1-py3-none-any.whl
Source1039: packaging-20.9-py2.py3-none-any.whl
Source1040: protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl
Source1041: pyasn1-0.4.8-py2.py3-none-any.whl
Source1042: pyasn1_modules-0.2.8-py2.py3-none-any.whl
Source1043: pyparsing-2.4.7-py2.py3-none-any.whl
Source1044: pyroute2-0.7.12.tar.gz
Source1045: pyroute2.core-0.6.13.tar.gz
Source1046: pyroute2.ethtool-0.6.13.tar.gz
Source1047: pyroute2.ipdb-0.6.13.tar.gz
Source1048: pyroute2.ipset-0.6.13.tar.gz
Source1049: pyroute2.ndb-0.6.13.tar.gz
Source1050: pyroute2.nftables-0.6.13.tar.gz
Source1051: pyroute2.nslink-0.6.13.tar.gz
Source1052: pytz-2021.1-py2.py3-none-any.whl
Source1053: rsa-4.7.2-py3-none-any.whl
Source1054: setuptools-71.1.0.tar.gz
Source1055: uritemplate-3.0.1-py2.py3-none-any.whl
# common (pexpect / suds)
Source1066: pexpect-4.8.0-py2.py3-none-any.whl
Source1067: ptyprocess-0.7.0-py2.py3-none-any.whl
Source1068: suds_community-0.8.5-py3-none-any.whl
Source1056: pexpect-4.8.0-py2.py3-none-any.whl
Source1057: ptyprocess-0.7.0-py2.py3-none-any.whl
Source1058: suds_community-0.8.5-py3-none-any.whl
### END ###
# kubevirt
## pip download --no-binary :all: openshift "ruamel.yaml.clib>=0.1.2"
### BEGIN
Source1069: %{openshift}-%{openshift_version}.tar.gz
Source1070: %{ruamelyamlclib}-%{ruamelyamlclib_version}.tar.gz
Source1071: %{kubernetes}-%{kubernetes_version}.tar.gz
Source1072: %{certifi}-%{certifi_version}.tar.gz
Source1073: %{googleauth}-%{googleauth_version}.tar.gz
Source1074: %{cachetools}-%{cachetools_version}.tar.gz
Source1075: %{pyasn1modules}-%{pyasn1modules_version}.tar.gz
Source1076: %{pyasn1}-%{pyasn1_version}.tar.gz
Source1077: python-%{dateutil}-%{dateutil_version}.tar.gz
Source1078: %{pyyaml}-%{pyyaml_version}.tar.gz
Source1060: %{openshift}-%{openshift_version}.tar.gz
Source1061: %{ruamelyamlclib}-%{ruamelyamlclib_version}.tar.gz
Source1062: %{kubernetes}-%{kubernetes_version}.tar.gz
Source1063: %{certifi}-%{certifi_version}.tar.gz
Source1064: %{googleauth}-%{googleauth_version}.tar.gz
Source1065: %{cachetools}-%{cachetools_version}.tar.gz
Source1066: %{pyasn1modules}-%{pyasn1modules_version}.tar.gz
Source1067: %{pyasn1}-%{pyasn1_version}.tar.gz
Source1068: python-%{dateutil}-%{dateutil_version}.tar.gz
Source1069: %{pyyaml}-%{pyyaml_version}.tar.gz
## rsa is dependency for "pip install",
## but gets removed to use cryptography lib instead
Source1079: rsa-4.7.2.tar.gz
Source1080: %{six}-%{six_version}.tar.gz
Source1081: %{websocketclient}-%{websocketclient_version}.tar.gz
Source1082: %{jinja2}-%{jinja2_version}.tar.gz
Source1083: %{markupsafe}-%{markupsafe_version}.tar.gz
Source1084: python-%{stringutils}-%{stringutils_version}.tar.gz
Source1085: %{requests}-%{requests_version}.tar.gz
Source1086: %{chrstnormalizer}-%{chrstnormalizer_version}.tar.gz
Source1087: %{idna}-%{idna_version}.tar.gz
Source1088: %{reqstsoauthlib}-%{reqstsoauthlib_version}.tar.gz
Source1089: %{ruamelyaml}-%{ruamelyaml_version}.tar.gz
Source1070: rsa-4.7.2.tar.gz
Source1071: %{six}-%{six_version}.tar.gz
Source1072: %{websocketclient}-%{websocketclient_version}.tar.gz
Source1073: %{jinja2}-%{jinja2_version}.tar.gz
Source1074: %{markupsafe}-%{markupsafe_version}.tar.gz
Source1075: python-%{stringutils}-%{stringutils_version}.tar.gz
Source1076: %{requests}-%{requests_version}.tar.gz
Source1077: %{chrstnormalizer}-%{chrstnormalizer_version}.tar.gz
Source1078: %{idna}-%{idna_version}.tar.gz
Source1079: %{reqstsoauthlib}-%{reqstsoauthlib_version}.tar.gz
Source1080: %{ruamelyaml}-%{ruamelyaml_version}.tar.gz
## required for installation
Source1090: setuptools_scm-8.1.0.tar.gz
Source1091: packaging-21.2-py3-none-any.whl
Source1092: poetry-core-1.0.7.tar.gz
Source1093: pyparsing-3.0.1.tar.gz
Source1094: tomli-2.0.1.tar.gz
Source1095: flit_core-3.9.0.tar.gz
Source1096: typing_extensions-4.12.2.tar.gz
Source1097: wheel-0.37.0-py2.py3-none-any.whl
Source1081: setuptools_scm-8.1.0.tar.gz
Source1082: packaging-21.2-py3-none-any.whl
Source1083: poetry-core-1.0.7.tar.gz
Source1084: pyparsing-3.0.1.tar.gz
Source1085: tomli-2.0.1.tar.gz
Source1086: flit_core-3.9.0.tar.gz
Source1087: wheel-0.37.0-py2.py3-none-any.whl
### END

Patch0: ha-cloud-support-aliyun.patch
@@ -254,14 +240,20 @@ Patch54: RHEL-35263-fence_eps-add-fence_epsr2-for-ePowerSwitch-R2-and-newer.patc
Patch55: RHEL-25256-fence_vmware_rest-detect-user-sufficient-rights.patch
Patch56: RHEL-43235-fence_aws-1-list-add-instance-name-status.patch
Patch57: RHEL-43235-fence_aws-2-log-error-for-unknown-states.patch
Patch58: RHEL-59882-fence_scsi-only-preempt-once-for-mpath-devices.patch
Patch59: RHEL-83487-fence_ibm_vpc-refresh-bearer-token.patch
Patch58: RHEL-59878-fence_scsi-only-preempt-once-for-mpath-devices.patch
Patch59: RHEL-56138-fence_mpath-1-support-hex-key-format.patch
Patch60: RHEL-56138-fence_mpath-2-fix-unfencing-issue-use-MULTILINE-avoid-duplicates.patch
Patch61: RHEL-62206-fence_ibm_powervs-add-private-endpoint-and-token-file-support.patch
Patch62: RHEL-76493-fence_azure_arm-use-azure-identity.patch
Patch63: RHEL-83488-fence_ibm_vpc-refresh-bearer-token.patch
Patch64: RHEL-92695-1-fence_sbd-improve-error-handling.patch
Patch65: RHEL-92695-2-fence_sbd-get-devices-from-SBD_DEVICE-if-devices-parameter-isnt-set.patch

### HA support libs/utils ###
# all archs
Patch1000: bz2217902-1-kubevirt-fix-bundled-dateutil-CVE-2007-4559.patch
# cloud (x86_64 only)
Patch2000: bz2217902-2-aws-awscli-azure-fix-bundled-dateutil-CVE-2007-4559.patch
Patch2000: bz2217902-2-aws-azure-fix-bundled-dateutil-CVE-2007-4559.patch
Patch2001: RHEL-43562-fix-bundled-urllib3-CVE-2024-37891.patch

%global supportedagents amt_ws apc apc_snmp bladecenter brocade cisco_mds cisco_ucs compute drac5 eaton_snmp emerson eps evacuate hpblade ibmblade ibm_powervs ibm_vpc ifmib ilo ilo_moonshot ilo_mp ilo_ssh intelmodular ipdu ipmilan kdump kubevirt lpar mpath redfish rhevm rsa rsb sbd scsi vmware_rest vmware_soap wti
@@ -436,6 +428,12 @@ BuildRequires: %{systemd_units}
%patch -p1 -P 57
%patch -p1 -P 58
%patch -p1 -P 59
%patch -p1 -P 60
%patch -p1 -P 61
%patch -p1 -P 62
%patch -p1 -P 63
%patch -p1 -P 64
%patch -p1 -P 65

# prevent compilation of something that won't get used anyway
sed -i.orig 's|FENCE_ZVM=1|FENCE_ZVM=0|' configure.ac
@@ -466,22 +464,21 @@ popd
# support libs
%ifarch x86_64
LIBS="%{_sourcedir}/requirements-*.txt"
echo "awscli" >> %{_sourcedir}/requirements-awscli.txt
%endif
%ifnarch x86_64
%ifarch ppc64le
LIBS="%{_sourcedir}/requirements-common.txt %{_sourcedir}/requirements-ibm.txt"
%endif
%ifnarch x86_64 ppc64le
LIBS="%{_sourcedir}/requirements-common.txt"
%endif
for x in $LIBS; do
[ "%{_arch}" = "x86_64" ] && [ "$x" = "%{_sourcedir}/requirements-ibm.txt" ] && continue
%{__python3} -m pip install --target support/$(echo $x | sed -E "s/.*requirements-(.*).txt/\1/") --no-index --find-links %{_sourcedir} -r $x
done

# fix incorrect #! detected by CI
%ifarch x86_64
sed -i -e "/^#\!\/Users/c#\!%{__python3}" support/aws/bin/jp support/awscli/bin/jp
%endif

%ifarch x86_64
sed -i -e "/^import awscli.clidriver/isys.path.insert(0, '/usr/lib/%{name}/support/awscli')" support/awscli/bin/aws
sed -i -e "/^#\!\/Users/c#\!%{__python3}" support/aws/bin/jp
%endif

# kubevirt
@@ -617,10 +614,12 @@ This package contains support files including the Python fencing library.
%dir %{_usr}/lib/%{name}
%{_usr}/lib/%{name}/support/common

%ifarch x86_64
%ifarch x86_64 ppc64le
%package -n ha-cloud-support
License: GPL-2.0-or-later AND LGPL-2.0-or-later AND LGPL-2.1-or-later AND Apache-2.0 AND MIT AND BSD-2-Clause AND BSD-3-Clause AND MPL-2.0 AND Apache-2.0 AND PSF-2.0 AND Unlicense AND ISC
Summary: Support libraries for HA Cloud agents
%ifarch x86_64
Requires: awscli2
# aliyun
Provides: bundled(python-aliyun-python-sdk-core) = 2.11.5
Provides: bundled(python-aliyun-python-sdk-ecs) = 4.24.7
@@ -631,17 +630,6 @@ Provides: bundled(python-pycryptodome) = 3.20.0
Provides: bundled(python-pycparser) = 2.20
Provides: bundled(aliyun-cli) = 3.0.198
Provides: bundled(aliyun-openapi-meta) = 5cf98b660
# awscli
Provides: bundled(awscli) = 2.2.15
Provides: bundled(python-awscrt) = 0.11.13
Provides: bundled(python-colorama) = 0.4.3
Provides: bundled(python-cryptography) = 3.3.2
Provides: bundled(python-distro) = 1.5.0
Provides: bundled(python-docutils) = 0.15.2
Provides: bundled(python-prompt-toolkit) = 2.0.10
Provides: bundled(python-ruamel-yaml) = 0.15.100
Provides: bundled(python-six) = 1.16.0
Provides: bundled(python-wcwidth) = 0.1.9
# aws
Provides: bundled(python-boto3) = 1.17.102
Provides: bundled(python-botocore) = 1.20.102
@@ -650,21 +638,32 @@ Provides: bundled(python-s3transfer) = 0.4.2
Provides: bundled(python-urllib3) = 1.26.18
# azure
Provides: bundled(python-adal) = 1.2.7
Provides: bundled(python-azure-common) = 1.1.27
Provides: bundled(python-azure-core) = 1.15.0
Provides: bundled(python-azure-mgmt-compute) = 21.0.0
Provides: bundled(python-azure-mgmt-core) = 1.2.2
Provides: bundled(python-azure-mgmt-network) = 19.0.0
Provides: bundled(python-certifi) = %{certifi_version}
Provides: bundled(python-azure-common) = 1.1.28
Provides: bundled(python-azure-core) = 1.32.0
Provides: bundled(python-azure-identity) = 1.19.0
Provides: bundled(python-azure-mgmt-compute) = 34.0.0
Provides: bundled(python-azure-mgmt-core) = 1.5.0
Provides: bundled(python-azure-mgmt-network) = 28.1.0
Provides: bundled(python-chardet) = 4.0.0
Provides: bundled(python-idna) = 2.10
Provides: bundled(python-isodate) = 0.6.0
Provides: bundled(python-msrest) = 0.6.21
Provides: bundled(python-msrestazure) = 0.6.4
Provides: bundled(python-oauthlib) = 3.1.1
Provides: bundled(python-cffi) = 1.14.5
Provides: bundled(python-%{chrstnormalizer}) = %{chrstnormalizer_version}
Provides: bundled(python-cryptography) = 3.3.2
Provides: bundled(python-dateutil) = 2.8.1
Provides: bundled(python-%{idna}) = %{idna_version}
Provides: bundled(python-isodate) = 0.6.1
Provides: bundled(python-msal) = 1.31.1
Provides: bundled(python-msal-extensions) = 1.2.0
Provides: bundled(python-msrest) = 0.7.1
Provides: bundled(python-msrestazure) = 0.6.4.post1
Provides: bundled(python-%{oauthlib}) = %{oauthlib_version}
Provides: bundled(python-portalocker) = 2.5.1
Provides: bundled(python-pycparser) = 2.20
Provides: bundled(python-PyJWT) = 2.1.0
Provides: bundled(python-requests) = 2.25.1
Provides: bundled(python-%{requests}) = %{requests_version}
Provides: bundled(python-requests-oauthlib) = 1.3.0
Provides: bundled(python-%{six}) = %{six_version}
Provides: bundled(python-typing-extensions) = 4.12.2
Provides: bundled(python-%{urllib3}) = %{urllib3_version}
# google
Provides: bundled(python-cachetools) = 4.2.2
Provides: bundled(python-chardet) = 3.0.4
@@ -691,6 +690,15 @@ Provides: bundled(python-pytz) = 2021.1
Provides: bundled(python-rsa) = 4.7.2
Provides: bundled(python3-setuptools) = 71.1.0
Provides: bundled(python-uritemplate) = 3.0.1
%endif
%ifarch ppc64le
# ibm
Provides: bundled(python3-%{certifi}) = %{certifi_version}
Provides: bundled(python3-%{chrstnormalizer}) = %{chrstnormalizer_version}
Provides: bundled(python3-%{idna}) = %{idna_version}
Provides: bundled(python3-%{requests}) = %{requests_version}
Provides: bundled(python3-%{urllib3}) = %{urllib3_version}
%endif
%description -n ha-cloud-support
Support libraries for Fence Agents.
%files -n ha-cloud-support
@@ -1532,27 +1540,55 @@ are located on corosync cluster nodes.
%endif

%changelog
* Fri Mar 14 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-76.6
* Wed May 21 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-86.4
- fence_sbd: improve error handling and get devices from SBD_DEVICE env
variable if devices parameter isn't set
Resolves: RHEL-92695

* Fri Mar 14 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-86.3
- fence_ibm_vpc: refresh bearer-token if token data is corrupt, and
avoid edge-case of writing empty token file
Resolves: RHEL-83487
Resolves: RHEL-83488

* Tue Mar 11 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-76.5
* Tue Mar 11 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-86
- bundled jinja2: fix CVE-2025-27516
Resolves: RHEL-82712
Resolves: RHEL-82713

* Thu Jan 9 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-76.4
* Fri Jan 31 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-85
- fence_azure_arm: use azure-identity instead of msrestazure, which has
been deprecated
Resolves: RHEL-76493

* Wed Jan 8 2025 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-83
- bundled jinja2: fix CVE-2024-56201 and CVE-2024-56326
Resolves: RHEL-72070, RHEL-72063
Resolves: RHEL-72074, RHEL-72067

* Wed Sep 25 2024 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-76.1
* Tue Nov 26 2024 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-82
- Move fence-agents to AppStream
Resolves: RHEL-68841

* Wed Oct 23 2024 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-81
- fence_ibm_powervs: add private endpoint and token file support
Resolves: RHEL-62206
- ha-cloud-support: bundle libs for the powervs-subnet resource agent
Resolves: RHEL-64023

* Thu Oct 10 2024 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-80
- fence_mpath: add support for hex-key format (used in multipath.conf)
Resolves: RHEL-56138
- fence_scsi/fence_mpath: add support for SPC-4 disks
Resolves: RHEL-7629

* Wed Sep 25 2024 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-77
- fence_scsi: preempt clears all devices on the mpath device, so only
run it for the first device
Resolves: RHEL-59882
Resolves: RHEL-59878
- ha-cloud-support: remove bundled awscli and use awscli2 package
instead
Resolves: RHEL-60020

* Tue Jul 23 2024 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-76
- bundled setuptools: fix CVE-2024-6345

Resolves: RHEL-49658

* Fri Jun 21 2024 Oyvind Albrigtsen <oalbrigt@redhat.com> - 4.10.0-75