import CS keylime-7.12.1-11.el9

This commit is contained in:
eabdullin 2025-09-15 12:09:43 +00:00
parent ea625cf31f
commit eaf4d35265
32 changed files with 4435 additions and 7879 deletions

4
.gitignore vendored
View File

@ -1,2 +1,2 @@
SOURCES/keylime-selinux-1.2.0.tar.gz
SOURCES/v7.3.0.tar.gz
SOURCES/keylime-selinux-42.1.2.tar.gz
SOURCES/v7.12.1.tar.gz

View File

@ -1,2 +1,2 @@
9130beade415b8e3b02aac8d06678f2c45b939fe SOURCES/keylime-selinux-1.2.0.tar.gz
400e2b019060b8a6cc255dbfc14c582121acbee1 SOURCES/v7.3.0.tar.gz
36672155770ce6690e59d97764072f9629af716d SOURCES/keylime-selinux-42.1.2.tar.gz
3db2aa10ee0a005bf5d0a1214cd08e2604da0429 SOURCES/v7.12.1.tar.gz

View File

@ -0,0 +1,628 @@
From f7c32aec9c44a176124d982d942391ed3d50e846 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 3 Jun 2025 21:23:09 +0100
Subject: [PATCH 1/6] Make keylime compatible with python 3.9
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
keylime/ima/types.py | 33 ++++----
keylime/models/base/basic_model.py | 4 +-
keylime/models/base/basic_model_meta.py | 4 +-
keylime/models/base/field.py | 4 +-
keylime/models/base/persistable_model.py | 4 +-
keylime/models/base/type.py | 4 +-
keylime/models/base/types/base64_bytes.py | 4 +-
keylime/models/base/types/certificate.py | 92 +++++++++++----------
keylime/models/base/types/dictionary.py | 4 +-
keylime/models/base/types/one_of.py | 6 +-
keylime/models/registrar/registrar_agent.py | 31 +++----
keylime/policy/create_runtime_policy.py | 2 +-
keylime/registrar_client.py | 8 +-
keylime/web/base/action_handler.py | 7 +-
keylime/web/base/controller.py | 78 ++++++++---------
tox.ini | 10 +++
16 files changed, 154 insertions(+), 141 deletions(-)
diff --git a/keylime/ima/types.py b/keylime/ima/types.py
index 99f0aa7..a0fffdf 100644
--- a/keylime/ima/types.py
+++ b/keylime/ima/types.py
@@ -6,11 +6,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import Literal, TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired, Required
-else:
- from typing_extensions import NotRequired, Required
-
### Types for tpm_dm.py
RuleAttributeType = Optional[Union[int, str, bool]]
@@ -51,7 +46,7 @@ class Rule(TypedDict):
class Policies(TypedDict):
- version: Required[int]
+ version: int
match_on: MatchKeyType
rules: Dict[str, Rule]
@@ -60,27 +55,27 @@ class Policies(TypedDict):
class RPMetaType(TypedDict):
- version: Required[int]
- generator: NotRequired[int]
- timestamp: NotRequired[str]
+ version: int
+ generator: int
+ timestamp: str
class RPImaType(TypedDict):
- ignored_keyrings: Required[List[str]]
- log_hash_alg: Required[Literal["sha1", "sha256", "sha384", "sha512"]]
+ ignored_keyrings: List[str]
+ log_hash_alg: Literal["sha1", "sha256", "sha384", "sha512"]
dm_policy: Optional[Policies]
RuntimePolicyType = TypedDict(
"RuntimePolicyType",
{
- "meta": Required[RPMetaType],
- "release": NotRequired[int],
- "digests": Required[Dict[str, List[str]]],
- "excludes": Required[List[str]],
- "keyrings": Required[Dict[str, List[str]]],
- "ima": Required[RPImaType],
- "ima-buf": Required[Dict[str, List[str]]],
- "verification-keys": Required[str],
+ "meta": RPMetaType,
+ "release": int,
+ "digests": Dict[str, List[str]],
+ "excludes": List[str],
+ "keyrings": Dict[str, List[str]],
+ "ima": RPImaType,
+ "ima-buf": Dict[str, List[str]],
+ "verification-keys": str,
},
)
diff --git a/keylime/models/base/basic_model.py b/keylime/models/base/basic_model.py
index 68a126e..6f5de83 100644
--- a/keylime/models/base/basic_model.py
+++ b/keylime/models/base/basic_model.py
@@ -407,7 +407,9 @@ class BasicModel(ABC, metaclass=BasicModelMeta):
if max and length > max:
self._add_error(field, msg or f"should be at most {length} {element_type}(s)")
- def validate_number(self, field: str, *expressions: tuple[str, int | float], msg: Optional[str] = None) -> None:
+ def validate_number(
+ self, field: str, *expressions: tuple[str, Union[int, float]], msg: Optional[str] = None
+ ) -> None:
value = self.values.get(field)
if not value:
diff --git a/keylime/models/base/basic_model_meta.py b/keylime/models/base/basic_model_meta.py
index 353e004..84617d4 100644
--- a/keylime/models/base/basic_model_meta.py
+++ b/keylime/models/base/basic_model_meta.py
@@ -1,6 +1,6 @@
from abc import ABCMeta
from types import MappingProxyType
-from typing import Any, Callable, Mapping, TypeAlias, Union
+from typing import Any, Callable, Mapping, Union
from sqlalchemy.types import TypeEngine
@@ -40,7 +40,7 @@ class BasicModelMeta(ABCMeta):
# pylint: disable=bad-staticmethod-argument, no-value-for-parameter, using-constant-test
- DeclaredFieldType: TypeAlias = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
+ DeclaredFieldType = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
@classmethod
def _is_model_class(mcs, cls: type) -> bool: # type: ignore[reportSelfClassParameterName]
diff --git a/keylime/models/base/field.py b/keylime/models/base/field.py
index 7fb3dcb..d1e3bc3 100644
--- a/keylime/models/base/field.py
+++ b/keylime/models/base/field.py
@@ -1,6 +1,6 @@
import re
from inspect import isclass
-from typing import TYPE_CHECKING, Any, Optional, TypeAlias, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
from sqlalchemy.types import TypeEngine
@@ -23,7 +23,7 @@ class ModelField:
[2] https://docs.python.org/3/library/functions.html#property
"""
- DeclaredFieldType: TypeAlias = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
+ DeclaredFieldType = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
FIELD_NAME_REGEX = re.compile(r"^[A-Za-z_]+[A-Za-z0-9_]*$")
diff --git a/keylime/models/base/persistable_model.py b/keylime/models/base/persistable_model.py
index 18f7d0d..015d661 100644
--- a/keylime/models/base/persistable_model.py
+++ b/keylime/models/base/persistable_model.py
@@ -1,4 +1,4 @@
-from typing import Any, Mapping, Optional, Sequence
+from typing import Any, Mapping, Optional, Sequence, Union
from keylime.models.base.basic_model import BasicModel
from keylime.models.base.db import db_manager
@@ -165,7 +165,7 @@ class PersistableModel(BasicModel, metaclass=PersistableModelMeta):
else:
return None
- def __init__(self, data: Optional[dict | object] = None, process_associations: bool = True) -> None:
+ def __init__(self, data: Optional[Union[dict, object]] = None, process_associations: bool = True) -> None:
if isinstance(data, type(self).db_mapping):
super().__init__({}, process_associations)
self._init_from_mapping(data, process_associations)
diff --git a/keylime/models/base/type.py b/keylime/models/base/type.py
index 2520f72..e4d924c 100644
--- a/keylime/models/base/type.py
+++ b/keylime/models/base/type.py
@@ -1,7 +1,7 @@
from decimal import Decimal
from inspect import isclass
from numbers import Real
-from typing import Any, TypeAlias, Union
+from typing import Any, Union
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.types import TypeEngine
@@ -99,7 +99,7 @@ class ModelType:
you should instead set ``_type_engine`` to ``None`` and override the ``get_db_type`` method.
"""
- DeclaredTypeEngine: TypeAlias = Union[TypeEngine, type[TypeEngine]]
+ DeclaredTypeEngine = Union[TypeEngine, type[TypeEngine]]
def __init__(self, type_engine: DeclaredTypeEngine) -> None:
if isclass(type_engine) and issubclass(type_engine, TypeEngine):
diff --git a/keylime/models/base/types/base64_bytes.py b/keylime/models/base/types/base64_bytes.py
index b9b4b13..a1eeced 100644
--- a/keylime/models/base/types/base64_bytes.py
+++ b/keylime/models/base/types/base64_bytes.py
@@ -1,6 +1,6 @@
import base64
import binascii
-from typing import Optional, TypeAlias, Union
+from typing import Optional, Union
from sqlalchemy.types import Text
@@ -62,7 +62,7 @@ class Base64Bytes(ModelType):
b64_str = Base64Bytes().cast("MIIE...")
"""
- IncomingValue: TypeAlias = Union[bytes, str, None]
+ IncomingValue = Union[bytes, str, None]
def __init__(self) -> None:
super().__init__(Text)
diff --git a/keylime/models/base/types/certificate.py b/keylime/models/base/types/certificate.py
index 2c27603..0f03169 100644
--- a/keylime/models/base/types/certificate.py
+++ b/keylime/models/base/types/certificate.py
@@ -1,7 +1,7 @@
import base64
import binascii
import io
-from typing import Optional, TypeAlias, Union
+from typing import Optional, Union
import cryptography.x509
from cryptography.hazmat.primitives.serialization import Encoding
@@ -78,7 +78,7 @@ class Certificate(ModelType):
cert = Certificate().cast("-----BEGIN CERTIFICATE-----\nMIIE...")
"""
- IncomingValue: TypeAlias = Union[cryptography.x509.Certificate, bytes, str, None]
+ IncomingValue = Union[cryptography.x509.Certificate, bytes, str, None]
def __init__(self) -> None:
super().__init__(Text)
@@ -195,18 +195,19 @@ class Certificate(ModelType):
"""
try:
- match self.infer_encoding(value):
- case "decoded":
- return None
- case "der":
- cryptography.x509.load_der_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
- case "pem":
- cryptography.x509.load_pem_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
- case "base64":
- der_value = base64.b64decode(value, validate=True) # type: ignore[reportArgumentType, arg-type]
- cryptography.x509.load_der_x509_certificate(der_value)
- case _:
- raise Exception
+ encoding_inf = self.infer_encoding(value)
+ if encoding_inf == "decoded":
+ return None
+
+ if encoding_inf == "der":
+ cryptography.x509.load_der_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
+ elif encoding_inf == "pem":
+ cryptography.x509.load_pem_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
+ elif encoding_inf == "base64":
+ der_value = base64.b64decode(value, validate=True) # type: ignore[reportArgumentType, arg-type]
+ cryptography.x509.load_der_x509_certificate(der_value)
+ else:
+ raise Exception
except Exception:
return False
@@ -227,37 +228,38 @@ class Certificate(ModelType):
if not value:
return None
- match self.infer_encoding(value):
- case "decoded":
- return value # type: ignore[reportReturnType, return-value]
- case "der":
- try:
- return self._load_der_cert(value) # type: ignore[reportArgumentType, arg-type]
- except PyAsn1Error as err:
- raise ValueError(
- f"value cast to certificate appears DER encoded but cannot be deserialized as such: {value!r}"
- ) from err
- case "pem":
- try:
- return self._load_pem_cert(value) # type: ignore[reportArgumentType, arg-type]
- except PyAsn1Error as err:
- raise ValueError(
- f"value cast to certificate appears PEM encoded but cannot be deserialized as such: "
- f"'{str(value)}'"
- ) from err
- case "base64":
- try:
- return self._load_der_cert(base64.b64decode(value, validate=True)) # type: ignore[reportArgumentType, arg-type]
- except (binascii.Error, PyAsn1Error) as err:
- raise ValueError(
- f"value cast to certificate appears Base64 encoded but cannot be deserialized as such: "
- f"'{str(value)}'"
- ) from err
- case _:
- raise TypeError(
- f"value cast to certificate is of type '{value.__class__.__name__}' but should be one of 'str', "
- f"'bytes' or 'cryptography.x509.Certificate': '{str(value)}'"
- )
+ encoding_inf = self.infer_encoding(value)
+ if encoding_inf == "decoded":
+ return value # type: ignore[reportReturnType, return-value]
+
+ if encoding_inf == "der":
+ try:
+ return self._load_der_cert(value) # type: ignore[reportArgumentType, arg-type]
+ except PyAsn1Error as err:
+ raise ValueError(
+ f"value cast to certificate appears DER encoded but cannot be deserialized as such: {value!r}"
+ ) from err
+ elif encoding_inf == "pem":
+ try:
+ return self._load_pem_cert(value) # type: ignore[reportArgumentType, arg-type]
+ except PyAsn1Error as err:
+ raise ValueError(
+ f"value cast to certificate appears PEM encoded but cannot be deserialized as such: "
+ f"'{str(value)}'"
+ ) from err
+ elif encoding_inf == "base64":
+ try:
+ return self._load_der_cert(base64.b64decode(value, validate=True)) # type: ignore[reportArgumentType, arg-type]
+ except (binascii.Error, PyAsn1Error) as err:
+ raise ValueError(
+ f"value cast to certificate appears Base64 encoded but cannot be deserialized as such: "
+ f"'{str(value)}'"
+ ) from err
+ else:
+ raise TypeError(
+ f"value cast to certificate is of type '{value.__class__.__name__}' but should be one of 'str', "
+ f"'bytes' or 'cryptography.x509.Certificate': '{str(value)}'"
+ )
def generate_error_msg(self, _value: IncomingValue) -> str:
return "must be a valid X.509 certificate in PEM format or otherwise encoded using Base64"
diff --git a/keylime/models/base/types/dictionary.py b/keylime/models/base/types/dictionary.py
index 7d9e811..d9ffec3 100644
--- a/keylime/models/base/types/dictionary.py
+++ b/keylime/models/base/types/dictionary.py
@@ -1,5 +1,5 @@
import json
-from typing import Optional, TypeAlias, Union
+from typing import Optional, Union
from sqlalchemy.types import Text
@@ -50,7 +50,7 @@ class Dictionary(ModelType):
kv_pairs = Dictionary().cast('{"key": "value"}')
"""
- IncomingValue: TypeAlias = Union[dict, str, None]
+ IncomingValue = Union[dict, str, None]
def __init__(self) -> None:
super().__init__(Text)
diff --git a/keylime/models/base/types/one_of.py b/keylime/models/base/types/one_of.py
index 479d417..faf097d 100644
--- a/keylime/models/base/types/one_of.py
+++ b/keylime/models/base/types/one_of.py
@@ -1,6 +1,6 @@
from collections import Counter
from inspect import isclass
-from typing import Any, Optional, TypeAlias, Union
+from typing import Any, Optional, Union
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.types import Float, Integer, String, TypeEngine
@@ -65,8 +65,8 @@ class OneOf(ModelType):
incoming PEM value would not be cast to a certificate object and remain a string.
"""
- Declaration: TypeAlias = Union[str, int, float, ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
- PermittedList: TypeAlias = list[Union[str, int, float, ModelType]]
+ Declaration = Union[str, int, float, ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
+ PermittedList = list[Union[str, int, float, ModelType]]
def __init__(self, *args: Declaration) -> None:
# pylint: disable=super-init-not-called
diff --git a/keylime/models/registrar/registrar_agent.py b/keylime/models/registrar/registrar_agent.py
index 560c188..b232049 100644
--- a/keylime/models/registrar/registrar_agent.py
+++ b/keylime/models/registrar/registrar_agent.py
@@ -153,21 +153,22 @@ class RegistrarAgent(PersistableModel):
names = ", ".join(non_compliant_certs)
names = " and".join(names.rsplit(",", 1))
- match config.get("registrar", "malformed_cert_action"):
- case "ignore":
- return
- case "reject":
- logger.error(
- "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were rejected due to "
- "config ('malformed_cert_action = reject')",
- names,
- )
- case _:
- logger.warning(
- "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were re-encoded before "
- "parsing by python-cryptography",
- names,
- )
+ cfg = config.get("registrar", "malformed_cert_action")
+ if cfg == "ignore":
+ return
+
+ if cfg == "reject":
+ logger.error(
+ "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were rejected due to "
+ "config ('malformed_cert_action = reject')",
+ names,
+ )
+ else:
+ logger.warning(
+ "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were re-encoded before "
+ "parsing by python-cryptography",
+ names,
+ )
def _bind_ak_to_iak(self, iak_attest, iak_sign):
# The ak-iak binding should only be verified when either aik_tpm or iak_tpm is changed
diff --git a/keylime/policy/create_runtime_policy.py b/keylime/policy/create_runtime_policy.py
index 6a412c4..8e1c687 100644
--- a/keylime/policy/create_runtime_policy.py
+++ b/keylime/policy/create_runtime_policy.py
@@ -972,7 +972,7 @@ def create_runtime_policy(args: argparse.Namespace) -> Optional[RuntimePolicyTyp
)
abort = True
else:
- if a not in algorithms.Hash:
+ if a not in set(algorithms.Hash):
if a == SHA256_OR_SM3:
algo = a
else:
diff --git a/keylime/registrar_client.py b/keylime/registrar_client.py
index 705ff12..97fbc2a 100644
--- a/keylime/registrar_client.py
+++ b/keylime/registrar_client.py
@@ -13,12 +13,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired
-else:
- from typing_extensions import NotRequired
-
-
class RegistrarData(TypedDict):
ip: Optional[str]
port: Optional[str]
@@ -27,7 +21,7 @@ class RegistrarData(TypedDict):
aik_tpm: str
ek_tpm: str
ekcert: Optional[str]
- provider_keys: NotRequired[Dict[str, str]]
+ provider_keys: Dict[str, str]
logger = keylime_logging.init_logging("registrar_client")
diff --git a/keylime/web/base/action_handler.py b/keylime/web/base/action_handler.py
index b20de89..e7b5888 100644
--- a/keylime/web/base/action_handler.py
+++ b/keylime/web/base/action_handler.py
@@ -1,4 +1,5 @@
import re
+import sys
import time
import traceback
from inspect import iscoroutinefunction
@@ -48,7 +49,11 @@ class ActionHandler(RequestHandler):
# Take the list of strings returned by format_exception, where each string ends in a newline and may contain
# internal newlines, and split the concatenation of all the strings by newline
- message = "".join(traceback.format_exception(err))
+ if sys.version_info < (3, 10):
+ message = "".join(traceback.format_exception(err, None, None))
+ else:
+ message = "".join(traceback.format_exception(err))
+
lines = message.split("\n")
for line in lines:
diff --git a/keylime/web/base/controller.py b/keylime/web/base/controller.py
index f1ac3c5..153535e 100644
--- a/keylime/web/base/controller.py
+++ b/keylime/web/base/controller.py
@@ -2,7 +2,7 @@ import http.client
import json
import re
from types import MappingProxyType
-from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence, TypeAlias, Union
+from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence, Union
from tornado.escape import parse_qs_bytes
from tornado.httputil import parse_body_arguments
@@ -15,14 +15,16 @@ if TYPE_CHECKING:
from keylime.models.base.basic_model import BasicModel
from keylime.web.base.action_handler import ActionHandler
-PathParams: TypeAlias = Mapping[str, str]
-QueryParams: TypeAlias = Mapping[str, str | Sequence[str]]
-MultipartParams: TypeAlias = Mapping[str, Union[str, bytes, Sequence[str | bytes]]]
-FormParams: TypeAlias = Union[QueryParams, MultipartParams]
-JSONConvertible: TypeAlias = Union[str, int, float, bool, None, "JSONObjectConvertible", "JSONArrayConvertible"]
-JSONObjectConvertible: TypeAlias = Mapping[str, JSONConvertible]
-JSONArrayConvertible: TypeAlias = Sequence[JSONConvertible] # pyright: ignore[reportInvalidTypeForm]
-Params: TypeAlias = Mapping[str, Union[str, bytes, Sequence[str | bytes], JSONObjectConvertible, JSONArrayConvertible]]
+PathParams = Mapping[str, str]
+QueryParams = Mapping[str, Union[str, Sequence[str]]]
+MultipartParams = Mapping[str, Union[str, bytes, Union[Sequence[str], Sequence[bytes]]]]
+FormParams = Union[QueryParams, MultipartParams]
+JSONConvertible = Union[str, int, float, bool, None, "JSONObjectConvertible", "JSONArrayConvertible"]
+JSONObjectConvertible = Mapping[str, JSONConvertible]
+JSONArrayConvertible = Sequence[JSONConvertible] # pyright: ignore[reportInvalidTypeForm]
+Params = Mapping[
+ str, Union[str, bytes, Union[Sequence[str], Sequence[bytes]], JSONObjectConvertible, JSONArrayConvertible]
+]
class Controller:
@@ -77,7 +79,7 @@ class Controller:
VERSION_REGEX = re.compile("^\\/v(\\d+)(?:\\.(\\d+))*")
@staticmethod
- def decode_url_query(query: str | bytes) -> QueryParams:
+ def decode_url_query(query: Union[str, bytes]) -> QueryParams:
"""Parses a binary query string (whether from a URL or HTTP body) into a dict of Unicode strings. If multiple
instances of the same key are present in the string, their values are collected into a list.
@@ -135,8 +137,8 @@ class Controller:
@staticmethod
def prepare_http_body(
- body: Union[str, JSONObjectConvertible | JSONArrayConvertible, Any], content_type: Optional[str] = None
- ) -> tuple[Optional[bytes | Any], Optional[str]]:
+ body: Union[str, Union[JSONObjectConvertible, JSONArrayConvertible], Any], content_type: Optional[str] = None
+ ) -> tuple[Optional[Union[bytes, Any]], Optional[str]]:
"""Prepares an object to be included in the body of an HTTP request or response and infers the appropriate
media type unless provided. ``body`` will be serialised into JSON if it contains a ``dict`` or ``list`` which is
serialisable unless a ``content_type`` other than ``"application/json"`` is provided.
@@ -155,32 +157,34 @@ class Controller:
if content_type:
content_type = content_type.lower().strip()
- body_out: Optional[bytes | Any]
- content_type_out: Optional[str]
-
- match (body, content_type):
- case (None, _):
- body_out = None
- content_type_out = content_type
- case ("", _):
- body_out = b""
- content_type_out = "text/plain; charset=utf-8"
- case (_, "text/plain"):
+ body_out: Optional[bytes | Any] = None
+ content_type_out: Optional[str] = None
+
+ if body is None:
+ body_out = None
+ content_type_out = content_type
+ elif body == "":
+ body_out = b""
+ content_type_out = "text/plain; charset=utf-8"
+ else:
+ if content_type == "text/plain":
body_out = str(body).encode("utf-8")
content_type_out = "text/plain; charset=utf-8"
- case (_, "application/json") if isinstance(body, str):
- body_out = body.encode("utf-8")
- content_type_out = "application/json"
- case (_, "application/json"):
- body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
- content_type_out = "application/json"
- case (_, None) if isinstance(body, str):
- body_out = body.encode("utf-8")
- content_type_out = "text/plain; charset=utf-8"
- case (_, None) if isinstance(body, (dict, list)):
- body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
- content_type_out = "application/json"
- case (_, _):
+ elif content_type == "application/json":
+ if isinstance(body, str):
+ body_out = body.encode("utf-8")
+ content_type_out = "application/json"
+ else:
+ body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
+ content_type_out = "application/json"
+ elif content_type is None:
+ if isinstance(body, str):
+ body_out = body.encode("utf-8")
+ content_type_out = "text/plain; charset=utf-8"
+ elif isinstance(body, (dict, list)):
+ body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
+ content_type_out = "application/json"
+ else:
body_out = body
content_type_out = content_type
@@ -248,7 +252,7 @@ class Controller:
self,
code: int = 200,
status: Optional[str] = None,
- data: Optional[JSONObjectConvertible | JSONArrayConvertible] = None,
+ data: Optional[Union[JSONObjectConvertible, JSONArrayConvertible]] = None,
) -> None:
"""Converts a Python data structure to JSON and wraps it in the following boilerplate JSON object which is
returned by all v2 endpoints:
diff --git a/tox.ini b/tox.ini
index 031ac54..ce3974c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -51,3 +51,13 @@ commands = black --diff ./keylime ./test
deps =
isort
commands = isort --diff --check ./keylime ./test
+
+
+[testenv:pylint39]
+basepython = python3.9
+deps =
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+ pylint
+commands = bash scripts/check_codestyle.sh
+allowlist_externals = bash
--
2.47.1

View File

@ -1,104 +0,0 @@
Subject: [PATCH] Remove usage of Required/NotRequired typing_ext
Since we do not yet have typing_extensions packaged, let us not
use its functionality yet.
---
keylime/ima/types.py | 33 ++++++++++++++-------------------
keylime/registrar_client.py | 8 +-------
2 files changed, 15 insertions(+), 26 deletions(-)
diff --git a/keylime/ima/types.py b/keylime/ima/types.py
index 99f0aa7..a0fffdf 100644
--- a/keylime/ima/types.py
+++ b/keylime/ima/types.py
@@ -6,11 +6,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import Literal, TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired, Required
-else:
- from typing_extensions import NotRequired, Required
-
### Types for tpm_dm.py
RuleAttributeType = Optional[Union[int, str, bool]]
@@ -51,7 +46,7 @@ class Rule(TypedDict):
class Policies(TypedDict):
- version: Required[int]
+ version: int
match_on: MatchKeyType
rules: Dict[str, Rule]
@@ -60,27 +55,27 @@ class Policies(TypedDict):
class RPMetaType(TypedDict):
- version: Required[int]
- generator: NotRequired[int]
- timestamp: NotRequired[str]
+ version: int
+ generator: int
+ timestamp: str
class RPImaType(TypedDict):
- ignored_keyrings: Required[List[str]]
- log_hash_alg: Required[Literal["sha1", "sha256", "sha384", "sha512"]]
+ ignored_keyrings: List[str]
+ log_hash_alg: Literal["sha1", "sha256", "sha384", "sha512"]
dm_policy: Optional[Policies]
RuntimePolicyType = TypedDict(
"RuntimePolicyType",
{
- "meta": Required[RPMetaType],
- "release": NotRequired[int],
- "digests": Required[Dict[str, List[str]]],
- "excludes": Required[List[str]],
- "keyrings": Required[Dict[str, List[str]]],
- "ima": Required[RPImaType],
- "ima-buf": Required[Dict[str, List[str]]],
- "verification-keys": Required[str],
+ "meta": RPMetaType,
+ "release": int,
+ "digests": Dict[str, List[str]],
+ "excludes": List[str],
+ "keyrings": Dict[str, List[str]],
+ "ima": RPImaType,
+ "ima-buf": Dict[str, List[str]],
+ "verification-keys": str,
},
)
diff --git a/keylime/registrar_client.py b/keylime/registrar_client.py
index ab28977..ea5341b 100644
--- a/keylime/registrar_client.py
+++ b/keylime/registrar_client.py
@@ -13,12 +13,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired
-else:
- from typing_extensions import NotRequired
-
-
class RegistrarData(TypedDict):
ip: Optional[str]
port: Optional[str]
@@ -27,7 +21,7 @@ class RegistrarData(TypedDict):
aik_tpm: str
ek_tpm: str
ekcert: Optional[str]
- provider_keys: NotRequired[Dict[str, str]]
+ provider_keys: Dict[str, str]
logger = keylime_logging.init_logging("registrar_client")
--
2.41.0

View File

@ -1,27 +0,0 @@
From e8a1fa55ff0892ee2380e832ac94abc629b401d6 Mon Sep 17 00:00:00 2001
From: Patrik Koncity <pkoncity@redhat.com>
Date: Thu, 10 Aug 2023 07:47:04 -0400
Subject: [PATCH 2/2] Allow keylime_server_t tcp connect to several domains
---
keylime-selinux-1.2.0/keylime.te | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/keylime-selinux-1.2.0/keylime.te b/keylime-selinux-1.2.0/keylime.te
index 8d47d26..8e6487b 100644
--- a/keylime-selinux-1.2.0/keylime.te
+++ b/keylime-selinux-1.2.0/keylime.te
@@ -83,6 +83,10 @@ allow keylime_server_t self:udp_socket create_stream_socket_perms;
manage_dirs_pattern(keylime_server_t, keylime_log_t, keylime_log_t)
manage_files_pattern(keylime_server_t, keylime_log_t, keylime_log_t)
+corenet_tcp_connect_http_cache_port(keylime_server_t)
+corenet_tcp_connect_mysqld_port(keylime_server_t)
+corenet_tcp_connect_postgresql_port(keylime_server_t)
+
fs_getattr_all_fs(keylime_server_t)
fs_rw_inherited_tmpfs_files(keylime_server_t)
--
2.39.3

View File

@ -0,0 +1,58 @@
From 5c5c7f7f7180111485b24061af4c0395476958b5 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Thu, 22 May 2025 11:25:15 -0400
Subject: [PATCH 2/6] tests: fix rpm repo tests from create-runtime-policy
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
.../create-runtime-policy/setup-rpm-tests | 28 +++++++++++++------
1 file changed, 20 insertions(+), 8 deletions(-)
diff --git a/test/data/create-runtime-policy/setup-rpm-tests b/test/data/create-runtime-policy/setup-rpm-tests
index 708438c..b62729b 100755
--- a/test/data/create-runtime-policy/setup-rpm-tests
+++ b/test/data/create-runtime-policy/setup-rpm-tests
@@ -217,20 +217,32 @@ create_rpm() {
# https://github.com/rpm-software-management/rpm/commit/96467dce18f264b278e17ffe1859c88d9b5aa4b6
_pkgname="DUMMY-${_name}-${_version}-${_rel}.noarch.rpm"
- _expected_pkg="${RPMSDIR}/noarch/${_pkgname}"
- [ -e "${_expected_pkg}" ] && return 0
+ # For some reason, it may not store the built package within the
+ # noarch directory, but directly in RPMS, so let's check both
+ # locations.
+ _expected_pkg="${RPMSDIR}/noarch/${_pkgname} ${RPMSDIR}/${_pkgname}"
+ for _expected in ${_expected_pkg}; do
+ if [ -e "${_expected}" ]; then
+ echo "(create_rpm) CREATED RPM: ${_expected}" >&2
+ return 0
+ fi
+ done
# OK, the package was not built where it should. Let us see if
# it was built in ~/rpmbuild instead, and if that is the case,
# copy it to the expected location.
- _bad_location_pkg="${HOME}/rpmbuild/RPMS/noarch/${_pkgname}"
- if [ -e "${_bad_location_pkg}" ]; then
- echo "WARNING: the package ${_pkgname} was built into ~/rpmbuild despite rpmbuild being instructed to build it at a different location. Probably a fallout from https://github.com/rpm-software-management/rpm/commit/96467dce" >&2
- install -D -m644 "${_bad_location_pkg}" "${_expected_pkg}"
- return 0
- fi
+ _bad_location_pkg="${HOME}/rpmbuild/RPMS/noarch/${_pkgname} ${HOME}/rpmbuild/RPMS/${_pkgname}"
+ for _bad_l in ${_bad_location_pkg}; do
+ if [ -e "${_bad_l}" ]; then
+ echo "WARNING: the package ${_pkgname} was built into ~/rpmbuild despite rpmbuild being instructed to build it at a different location. Probably a fallout from https://github.com/rpm-software-management/rpm/commit/96467dce" >&2
+ install -D -m644 "${_bad_l}" "${RPMSDIR}/noarch/${_pkgname}"
+ echo "(create_rpm) CREATED RPM: ${RPMSDIR}/noarch/${_pkgname}" >&2
+ return 0
+ fi
+ done
# Should not be here.
+ echo "create_rpm() ended with error; probably an issue with the location where the RPMs were built" >&2
return 1
}
--
2.47.1

View File

@ -1,51 +0,0 @@
From b8e26ca5e98e1b842db2fc21411962d40f27c557 Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Tue, 15 Aug 2023 07:19:28 -0400
Subject: [PATCH 3/4] Use version 2.0 as the minimum for the configuration
---
keylime/cmd/convert_config.py | 16 +++++++++++-----
1 file changed, 11 insertions(+), 5 deletions(-)
diff --git a/keylime/cmd/convert_config.py b/keylime/cmd/convert_config.py
index ac28151..1d71b99 100755
--- a/keylime/cmd/convert_config.py
+++ b/keylime/cmd/convert_config.py
@@ -191,7 +191,13 @@ def output(components: List[str], config: RawConfigParser, templates: str, outdi
# Check that there are templates for all components
for component in components:
- version = config[component]["version"].strip('" ')
+ # Minimum version.
+ version = '2.0'
+ if "version" in config[component]:
+ version = config[component]["version"].strip('" ')
+ else:
+ config[component]["version"] = version
+
version_dir = os.path.join(templates, version)
if not os.path.isdir(version_dir):
raise Exception(f"Could not find directory {version_dir}")
@@ -292,15 +298,15 @@ def process_mapping(
raise Exception("Invalid version number found in old configuration")
except (configparser.NoOptionError, configparser.NoSectionError):
- print(f"No version found in old configuration for {component}, using '1.0'")
- old_version = (1, 0)
+ print(f"No version found in old configuration for {component}, using '2.0'")
+ old_version = (2, 0)
else:
# If the old_version does not contain the component from the
# mapping, use the minimum version to use defaults
- old_version = (1, 0)
+ old_version = (2, 0)
# Skip versions lower than the current version
- if old_version >= new_version:
+ if old_version >= new_version and component in old_config:
new[component] = old_config[component]
continue
--
2.39.3

View File

@ -0,0 +1,52 @@
From 4e7cd6b75de27897ecc8e7329732cd945f7adfd0 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Thu, 22 May 2025 18:27:04 +0100
Subject: [PATCH 3/6] tests: skip measured-boot related tests for s390x and
ppc64le
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
test/test_create_mb_policy.py | 2 ++
test/test_mba_parsing.py | 2 ++
2 files changed, 4 insertions(+)
diff --git a/test/test_create_mb_policy.py b/test/test_create_mb_policy.py
index eaed0e3..b00d8e7 100644
--- a/test/test_create_mb_policy.py
+++ b/test/test_create_mb_policy.py
@@ -5,6 +5,7 @@ Copyright 2024 Red Hat, Inc.
import argparse
import os
+import platform
import unittest
from keylime.policy import create_mb_policy
@@ -12,6 +13,7 @@ from keylime.policy import create_mb_policy
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "data", "create-mb-policy"))
+@unittest.skipIf(platform.machine() in ["ppc64le", "s390x"], "ppc64le and s390x are not supported")
class CreateMeasuredBootPolicy_Test(unittest.TestCase):
def test_event_to_sha256(self):
test_cases = [
diff --git a/test/test_mba_parsing.py b/test/test_mba_parsing.py
index 670a602..e157116 100644
--- a/test/test_mba_parsing.py
+++ b/test/test_mba_parsing.py
@@ -1,10 +1,12 @@
import os
+import platform
import unittest
from keylime.common.algorithms import Hash
from keylime.mba import mba
+@unittest.skipIf(platform.machine() in ["ppc64le", "s390x"], "ppc64le and s390x are not supported")
class TestMBAParsing(unittest.TestCase):
def test_parse_bootlog(self):
"""Test parsing binary measured boot event log"""
--
2.47.1

View File

@ -1,88 +0,0 @@
From dbd521e8e8f0ffd9ace79c7b9b888f4cb89488f9 Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Tue, 15 Aug 2023 06:09:37 -0400
Subject: [PATCH 4/4] Duplicate str_to_version for the upgrade tool
So it does not depend on python-keylime
---
keylime/cmd/convert_config.py | 24 ++++++++++++++++++++++--
templates/2.0/adjust.py | 22 ++++++++++++++++++++--
2 files changed, 42 insertions(+), 4 deletions(-)
diff --git a/keylime/cmd/convert_config.py b/keylime/cmd/convert_config.py
index c1c6180..cad5e31 100755
--- a/keylime/cmd/convert_config.py
+++ b/keylime/cmd/convert_config.py
@@ -84,13 +84,33 @@ import importlib.util
import itertools
import json
import os
+import re
import shutil
from configparser import RawConfigParser
-from typing import List, Optional, Tuple
+from typing import List, Optional, Tuple, Union
from jinja2 import Template
-from keylime.common.version import str_to_version
+
+def str_to_version(v_str: str) -> Union[Tuple[int, int], None]:
+ """
+ Validates the string format and converts the provided string to a tuple of
+ ints which can be sorted and compared.
+
+ :returns: Tuple with version number parts converted to int. In case of
+ invalid version string, returns None
+ """
+
+ # Strip to remove eventual quotes and spaces
+ v_str = v_str.strip('" ')
+
+ m = re.match(r"^(\d+)\.(\d+)$", v_str)
+
+ if not m:
+ return None
+
+ return (int(m.group(1)), int(m.group(2)))
+
COMPONENTS = ["agent", "verifier", "tenant", "registrar", "ca", "logging"]
diff --git a/templates/2.0/adjust.py b/templates/2.0/adjust.py
index 312b790..c1e582a 100644
--- a/templates/2.0/adjust.py
+++ b/templates/2.0/adjust.py
@@ -2,9 +2,27 @@ import ast
import configparser
import re
from configparser import RawConfigParser
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Optional, Tuple, Union
-from keylime.common.version import str_to_version
+
+def str_to_version(v_str: str) -> Union[Tuple[int, int], None]:
+ """
+ Validates the string format and converts the provided string to a tuple of
+ ints which can be sorted and compared.
+
+ :returns: Tuple with version number parts converted to int. In case of
+ invalid version string, returns None
+ """
+
+ # Strip to remove eventual quotes and spaces
+ v_str = v_str.strip('" ')
+
+ m = re.match(r"^(\d+)\.(\d+)$", v_str)
+
+ if not m:
+ return None
+
+ return (int(m.group(1)), int(m.group(2)))
def adjust(config: RawConfigParser, mapping: Dict) -> None: # pylint: disable=unused-argument
--
2.39.3

View File

@ -0,0 +1,52 @@
From 7ca86e1c0d68f45915d9f583ffaf149285905005 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 3 Jun 2025 10:50:48 +0100
Subject: [PATCH 4/6] templates: duplicate str_to_version() in the adjust
script
As a follow-up of upstream PR#1486, duplicate the str_to_version()
method in adjust.py so that we do not need the keylime modules in
order for the configuration upgrade script to run.
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
templates/2.0/adjust.py | 22 ++++++++++++++++++++--
1 file changed, 20 insertions(+), 2 deletions(-)
diff --git a/templates/2.0/adjust.py b/templates/2.0/adjust.py
index 6008e4c..24ba898 100644
--- a/templates/2.0/adjust.py
+++ b/templates/2.0/adjust.py
@@ -4,9 +4,27 @@ import logging
import re
from configparser import RawConfigParser
from logging import Logger
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, Tuple, Union
-from keylime.common.version import str_to_version
+
+def str_to_version(v_str: str) -> Union[Tuple[int, int], None]:
+ """
+ Validates the string format and converts the provided string to a tuple of
+ ints which can be sorted and compared.
+
+ :returns: Tuple with version number parts converted to int. In case of
+ invalid version string, returns None
+ """
+
+ # Strip to remove eventual quotes and spaces
+ v_str = v_str.strip('" ')
+
+ m = re.match(r"^(\d+)\.(\d+)$", v_str)
+
+ if not m:
+ return None
+
+ return (int(m.group(1)), int(m.group(2)))
def adjust(
--
2.47.1

View File

@ -0,0 +1,404 @@
From c60460eccab93863dbd1fd0b748e5a275c8e6737 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 3 Jun 2025 21:29:15 +0100
Subject: [PATCH 5/6] Restore RHEL-9 version of create_allowlist.sh
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
scripts/create_runtime_policy.sh | 335 ++++++++++---------------------
1 file changed, 104 insertions(+), 231 deletions(-)
diff --git a/scripts/create_runtime_policy.sh b/scripts/create_runtime_policy.sh
index 90ba50b..c0b641d 100755
--- a/scripts/create_runtime_policy.sh
+++ b/scripts/create_runtime_policy.sh
@@ -1,282 +1,155 @@
-#!/usr/bin/env bash
+#!/usr/bin/bash
################################################################################
# SPDX-License-Identifier: Apache-2.0
# Copyright 2017 Massachusetts Institute of Technology.
################################################################################
-
-if [ $0 != "-bash" ] ; then
- pushd `dirname "$0"` > /dev/null 2>&1
-fi
-KCRP_BASE_DIR=$(pwd)
-if [ $0 != "-bash" ] ; then
- popd 2>&1 > /dev/null
-fi
-KCRP_BASE_DIR=$KCRP_BASE_DIR/..
-
-function detect_hash {
- local hashstr=$1
-
- case "${#hashstr}" in
- 32) hashalgo=md5sum ;;
- 40) hashalgo=sha1sum ;;
- 64) hashalgo=sha256sum ;;
- 128) hashalgo=sha512sum ;;
- *) hashalgo="na";;
- esac
-
- echo $hashalgo
-}
-
-function announce {
- # 1 - MESSAGE
-
- MESSAGE=$(echo "${1}" | tr '\n' ' ')
- MESSAGE=$(echo $MESSAGE | sed "s/\t\t*/ /g")
-
- echo "==> $(date) - ${0} - $MESSAGE"
-}
-
-function valid_algo {
- local algo=$1
-
- [[ " ${ALGO_LIST[@]} " =~ " ${algo} " ]]
-}
-
# Configure the installer here
INITRAMFS_TOOLS_GIT=https://salsa.debian.org/kernel-team/initramfs-tools.git
INITRAMFS_TOOLS_VER="master"
-# All defaults
-ALGO=sha1sum
-WORK_DIR=/tmp/kcrp
-OUTPUT_DIR=${WORK_DIR}/output
-ALLOWLIST_DIR=${WORK_DIR}/allowlist
-INITRAMFS_LOC="/boot/"
-INITRAMFS_STAGING_DIR=${WORK_DIR}/ima_ramfs/
-INITRAMFS_TOOLS_DIR=${WORK_DIR}/initramfs-tools
-BOOT_AGGREGATE_LOC="/sys/kernel/security/ima/ascii_runtime_measurements"
-ROOTFS_LOC="/"
-EXCLUDE_LIST="none"
-SKIP_PATH="none"
-ALGO_LIST=("sha1sum" "sha256sum" "sha512sum")
+WORKING_DIR=$(readlink -f "$0")
+WORKING_DIR=$(dirname "$WORKING_DIR")
# Grabs Debian's initramfs_tools from Git repo if no other options exist
if [[ ! `command -v unmkinitramfs` && ! -x "/usr/lib/dracut/skipcpio" ]] ; then
# Create temp dir for pulling in initramfs-tools
- announce "INFO: Downloading initramfs-tools: $INITRAMFS_TOOLS_DIR"
+ TMPDIR=`mktemp -d` || exit 1
+ echo "INFO: Downloading initramfs-tools: $TMPDIR"
- mkdir -p $INITRAMFS_TOOLS_DIR
# Clone initramfs-tools repo
- pushd $INITRAMFS_TOOLS_DIR > /dev/null 2>&1
- git clone $INITRAMFS_TOOLS_GIT initramfs-tools > /dev/null 2>&1
- pushd initramfs-tools > /dev/null 2>&1
- git checkout $INITRAMFS_TOOLS_VER > /dev/null 2>&1
- popd > /dev/null 2>&1
- popd > /dev/null 2>&1
+ pushd $TMPDIR
+ git clone $INITRAMFS_TOOLS_GIT initramfs-tools
+ pushd initramfs-tools
+ git checkout $INITRAMFS_TOOLS_VER
+ popd # $TMPDIR
+ popd
shopt -s expand_aliases
- alias unmkinitramfs=$INITRAMFS_TOOLS_DIR/initramfs-tools/unmkinitramfs
-
- which unmkinitramfs > /dev/null 2>&1 || exit 1
+ alias unmkinitramfs=$TMPDIR/initramfs-tools/unmkinitramfs
fi
+
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" 1>&2
exit 1
fi
-USAGE=$(cat <<-END
- Usage: $0 -o/--output_file FILENAME [-a/--algo ALGO] [-x/--ramdisk-location PATH] [-y/--boot_aggregate-location PATH] [-z/--rootfs-location PATH] [-e/--exclude_list FILENAME] [-s/--skip-path PATH] [-h/--help]
+if [ $# -lt 1 ]
+then
+ echo "No arguments provided" >&2
+ echo "Usage: `basename $0` -o [filename] -h [hash-algo]" >&2
+ exit $NOARGS;
+fi
- optional arguments:
- -a/--algo (checksum algorithm to be used, default: $ALGO)
- -x/--ramdisk-location (path to initramdisk, default: $INITRAMFS_LOC, set to "none" to skip)
- -y/--boot_aggregate-location (path for IMA log, used for boot aggregate extraction, default: $BOOT_AGGREGATE_LOC, set to "none" to skip)
- -z/--rootfs-location (path to root filesystem, default: $ROOTFS_LOC, cannot be skipped)
- -e/--exclude_list (filename containing a list of paths to be excluded (i.e., verifier will not try to match checksums, default: $EXCLUDE_LIST)
- -s/--skip-path (comma-separated path list, files found there will not have checksums calculated, default: $SKIP_PATH)
- -h/--help (show this message and exit)
-END
-)
+ALGO=sha256sum
-while [[ $# -gt 0 ]]
-do
- key="$1"
+ALGO_LIST=("sha1sum" "sha256sum" "sha512sum")
+
+valid_algo() {
+ local algo=$1
+
+ [[ " ${ALGO_LIST[@]} " =~ " ${algo} " ]]
+}
- case $key in
- -a|--algo)
- ALGO="$2"
- shift
- ;;
- -a=*|--algo=*)
- ALGO=$(echo $key | cut -d '=' -f 2)
- ;;
- -x|--ramdisk-location)
- INITRAMFS_LOC="$2"
- shift
- ;;
- -x=*|--ramdisk-location=*)
- INITRAMFS_LOC=$(echo $key | cut -d '=' -f 2)
- ;;
- -y|--boot_aggregate-location)
- BOOT_AGGREGATE_LOC=$2
- shift
- ;;
- -y=*|--boot_aggregate-location=*)
- BOOT_AGGREGATE_LOC=$(echo $key | cut -d '=' -f 2)
- ;;
- -z|--rootfs-location)
- ROOTFS_LOC=$2
- shift
- ;;
- -z=*|--rootfs-location=*)
- ROOTFS_LOC=$(echo $key | cut -d '=' -f 2)
- ;;
- -e|--exclude_list)
- EXCLUDE_LIST=$2
- shift
- ;;
- -e=*|--exclude_list=*)
- EXCLUDE_LIST=$(echo $key | cut -d '=' -f 2)
- ;;
- -o=*|--output_file=*)
- OUTPUT=$(echo $key | cut -d '=' -f 2)
- ;;
- -o|--output_file)
- OUTPUT=$2
- shift
- ;;
- -s=*|--skip-path=*)
- SKIP_PATH=$(echo $key | cut -d '=' -f 2)
- ;;
- -s|--skip-path)
- SKIP_PATH=$2
- shift
- ;;
- -h|--help)
- printf "%s\n" "$USAGE"
- exit 0
- shift
- ;;
- *)
- # unknown option
- ;;
- esac
- shift
+while getopts ":o:h:" opt; do
+ case $opt in
+ o)
+ OUTPUT=$(readlink -f $OPTARG)
+ rm -f $OUTPUT
+ ;;
+ h)
+ if valid_algo $OPTARG; then
+ ALGO=$OPTARG
+ else
+ echo "Invalid hash function argument: use sha1sum, sha256sum, or sha512sum"
+ exit 1
+ fi
+ ;;
+ esac
done
-if ! valid_algo $ALGO
+if [ ! "$OUTPUT" ]
then
- echo "Invalid hash function argument: pick from \"${ALGO_LIST[@]}\""
+ echo "Missing argument for -o" >&2;
+ echo "Usage: $0 -o [filename] -h [hash-algo]" >&2;
exit 1
fi
-if [[ -z $OUTPUT ]]
-then
- printf "%s\n" "$USAGE"
- exit 1
+
+# Where to look for initramfs image
+INITRAMFS_LOC="/boot"
+if [ -d "/ostree" ]; then
+ # If we are on an ostree system change where we look for initramfs image
+ loc=$(grep -E "/ostree/[^/]([^/]*)" -o /proc/cmdline | head -n 1 | cut -d / -f 3)
+ INITRAMFS_LOC="/boot/ostree/${loc}/"
fi
-rm -rf $ALLOWLIST_DIR
-rm -rf $INITRAMFS_STAGING_DIR
-rm -rf $OUTPUT_DIR
-announce "Writing allowlist $ALLOWLIST_DIR/${OUTPUT} with $ALGO..."
-mkdir -p $ALLOWLIST_DIR
+echo "Writing allowlist to $OUTPUT with $ALGO..."
-if [[ $BOOT_AGGREGATE_LOC != "none" ]]
-then
- announce "--- Adding boot agregate from $BOOT_AGGREGATE_LOC on allowlist $ALLOWLIST_DIR/${OUTPUT} ..."
# Add boot_aggregate from /sys/kernel/security/ima/ascii_runtime_measurements (IMA Log) file.
# The boot_aggregate measurement is always the first line in the IMA Log file.
# The format of the log lines is the following:
# <PCR_ID> <PCR_Value> <IMA_Template> <File_Digest> <File_Name> <File_Signature>
# File_Digest may start with the digest algorithm specified (e.g "sha1:", "sha256:") depending on the template used.
- head -n 1 $BOOT_AGGREGATE_LOC | awk '{ print $4 " boot_aggregate" }' | sed 's/.*://' >> $ALLOWLIST_DIR/${OUTPUT}
+head -n 1 /sys/kernel/security/ima/ascii_runtime_measurements | awk '{ print $4 " boot_aggregate" }' | sed 's/.*://' >> $OUTPUT
- bagghash=$(detect_hash $(cat $ALLOWLIST_DIR/${OUTPUT} | cut -d ' ' -f 1))
- if [[ $ALGO != $bagghash ]]
- then
- announce "ERROR: \"boot aggregate\" has was calculated with $bagghash, but files will be calculated with $ALGO. Use option -a $bagghash"
- exit 1
- fi
-else
- announce "--- Skipping boot aggregate..."
-fi
-
-announce "--- Adding all appropriate files from $ROOTFS_LOC on allowlist $ALLOWLIST_DIR/${OUTPUT} ..."
# Add all appropriate files under root FS to allowlist
-pushd $ROOTFS_LOC > /dev/null 2>&1
-BASE_EXCLUDE_DIRS="\bsys\b\|\brun\b\|\bproc\b\|\blost+found\b\|\bdev\b\|\bmedia\b\|\bsnap\b\|\bmnt\b\|\bvar\b\|\btmp\b"
-ROOTFS_FILE_LIST=$(ls | grep -v $BASE_EXCLUDE_DIRS)
-if [[ $SKIP_PATH != "none" ]]
-then
- SKIP_PATH=$(echo $SKIP_PATH | sed -e "s#^$ROOTFS_LOC##g" -e "s#,$ROOTFS_LOC##g" -e "s#,#\\\|#g")
- ROOTFS_FILE_LIST=$(echo "$ROOTFS_FILE_LIST" | grep -v "$SKIP_PATH")
-fi
-find $ROOTFS_FILE_LIST \( -fstype rootfs -o -xtype f -type l -o -type f \) -uid 0 -exec $ALGO "$ROOTFS_LOC/{}" >> $ALLOWLIST_DIR/${OUTPUT} \;
-popd > /dev/null 2>&1
+cd /
+find `ls / | grep -v "\bsys\b\|\brun\b\|\bproc\b\|\blost+found\b\|\bdev\b\|\bmedia\b\|\bsnap\b\|mnt"` \( -fstype rootfs -o -xtype f -type l -o -type f \) -uid 0 -exec $ALGO '/{}' >> $OUTPUT \;
# Create staging area for init ram images
-mkdir -p $INITRAMFS_STAGING_DIR
+rm -rf /tmp/ima/
+mkdir -p /tmp/ima
-if [[ $INITRAMFS_LOC != "none" ]]
-then
- # Where to look for initramfs image
- if [[ -d "/ostree" ]]
- then
- X=$INITRAMFS_LOC
- # If we are on an ostree system change where we look for initramfs image
- loc=$(grep -E "/ostree/[^/]([^/]*)" -o /proc/cmdline | head -n 1 | cut -d / -f 3)
- INITRAMFS_LOC="/boot/ostree/${loc}/"
- announce "--- The location of initramfs was overriden from \"${X}\" to \"$INITRAMFS_LOC\""
- fi
-
- announce "--- Creating allowlist for init ram disks found under \"$INITRAMFS_LOC\" to $ALLOWLIST_DIR/${OUTPUT} ..."
- for i in $(ls ${INITRAMFS_LOC}/initr* 2> /dev/null)
- do
- announce " extracting $i"
- mkdir -p $INITRAMFS_STAGING_DIR/$i-extracted
- cd $INITRAMFS_STAGING_DIR/$i-extracted
-
- # platform-specific handling of init ram disk images
- if [[ `command -v unmkinitramfs` ]] ; then
- mkdir -p $INITRAMFS_STAGING_DIR/$i-extracted-unmk
- unmkinitramfs $i $INITRAMFS_STAGING_DIR/$i-extracted-unmk
- if [[ -d "$INITRAMFS_STAGING_DIR/$i-extracted-unmk/main/" ]] ; then
- cp -r $INITRAMFS_STAGING_DIR/$i-extracted-unmk/main/. /tmp/ima/$i-extracted
- else
- cp -r $INITRAMFS_STAGING_DIR/$i-extracted-unmk/. /tmp/ima/$i-extracted
- fi
- elif [[ -x "/usr/lib/dracut/skipcpio" ]] ; then
- /usr/lib/dracut/skipcpio $i | gunzip -c | cpio -i -d 2> /dev/null
+# Iterate through init ram disks and add files to allowlist
+echo "Creating allowlist for init ram disk"
+for i in `ls ${INITRAMFS_LOC}/initr*`
+do
+ echo "extracting $i"
+ mkdir -p /tmp/ima/$i-extracted
+ cd /tmp/ima/$i-extracted
+
+ # platform-specific handling of init ram disk images
+ if [[ `command -v unmkinitramfs` ]] ; then
+ mkdir -p /tmp/ima/$i-extracted-unmk
+ unmkinitramfs $i /tmp/ima/$i-extracted-unmk
+ if [[ -d "/tmp/ima/$i-extracted-unmk/main/" ]] ; then
+ cp -r /tmp/ima/$i-extracted-unmk/main/. /tmp/ima/$i-extracted
else
- announce "ERROR: No tools for initramfs image processing found!"
- exit 1
+ cp -r /tmp/ima/$i-extracted-unmk/. /tmp/ima/$i-extracted
fi
+ elif [[ -x "/usr/lib/dracut/skipcpio" ]] ; then
+ /usr/lib/dracut/skipcpio $i | gunzip -c 2> /dev/null | cpio -i -d 2> /dev/null
+ else
+ echo "ERROR: No tools for initramfs image processing found!"
+ break
+ fi
- find -type f -exec $ALGO "./{}" \; | sed "s| \./\./| /|" >> $ALLOWLIST_DIR/${OUTPUT}
- done
-fi
-
-# Non-critical cleanup on the resulting file (when ROOTFS_LOC = '/', the path starts on allowlist ends up with double '//' )
-sed -i "s^ //^ /^g" $ALLOWLIST_DIR/${OUTPUT}
-# A bit of cleanup on the resulting file (among other problems, sha256sum might output a hash with the prefix '\\')
-sed -i "s/^\\\//g" $ALLOWLIST_DIR/${OUTPUT}
-
-# Convert to runtime policy
-mkdir -p $OUTPUT_DIR
-announce "Converting created allowlist ($ALLOWLIST_DIR/${OUTPUT}) to Keylime runtime policy ($OUTPUT_DIR/${OUTPUT}) ..."
-CONVERT_CMD_OPTS="--allowlist $ALLOWLIST_DIR/${OUTPUT} --output_file $OUTPUT_DIR/${OUTPUT}"
-[ -f $EXCLUDE_LIST ] && CONVERT_CMD_OPTS="$CONVERT_CMD_OPTS --excludelist "$(readlink -f -- "${EXCLUDE_LIST}")""
+ find -type f -exec $ALGO "./{}" \; | sed "s| \./\./| /|" >> $OUTPUT
+done
-pushd $KCRP_BASE_DIR > /dev/null 2>&1
-export PYTHONPATH=$KCRP_BASE_DIR:$PYTHONPATH
-# only 3 dependencies required: pip3 install cryptography lark packaging
-python3 ./keylime/cmd/convert_runtime_policy.py $CONVERT_CMD_OPTS; echo " "
-if [[ $? -eq 0 ]]
-then
- announce "Done, new runtime policy file present at ${OUTPUT_DIR}/$OUTPUT. It can be used on the tenant keylime host with \"keylime_tenant -c add --runtime-policy ${OUTPUT_DIR}/$OUTPUT <other options>"
-fi
-popd > /dev/null 2>&1
+# when ROOTFS_LOC = '/', the path starts on allowlist ends up with double '//'
+#
+# Example:
+#
+# b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c //bar
+#
+# Replace the unwanted '//' with a single '/'
+sed -i 's| /\+| /|g' $ALLOWLIST_DIR/${OUTPUT}
+
+# When the file name contains newlines or backslashes, the output of sha256sum
+# adds a backslash at the beginning of the line.
+#
+# Example:
+#
+# $ echo foo > ba\\r
+# $ sha256sum ba\\r
+# \b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c ba\\r
+#
+# Remove the unwanted backslash prefix
+sed -i 's/^\\//g' $ALLOWLIST_DIR/${OUTPUT}
+
+# Clean up
+rm -rf /tmp/ima
--
2.47.1

View File

@ -1,50 +0,0 @@
From f2432efbeb7b6305067111bb3a77ef5d7da4eb5b Mon Sep 17 00:00:00 2001
From: Thore Sommer <mail@thson.de>
Date: Thu, 10 Aug 2023 16:15:57 +0300
Subject: [PATCH 5/6] elchecking/example: add ignores for
EV_PLATFORM_CONFIG_FLAGS
These are generated by edk2 when used with QEMU, but we do not have a
reference for them.
Signed-off-by: Thore Sommer <mail@thson.de>
---
keylime/mba/elchecking/example.py | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/keylime/mba/elchecking/example.py b/keylime/mba/elchecking/example.py
index 8885227..921db4e 100644
--- a/keylime/mba/elchecking/example.py
+++ b/keylime/mba/elchecking/example.py
@@ -75,7 +75,6 @@ shim_authcode_sha256_no_secureboot = tests.obj_test(
kernel_cmdline=tests.type_test(str),
)
-
allowed_kernel_list_test_no_secureboot = tests.list_test(shim_authcode_sha256_no_secureboot)
@@ -303,6 +302,20 @@ class Example(policies.Policy):
),
),
)
+ # edk2 measures up to 4 of those events, where we do not have a good way to get a reference
+ # See:
+ # - https://github.com/keylime/keylime/issues/1393
+ # - https://github.com/tianocore/edk2/commit/935343cf1639a28530904a1e8d73d6517a07cbff
+ dispatcher.set(
+ (1, "EV_PLATFORM_CONFIG_FLAGS"),
+ tests.Or(
+ tests.OnceTest(tests.AcceptAll()),
+ tests.OnceTest(tests.AcceptAll()),
+ tests.OnceTest(tests.AcceptAll()),
+ tests.OnceTest(tests.AcceptAll()),
+ ),
+ )
+
dispatcher.set((4, "EV_EFI_ACTION"), tests.EvEfiActionTest(4))
for pcr in range(8):
dispatcher.set((pcr, "EV_SEPARATOR"), tests.EvSeperatorTest())
--
2.39.3

View File

@ -0,0 +1,66 @@
From 733db4036f2142152795fc51b761f05e39594b08 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 27 May 2025 09:31:54 +0000
Subject: [PATCH 6/6] Revert "default" server_key_password for
verifier/registrar
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
templates/2.0/mapping.json | 4 ++--
templates/2.1/mapping.json | 6 +++---
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/templates/2.0/mapping.json b/templates/2.0/mapping.json
index 80dcdde..8fce124 100644
--- a/templates/2.0/mapping.json
+++ b/templates/2.0/mapping.json
@@ -232,7 +232,7 @@
"server_key_password": {
"section": "cloud_verifier",
"option": "private_key_pw",
- "default": ""
+ "default": "default"
},
"enable_agent_mtls": {
"section": "cloud_verifier",
@@ -563,7 +563,7 @@
"server_key_password": {
"section": "registrar",
"option": "private_key_pw",
- "default": ""
+ "default": "default"
},
"server_cert": {
"section": "registrar",
diff --git a/templates/2.1/mapping.json b/templates/2.1/mapping.json
index 956a53a..88e3fb6 100644
--- a/templates/2.1/mapping.json
+++ b/templates/2.1/mapping.json
@@ -262,7 +262,7 @@
"server_key_password": {
"section": "verifier",
"option": "server_key_password",
- "default": ""
+ "default": "default"
},
"enable_agent_mtls": {
"section": "verifier",
@@ -593,7 +593,7 @@
"server_key_password": {
"section": "registrar",
"option": "server_key_password",
- "default": ""
+ "default": "default"
},
"server_cert": {
"section": "registrar",
@@ -835,4 +835,4 @@
"handler_consoleHandler": "logging",
"logger_keylime": "logging"
}
-}
\ No newline at end of file
+}
--
2.47.1

View File

@ -1,43 +0,0 @@
From ed213b9533535ceae5026b2fab274f80bcc58cb8 Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Tue, 15 Aug 2023 09:18:32 -0400
Subject: [PATCH 6/6] Revert mapping changes
---
templates/2.0/mapping.json | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/templates/2.0/mapping.json b/templates/2.0/mapping.json
index 66addbc..0036b63 100644
--- a/templates/2.0/mapping.json
+++ b/templates/2.0/mapping.json
@@ -207,7 +207,7 @@
"registrar_port": {
"section": "cloud_verifier",
"option": "registrar_port",
- "default": "8881"
+ "default": "8891"
},
"tls_dir": {
"section": "cloud_verifier",
@@ -232,7 +232,7 @@
"server_key_password": {
"section": "cloud_verifier",
"option": "private_key_pw",
- "default": ""
+ "default": "default"
},
"enable_agent_mtls": {
"section": "cloud_verifier",
@@ -558,7 +558,7 @@
"server_key_password": {
"section": "registrar",
"option": "private_key_pw",
- "default": ""
+ "default": "default"
},
"server_cert": {
"section": "registrar",
--
2.39.3

View File

@ -1,90 +0,0 @@
From 3dc40e8b1878d84045ee80cb6d216348713c048a Mon Sep 17 00:00:00 2001
From: Karel Srot <ksrot@redhat.com>
Date: Tue, 15 Aug 2023 10:00:50 +0200
Subject: [PATCH 7/7] Handle session close using a session manager
Resolves https://github.com/keylime/keylime/issues/1455
Signed-off-by: Karel Srot <ksrot@redhat.com>
---
keylime/revocation_notifier.py | 50 +++++++++++++++++-----------------
packit-ci.fmf | 1 +
2 files changed, 26 insertions(+), 25 deletions(-)
diff --git a/keylime/revocation_notifier.py b/keylime/revocation_notifier.py
index 31a3095..5cc8b1a 100644
--- a/keylime/revocation_notifier.py
+++ b/keylime/revocation_notifier.py
@@ -132,32 +132,32 @@ def notify_webhook(tosend: Dict[str, Any]) -> None:
def worker_webhook(tosend: Dict[str, Any], url: str) -> None:
interval = config.getfloat("verifier", "retry_interval")
exponential_backoff = config.getboolean("verifier", "exponential_backoff")
- session = requests.session()
- logger.info("Sending revocation event via webhook...")
- for i in range(config.getint("verifier", "max_retries")):
- next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
- try:
- response = session.post(url, json=tosend, timeout=5)
- if response.status_code in [200, 202]:
- break
-
- logger.debug(
- "Unable to publish revocation message %d times via webhook, "
- "trying again in %d seconds. "
- "Server returned status code: %s",
- i,
- next_retry,
- response.status_code,
- )
- except requests.exceptions.RequestException as e:
- logger.debug(
- "Unable to publish revocation message %d times via webhook, trying again in %d seconds: %s",
- i,
- next_retry,
- e,
- )
+ with requests.Session() as session:
+ logger.info("Sending revocation event via webhook...")
+ for i in range(config.getint("verifier", "max_retries")):
+ next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
+ try:
+ response = session.post(url, json=tosend, timeout=5)
+ if response.status_code in [200, 202]:
+ break
+
+ logger.debug(
+ "Unable to publish revocation message %d times via webhook, "
+ "trying again in %d seconds. "
+ "Server returned status code: %s",
+ i,
+ next_retry,
+ response.status_code,
+ )
+ except requests.exceptions.RequestException as e:
+ logger.debug(
+ "Unable to publish revocation message %d times via webhook, trying again in %d seconds: %s",
+ i,
+ next_retry,
+ e,
+ )
- time.sleep(next_retry)
+ time.sleep(next_retry)
w = functools.partial(worker_webhook, tosend, url)
t = threading.Thread(target=w, daemon=True)
diff --git a/packit-ci.fmf b/packit-ci.fmf
index f4d2dae..7abe313 100644
--- a/packit-ci.fmf
+++ b/packit-ci.fmf
@@ -108,6 +108,7 @@ adjust:
- /setup/configure_tpm_emulator
- /setup/install_upstream_keylime
- /setup/install_rust_keylime_from_copr
+ - /setup/configure_kernel_ima_module/ima_policy_simple
- /functional/basic-attestation-on-localhost
- /functional/basic-attestation-with-custom-certificates
- /functional/basic-attestation-without-mtls
--
2.41.0

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,29 @@
From d14e0a132cfedd081bffa7a990b9401d5e257cac Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Fri, 8 Aug 2025 16:40:01 +0100
Subject: [PATCH 8/9] mb: support EV_EFI_HANDOFF_TABLES events on PCR1
Allow EV_EFI_HANDOFF_TABLES events on PCR1 alongside the existing
EV_EFI_HANDOFF_TABLES2 support to handle different firmware
implementations, in the example policy.
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
keylime/mba/elchecking/example.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/keylime/mba/elchecking/example.py b/keylime/mba/elchecking/example.py
index 2c6f699..a3d918a 100644
--- a/keylime/mba/elchecking/example.py
+++ b/keylime/mba/elchecking/example.py
@@ -185,6 +185,7 @@ class Example(policies.Policy):
# We only expect one EV_NO_ACTION event at the start.
dispatcher.set((0, "EV_NO_ACTION"), tests.OnceTest(tests.AcceptAll()))
dispatcher.set((1, "EV_CPU_MICROCODE"), tests.OnceTest(tests.AcceptAll()))
+ dispatcher.set((1, "EV_EFI_HANDOFF_TABLES"), tests.OnceTest(tests.AcceptAll()))
dispatcher.set((1, "EV_EFI_HANDOFF_TABLES2"), tests.OnceTest(tests.AcceptAll()))
dispatcher.set((0, "EV_S_CRTM_VERSION"), events_final.get("s_crtms"))
dispatcher.set((0, "EV_EFI_PLATFORM_FIRMWARE_BLOB"), events_final.get("platform_firmware_blobs"))
--
2.47.3

View File

@ -1,31 +0,0 @@
From aa891f456d5cf0fc23e16d87fb28efc79a0d8073 Mon Sep 17 00:00:00 2001
From: Marcio Silva <marcio.a.silva@ibm.com>
Date: Wed, 23 Aug 2023 11:24:59 -0300
Subject: [PATCH 8/8] verifier: should read parameters from verifier.conf only
Single-line fix for #1446
The verifier should read "durable attestation" backend imports from
verifier.conf (and NOT from registrar.conf)
Signed-off-by: Marcio Silva <marcio.a.silva@ibm.com>
---
keylime/cloud_verifier_tornado.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/keylime/cloud_verifier_tornado.py b/keylime/cloud_verifier_tornado.py
index d65cb63..261022a 100644
--- a/keylime/cloud_verifier_tornado.py
+++ b/keylime/cloud_verifier_tornado.py
@@ -51,7 +51,7 @@ except SQLAlchemyError as err:
sys.exit(1)
try:
- rmc = record.get_record_mgt_class(config.get("registrar", "durable_attestation_import", fallback=""))
+ rmc = record.get_record_mgt_class(config.get("verifier", "durable_attestation_import", fallback=""))
if rmc:
rmc = rmc("verifier")
except record.RecordManagementException as rme:
--
2.41.0

View File

@ -1,48 +0,0 @@
From 9e5ac9f25cd400b16d5969f531cee28290543f2a Mon Sep 17 00:00:00 2001
From: Marcio Silva <marcio.a.silva@ibm.com>
Date: Wed, 12 Jul 2023 12:05:47 -0300
Subject: [PATCH] Fix for CVE-2023-38201 (Security Advisory
GHSA-f4r5-q63f-gcww)
In addition to removing the offending message, this patch also ensures
deletion of an agent's record from the database in case of failure after
a single attempt.
Signed-off-by: Marcio Silva <marcio.a.silva@ibm.com>
---
keylime/registrar_common.py | 15 +++++++++++++--
1 file changed, 13 insertions(+), 2 deletions(-)
diff --git a/keylime/registrar_common.py b/keylime/registrar_common.py
index 1fd97cd0c..7f15ae430 100644
--- a/keylime/registrar_common.py
+++ b/keylime/registrar_common.py
@@ -250,7 +250,9 @@ def get_network_params(
try:
port = int(port)
if port < 1 or port > 65535:
- logger.warning("Contact port for agent %s is not a number between 1 and got: %s.", agent_id, port)
+ logger.warning(
+ "Contact port for agent %s is not a number between 1 and 65535 got: %s.", agent_id, port
+ )
port = None
except ValueError:
logger.warning("Contact port for agent %s is not a valid number got: %s.", agent_id, port)
@@ -447,7 +449,16 @@ def do_PUT(self) -> None:
logger.error("SQLAlchemy Error: %s", e)
raise
else:
- raise Exception(f"Auth tag {auth_tag} does not match expected value {ex_mac}")
+ if agent_id and session.query(RegistrarMain).filter_by(agent_id=agent_id).delete():
+ try:
+ session.commit()
+ except SQLAlchemyError as e:
+ logger.error("SQLAlchemy Error: %s", e)
+ raise
+
+ raise Exception(
+ f"Auth tag {auth_tag} for agent {agent_id} does not match expected value. The agent has been deleted from database, and a restart of it will be required"
+ )
web_util.echo_json_response(self, 200, "Success")
logger.info("PUT activated: %s", agent_id)

View File

@ -0,0 +1,356 @@
From 607b97ac8d414cb57b1ca89925631d41bd7ac04c Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Fri, 8 Aug 2025 16:41:54 +0100
Subject: [PATCH 9/9] mb: support vendor_db as logged by newer shim versions
- Updated example policy to properly handle different event structures
for vendor_db validation:
- KeySubsetMulti for EV_EFI_VARIABLE_DRIVER_CONFIG (has SignatureType field)
- SignatureSetMember for EV_EFI_VARIABLE_AUTHORITY (direct signature format)
- Added method to extract vendor_db from EV_EFI_VARIABLE_AUTHORITY events
in reference state generation (keylime-policy create measured-boot and
the legacy create_mb_refstate script)
- Made vendor_db optional for backward compatibility
This fixes attestation failures when vendor_db variables are present but
missing from reference states or validated with incorrect test types.
See: https://github.com/rhboot/shim/pull/728
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
keylime/mba/elchecking/example.py | 45 +++++++++
keylime/policy/create_mb_policy.py | 30 ++++++
scripts/create_mb_refstate | 30 ++++++
test/test_create_mb_policy.py | 142 +++++++++++++++++++++++++++++
4 files changed, 247 insertions(+)
diff --git a/keylime/mba/elchecking/example.py b/keylime/mba/elchecking/example.py
index a3d918a..5a933ac 100644
--- a/keylime/mba/elchecking/example.py
+++ b/keylime/mba/elchecking/example.py
@@ -21,6 +21,7 @@ from . import policies, tests
# kek - list of allowed KEK keys
# db - list of allowed db keys
# dbx - list of required dbx keys
+# vendor_db - list of allowed vendor_db keys (optional, for newer shim versions)
# mokdig - list of allowed digests of MoKList (PCR 14 EV_IPL)
# mokxdig - list of allowed digests of MoKListX (PCR 14 EV_IPL)
# kernels - list of allowed {
@@ -121,6 +122,10 @@ class Example(policies.Policy):
if req not in refstate:
raise Exception(f"refstate lacks {req}")
+ # vendor_db is optional for backward compatibility
+ if "vendor_db" not in refstate:
+ refstate["vendor_db"] = []
+
dispatcher = tests.Dispatcher(("PCRIndex", "EventType"))
vd_driver_config = tests.VariableDispatch()
vd_authority = tests.VariableDispatch()
@@ -268,6 +273,34 @@ class Example(policies.Policy):
"db",
db_test,
)
+ # Support vendor_db as logged by newer shim versions
+ # See: https://github.com/rhboot/shim/pull/728
+ if not has_secureboot and not refstate["vendor_db"]:
+ vendor_db_test = tests.OnceTest(tests.AcceptAll())
+ else:
+ vendor_db_test = tests.OnceTest(
+ tests.Or(
+ tests.KeySubsetMulti(
+ ["a159c0a5-e494-a74a-87b5-ab155c2bf072", "2616c4c1-4c50-9240-aca9-41f936934328"],
+ sigs_strip0x(refstate["vendor_db"]),
+ ),
+ tests.KeySubsetMulti(
+ ["a5c059a1-94e4-4aa7-87b5-ab155c2bf072", "c1c41626-504c-4092-aca9-41f936934328"],
+ sigs_strip0x(refstate["vendor_db"]),
+ ),
+ )
+ )
+
+ vd_driver_config.set(
+ "cbb219d7-3a3d-9645-a3bc-dad00e67656f",
+ "vendor_db",
+ vendor_db_test,
+ )
+ vd_driver_config.set(
+ "d719b2cb-3d3a-4596-a3bc-dad00e67656f",
+ "vendor_db",
+ vendor_db_test,
+ )
if not has_secureboot and not refstate["dbx"]:
dbx_test = tests.OnceTest(tests.AcceptAll())
@@ -295,6 +328,18 @@ class Example(policies.Policy):
vd_db_test = tests.OnceTest(tests.AcceptAll())
vd_authority.set("cbb219d7-3a3d-9645-a3bc-dad00e67656f", "db", vd_db_test)
vd_authority.set("d719b2cb-3d3a-4596-a3bc-dad00e67656f", "db", vd_db_test)
+ # Support vendor_db as logged by newer shim versions in EV_EFI_VARIABLE_AUTHORITY events
+ # See: https://github.com/rhboot/shim/pull/728
+ # EV_EFI_VARIABLE_AUTHORITY events have different structure than EV_EFI_VARIABLE_DRIVER_CONFIG
+ # They contain direct signature data without SignatureType field
+ if not has_secureboot and not refstate["vendor_db"]:
+ vendor_db_authority_test = tests.OnceTest(tests.AcceptAll())
+ else:
+ vendor_db_authority_test = tests.OnceTest(
+ tests.IterateTest(tests.SignatureSetMember(sigs_strip0x(refstate["vendor_db"])))
+ )
+ vd_authority.set("cbb219d7-3a3d-9645-a3bc-dad00e67656f", "vendor_db", vendor_db_authority_test)
+ vd_authority.set("d719b2cb-3d3a-4596-a3bc-dad00e67656f", "vendor_db", vendor_db_authority_test)
# Accept all SbatLevels of the Shim, because we already checked the hash of the Shim itself.
vd_sbat_level_test = tests.OnceTest(tests.AcceptAll())
vd_authority.set("50ab5d60-46e0-0043-abb6-3dd810dd8b23", "SbatLevel", vd_sbat_level_test)
diff --git a/keylime/policy/create_mb_policy.py b/keylime/policy/create_mb_policy.py
index 859e652..b2b48f7 100644
--- a/keylime/policy/create_mb_policy.py
+++ b/keylime/policy/create_mb_policy.py
@@ -93,6 +93,35 @@ def get_keys(events: List[Dict[str, Any]]) -> Dict[str, List[Any]]:
return out
+def get_vendor_db(events: List[Dict[str, Any]]) -> Dict[str, List[Any]]:
+ """Get vendor_db signatures from EV_EFI_VARIABLE_AUTHORITY events."""
+ out: Dict[str, List[Any]] = {"vendor_db": []}
+
+ for event in events:
+ if "EventType" not in event:
+ continue
+ if event["EventType"] != "EV_EFI_VARIABLE_AUTHORITY":
+ continue
+ if "Event" not in event or "UnicodeName" not in event["Event"]:
+ continue
+
+ event_name = event["Event"]["UnicodeName"].lower()
+ if event_name == "vendor_db":
+ data = None
+ if "VariableData" in event["Event"]:
+ data = event["Event"]["VariableData"]
+
+ if data is not None:
+ # VariableData for EV_EFI_VARIABLE_AUTHORITY is a list of signatures
+ for entry in data:
+ if "SignatureOwner" in entry and "SignatureData" in entry:
+ out["vendor_db"].append(
+ {"SignatureOwner": entry["SignatureOwner"], "SignatureData": f"0x{entry['SignatureData']}"}
+ )
+
+ return out
+
+
def get_kernel(events: List[Dict[str, Any]], secure_boot: bool) -> Dict[str, List[Dict[str, Any]]]:
"""Extract digest for Shim, Grub, Linux Kernel and initrd."""
out = []
@@ -259,6 +288,7 @@ def create_mb_refstate(args: argparse.Namespace) -> Optional[Dict[str, object]]:
}
],
**get_keys(events),
+ **get_vendor_db(events),
**get_mok(events),
**get_kernel(events, has_secureboot),
}
diff --git a/scripts/create_mb_refstate b/scripts/create_mb_refstate
index 23cafb9..c98e61d 100755
--- a/scripts/create_mb_refstate
+++ b/scripts/create_mb_refstate
@@ -78,6 +78,35 @@ def get_keys(events):
return out
+def get_vendor_db(events):
+ """Get vendor_db signatures from EV_EFI_VARIABLE_AUTHORITY events."""
+ out = {"vendor_db": []}
+
+ for event in events:
+ if "EventType" not in event:
+ continue
+ if event["EventType"] != "EV_EFI_VARIABLE_AUTHORITY":
+ continue
+ if "Event" not in event or "UnicodeName" not in event["Event"]:
+ continue
+
+ event_name = event["Event"]["UnicodeName"].lower()
+ if event_name == "vendor_db":
+ data = None
+ if "VariableData" in event["Event"]:
+ data = event["Event"]["VariableData"]
+
+ if data is not None:
+ # VariableData for EV_EFI_VARIABLE_AUTHORITY is a list of signatures
+ for entry in data:
+ if "SignatureOwner" in entry and "SignatureData" in entry:
+ out["vendor_db"].append(
+ {"SignatureOwner": entry["SignatureOwner"], "SignatureData": f"0x{entry['SignatureData']}"}
+ )
+
+ return out
+
+
def get_kernel(events, secure_boot):
"""
Extract digest for Shim, Grub, Linux Kernel and initrd.
@@ -197,6 +226,7 @@ def main():
}
],
**get_keys(events),
+ **get_vendor_db(events),
**get_mok(events),
**get_kernel(events, has_secureboot),
}
diff --git a/test/test_create_mb_policy.py b/test/test_create_mb_policy.py
index b00d8e7..cd32bda 100644
--- a/test/test_create_mb_policy.py
+++ b/test/test_create_mb_policy.py
@@ -364,6 +364,148 @@ class CreateMeasuredBootPolicy_Test(unittest.TestCase):
for c in test_cases:
self.assertDictEqual(create_mb_policy.get_mok(c["events"]), c["expected"])
+ def test_get_vendor_db(self):
+ test_cases = [
+ {"events": [], "expected": {"vendor_db": []}},
+ # No EV_EFI_VARIABLE_AUTHORITY events.
+ {
+ "events": [
+ {
+ "EventType": "EV_EFI_VARIABLE_DRIVER_CONFIG",
+ "Event": {"UnicodeName": "vendor_db", "VariableData": []},
+ }
+ ],
+ "expected": {"vendor_db": []},
+ },
+ # Good vendor_db event with EV_EFI_VARIABLE_AUTHORITY.
+ {
+ "events": [
+ {
+ "EventType": "EV_EFI_VARIABLE_AUTHORITY",
+ "Event": {
+ "UnicodeName": "vendor_db",
+ "VariableData": [
+ {
+ "SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b",
+ "SignatureData": "sig-data-1",
+ }
+ ],
+ },
+ }
+ ],
+ "expected": {
+ "vendor_db": [
+ {"SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b", "SignatureData": "0xsig-data-1"}
+ ]
+ },
+ },
+ # Multiple vendor_db signatures.
+ {
+ "events": [
+ {
+ "EventType": "EV_EFI_VARIABLE_AUTHORITY",
+ "Event": {
+ "UnicodeName": "vendor_db",
+ "VariableData": [
+ {
+ "SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b",
+ "SignatureData": "sig-data-1",
+ },
+ {
+ "SignatureOwner": "77fa9abd-0359-4d32-bd60-28f4e78f784b",
+ "SignatureData": "sig-data-2",
+ },
+ ],
+ },
+ }
+ ],
+ "expected": {
+ "vendor_db": [
+ {"SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b", "SignatureData": "0xsig-data-1"},
+ {"SignatureOwner": "77fa9abd-0359-4d32-bd60-28f4e78f784b", "SignatureData": "0xsig-data-2"},
+ ]
+ },
+ },
+ # Missing EventType.
+ {
+ "events": [
+ {
+ "Event": {
+ "UnicodeName": "vendor_db",
+ "VariableData": [
+ {
+ "SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b",
+ "SignatureData": "sig-data-1",
+ }
+ ],
+ }
+ }
+ ],
+ "expected": {"vendor_db": []},
+ },
+ # Wrong EventType.
+ {
+ "events": [
+ {
+ "EventType": "EV_EFI_VARIABLE_DRIVER_CONFIG",
+ "Event": {
+ "UnicodeName": "vendor_db",
+ "VariableData": [
+ {
+ "SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b",
+ "SignatureData": "sig-data-1",
+ }
+ ],
+ },
+ }
+ ],
+ "expected": {"vendor_db": []},
+ },
+ # Missing Event.
+ {
+ "events": [{"EventType": "EV_EFI_VARIABLE_AUTHORITY"}],
+ "expected": {"vendor_db": []},
+ },
+ # Missing UnicodeName.
+ {
+ "events": [
+ {
+ "EventType": "EV_EFI_VARIABLE_AUTHORITY",
+ "Event": {
+ "VariableData": [
+ {
+ "SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b",
+ "SignatureData": "sig-data-1",
+ }
+ ]
+ },
+ }
+ ],
+ "expected": {"vendor_db": []},
+ },
+ # Wrong UnicodeName.
+ {
+ "events": [
+ {
+ "EventType": "EV_EFI_VARIABLE_AUTHORITY",
+ "Event": {
+ "UnicodeName": "db",
+ "VariableData": [
+ {
+ "SignatureOwner": "0223eddb-9079-4388-af77-2d65b1c35d3b",
+ "SignatureData": "sig-data-1",
+ }
+ ],
+ },
+ }
+ ],
+ "expected": {"vendor_db": []},
+ },
+ ]
+
+ for c in test_cases:
+ self.assertDictEqual(create_mb_policy.get_vendor_db(c["events"]), c["expected"])
+
def test_get_kernel(self):
test_cases = [
{"events": [], "secureboot": False, "expected": {}},
--
2.47.3

View File

@ -1,69 +0,0 @@
From e17d5a6a47c1405a799a06754d3e905856e3035d Mon Sep 17 00:00:00 2001
From: florian <264356+flozilla@users.noreply.github.com>
Date: Tue, 11 Jul 2023 21:31:27 +0200
Subject: [PATCH 10/10] CVE-2023-38200
Extend Registrar SSL socket to be non-blocking
Fixes: CVE-2023-38200
Upstream:
- https://github.com/keylime/keylime/commit/c68d8f0b7
- https://github.com/keylime/keylime/commit/27d515f4b
---
keylime/registrar_common.py | 23 ++++++++++++++++++++++-
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/keylime/registrar_common.py b/keylime/registrar_common.py
index d1d20dd..6441e3b 100644
--- a/keylime/registrar_common.py
+++ b/keylime/registrar_common.py
@@ -2,8 +2,10 @@ import base64
import http.server
import ipaddress
import os
+import select
import signal
import socket
+import ssl
import sys
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer
@@ -77,6 +79,25 @@ class BaseHandler(BaseHTTPRequestHandler, SessionManager):
class ProtectedHandler(BaseHandler):
+ def handle(self) -> None:
+ """Need to perform SSL handshake here, as
+ do_handshake_on_connect=False for non-blocking SSL socket"""
+ while True:
+ try:
+ self.request.do_handshake()
+ break
+ except ssl.SSLWantReadError:
+ select.select([self.request], [], [])
+ except ssl.SSLWantWriteError:
+ select.select([], [self.request], [])
+ except ssl.SSLError as e:
+ logger.error("SSL connection error: %s", e)
+ return
+ except Exception as e:
+ logger.error("General communication failure: %s", e)
+ return
+ BaseHTTPRequestHandler.handle(self)
+
def do_HEAD(self) -> None:
"""HEAD not supported"""
web_util.echo_json_response(self, 405, "HEAD not supported")
@@ -494,7 +515,7 @@ def start(host: str, tlsport: int, port: int) -> None:
protected_server = RegistrarServer((host, tlsport), ProtectedHandler)
context = web_util.init_mtls("registrar", logger=logger)
if context is not None:
- protected_server.socket = context.wrap_socket(protected_server.socket, server_side=True)
+ protected_server.socket = context.wrap_socket(protected_server.socket, server_side=True, do_handshake_on_connect=False)
thread_protected_server = threading.Thread(target=protected_server.serve_forever)
# Set up the unprotected registrar server
--
2.41.0

View File

@ -0,0 +1,42 @@
From 1b7191098ca3f6d72c6ad218564ae0938a87efd4 Mon Sep 17 00:00:00 2001
From: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
Date: Mon, 18 Aug 2025 12:22:55 +0000
Subject: [PATCH 10/13] verifier: Gracefully shutdown on signal
Wait for the processes to finish when interrupted by a signal. Do not
call exit(0) in the signal handler.
Assisted-by: Claude 4 Sonnet
Signed-off-by: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
---
keylime/cloud_verifier_tornado.py | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/keylime/cloud_verifier_tornado.py b/keylime/cloud_verifier_tornado.py
index 7553ac8..7065661 100644
--- a/keylime/cloud_verifier_tornado.py
+++ b/keylime/cloud_verifier_tornado.py
@@ -2138,7 +2138,7 @@ def main() -> None:
revocation_notifier.stop_broker()
for p in processes:
p.join()
- sys.exit(0)
+ # Do not call sys.exit(0) here as it interferes with multiprocessing cleanup
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)
@@ -2159,3 +2159,11 @@ def main() -> None:
process = Process(target=server_process, args=(task_id, active_agents))
process.start()
processes.append(process)
+
+ # Wait for all worker processes to complete
+ try:
+ for p in processes:
+ p.join()
+ except KeyboardInterrupt:
+ # Signal handler will take care of cleanup
+ pass
--
2.47.3

View File

@ -1,244 +0,0 @@
From b0cf69c9db20eb319ea2e90c22f500e09b704224 Mon Sep 17 00:00:00 2001
From: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
Date: Wed, 23 Aug 2023 16:24:15 +0200
Subject: [PATCH] Implement automatic agent API version bump
Automatically update the agent's supported API version in the database if
the agent is updated and its API version is bumped.
Previously, if an agent was added to a verifier while it used an old API
version, and then it is updated with an API version bump, the
attestation would fail as the verifier would try to reach the agent
using the old API version.
Fixes #1457
Signed-off-by: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
---
keylime/cloud_verifier_tornado.py | 185 +++++++++++++++++++++++++++---
1 file changed, 167 insertions(+), 18 deletions(-)
diff --git a/keylime/cloud_verifier_tornado.py b/keylime/cloud_verifier_tornado.py
index 261022ac6..31e6f7159 100644
--- a/keylime/cloud_verifier_tornado.py
+++ b/keylime/cloud_verifier_tornado.py
@@ -32,6 +32,7 @@
)
from keylime.agentstates import AgentAttestState, AgentAttestStates
from keylime.common import retry, states, validators
+from keylime.common.version import str_to_version
from keylime.da import record
from keylime.db.keylime_db import DBEngineManager, SessionManager
from keylime.db.verifier_db import VerfierMain, VerifierAllowlist
@@ -998,6 +999,80 @@ def data_received(self, chunk: Any) -> None:
raise NotImplementedError()
+async def update_agent_api_version(agent: Dict[str, Any], timeout: float = 60.0) -> Union[Dict[str, Any], None]:
+ agent_id = agent["agent_id"]
+
+ logger.info("Agent %s API version bump detected, trying to update stored API version", agent_id)
+ kwargs = {}
+ if agent["ssl_context"]:
+ kwargs["context"] = agent["ssl_context"]
+
+ res = tornado_requests.request(
+ "GET",
+ f"http://{agent['ip']}:{agent['port']}/version",
+ **kwargs,
+ timeout=timeout,
+ )
+ response = await res
+
+ if response.status_code != 200:
+ logger.warning(
+ "Could not get agent %s supported API version, Error: %s",
+ agent["agent_id"],
+ response.status_code,
+ )
+ return None
+
+ try:
+ json_response = json.loads(response.body)
+ new_version = json_response["results"]["supported_version"]
+ old_version = agent["supported_version"]
+
+ # Only update the API version to use if it is supported by the verifier
+ if new_version in keylime_api_version.all_versions():
+ new_version_tuple = str_to_version(new_version)
+ old_version_tuple = str_to_version(old_version)
+
+ assert new_version_tuple, f"Agent {agent_id} version {new_version} is invalid"
+ assert old_version_tuple, f"Agent {agent_id} version {old_version} is invalid"
+
+ # Check that the new version is greater than current version
+ if new_version_tuple <= old_version_tuple:
+ logger.warning(
+ "Agent %s API version %s is lower or equal to previous version %s",
+ agent_id,
+ new_version,
+ old_version,
+ )
+ return None
+
+ logger.info("Agent %s new API version %s is supported", agent_id, new_version)
+ session = get_session()
+ agent["supported_version"] = new_version
+
+ # Remove keys that should not go to the DB
+ agent_db = dict(agent)
+ for key in exclude_db:
+ if key in agent_db:
+ del agent_db[key]
+
+ session.query(VerfierMain).filter_by(agent_id=agent_id).update(agent_db) # pyright: ignore
+ session.commit()
+ else:
+ logger.warning("Agent %s new API version %s is not supported", agent_id, new_version)
+ return None
+
+ except SQLAlchemyError as e:
+ logger.error("SQLAlchemy Error updating API version for agent %s: %s", agent_id, e)
+ return None
+ except Exception as e:
+ logger.exception(e)
+ return None
+
+ logger.info("Agent %s API version updated to %s", agent["agent_id"], agent["supported_version"])
+ return agent
+
+
async def invoke_get_quote(
agent: Dict[str, Any], runtime_policy: str, need_pubkey: bool, timeout: float = 60.0
) -> None:
@@ -1028,15 +1103,43 @@ async def invoke_get_quote(
# this is a connection error, retry get quote
if response.status_code in [408, 500, 599]:
asyncio.ensure_future(process_agent(agent, states.GET_QUOTE_RETRY))
- else:
- # catastrophic error, do not continue
- logger.critical(
- "Unexpected Get Quote response error for cloud agent %s, Error: %s",
- agent["agent_id"],
- response.status_code,
- )
- failure.add_event("no_quote", "Unexpected Get Quote reponse from agent", False)
- asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ if response.status_code == 400:
+ try:
+ json_response = json.loads(response.body)
+ if "API version not supported" in json_response["status"]:
+ update = update_agent_api_version(agent)
+ updated = await update
+
+ if updated:
+ asyncio.ensure_future(process_agent(updated, states.GET_QUOTE_RETRY))
+ else:
+ logger.warning("Could not update stored agent %s API version", agent["agent_id"])
+ failure.add_event(
+ "version_not_supported",
+ {"context": "Agent API version not supported", "data": json_response},
+ False,
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ except Exception as e:
+ logger.exception(e)
+ failure.add_event(
+ "exception", {"context": "Agent caused the verifier to throw an exception", "data": str(e)}, False
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ # catastrophic error, do not continue
+ logger.critical(
+ "Unexpected Get Quote response error for cloud agent %s, Error: %s",
+ agent["agent_id"],
+ response.status_code,
+ )
+ failure.add_event("no_quote", "Unexpected Get Quote reponse from agent", False)
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
else:
try:
json_response = json.loads(response.body)
@@ -1100,15 +1203,43 @@ async def invoke_provide_v(agent: Dict[str, Any], timeout: float = 60.0) -> None
if response.status_code != 200:
if response.status_code in [408, 500, 599]:
asyncio.ensure_future(process_agent(agent, states.PROVIDE_V_RETRY))
- else:
- # catastrophic error, do not continue
- logger.critical(
- "Unexpected Provide V response error for cloud agent %s, Error: %s",
- agent["agent_id"],
- response.status_code,
- )
- failure.add_event("no_v", {"message": "Unexpected provide V response", "data": response.status_code}, False)
- asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ if response.status_code == 400:
+ try:
+ json_response = json.loads(response.body)
+ if "API version not supported" in json_response["status"]:
+ update = update_agent_api_version(agent)
+ updated = await update
+
+ if updated:
+ asyncio.ensure_future(process_agent(updated, states.PROVIDE_V_RETRY))
+ else:
+ logger.warning("Could not update stored agent %s API version", agent["agent_id"])
+ failure.add_event(
+ "version_not_supported",
+ {"context": "Agent API version not supported", "data": json_response},
+ False,
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ except Exception as e:
+ logger.exception(e)
+ failure.add_event(
+ "exception", {"context": "Agent caused the verifier to throw an exception", "data": str(e)}, False
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ # catastrophic error, do not continue
+ logger.critical(
+ "Unexpected Provide V response error for cloud agent %s, Error: %s",
+ agent["agent_id"],
+ response.status_code,
+ )
+ failure.add_event("no_v", {"message": "Unexpected provide V response", "data": response.status_code}, False)
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
else:
asyncio.ensure_future(process_agent(agent, states.GET_QUOTE))
@@ -1134,6 +1265,24 @@ async def invoke_notify_error(agent: Dict[str, Any], tosend: Dict[str, Any], tim
agent["agent_id"],
)
elif response.status_code != 200:
+ if response.status_code == 400:
+ try:
+ json_response = json.loads(response.body)
+ if "API version not supported" in json_response["status"]:
+ update = update_agent_api_version(agent)
+ updated = await update
+
+ if updated:
+ asyncio.ensure_future(invoke_notify_error(updated, tosend))
+ else:
+ logger.warning("Could not update stored agent %s API version", agent["agent_id"])
+
+ return
+
+ except Exception as e:
+ logger.exception(e)
+ return
+
logger.warning(
"Unexpected Notify Revocation response error for cloud agent %s, Error: %s",
agent["agent_id"],

View File

@ -0,0 +1,308 @@
From af9ac50f5acf1a7d4ad285956b60e60c3c4416b7 Mon Sep 17 00:00:00 2001
From: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
Date: Wed, 23 Jul 2025 15:39:49 +0200
Subject: [PATCH 11/13] revocations: Try to send notifications on shutdown
During verifier shutdown, try to send any pending revocation
notification in a best-effort manner. In the future, the pending revocation
notifications should be persisted to be processed during next startup.
Assisted-by: Claude 4 Sonnet
Signed-off-by: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
---
keylime/cloud_verifier_tornado.py | 7 +
keylime/revocation_notifier.py | 239 ++++++++++++++++++++++--------
2 files changed, 184 insertions(+), 62 deletions(-)
diff --git a/keylime/cloud_verifier_tornado.py b/keylime/cloud_verifier_tornado.py
index 7065661..89aa703 100644
--- a/keylime/cloud_verifier_tornado.py
+++ b/keylime/cloud_verifier_tornado.py
@@ -2109,6 +2109,10 @@ def main() -> None:
# Stop server to not accept new incoming connections
server.stop()
+ # Gracefully shutdown webhook workers to prevent connection errors
+ if "webhook" in revocation_notifier.get_notifiers():
+ revocation_notifier.shutdown_webhook_workers()
+
# Wait for all connections to be closed and then stop ioloop
async def stop() -> None:
await server.close_all_connections()
@@ -2136,6 +2140,9 @@ def main() -> None:
def sig_handler(*_: Any) -> None:
if run_revocation_notifier:
revocation_notifier.stop_broker()
+ # Gracefully shutdown webhook workers to prevent connection errors
+ if "webhook" in revocation_notifier.get_notifiers():
+ revocation_notifier.shutdown_webhook_workers()
for p in processes:
p.join()
# Do not call sys.exit(0) here as it interferes with multiprocessing cleanup
diff --git a/keylime/revocation_notifier.py b/keylime/revocation_notifier.py
index 5a7cc4b..c154028 100644
--- a/keylime/revocation_notifier.py
+++ b/keylime/revocation_notifier.py
@@ -18,6 +18,174 @@ broker_proc: Optional[Process] = None
_SOCKET_PATH = "/var/run/keylime/keylime.verifier.ipc"
+# Global webhook manager instance (initialized when needed)
+_webhook_manager: Optional["WebhookNotificationManager"] = None
+
+
+class WebhookNotificationManager:
+ """Manages webhook worker threads and graceful shutdown for revocation notifications."""
+
+ def __init__(self) -> None:
+ self._shutdown_event = threading.Event()
+ self._workers: Set[threading.Thread] = set()
+ self._workers_lock = threading.Lock()
+
+ def notify_webhook(self, tosend: Dict[str, Any]) -> None:
+ """Send webhook notification with worker thread management."""
+ url = config.get("verifier", "webhook_url", section="revocations", fallback="")
+ # Check if a url was specified
+ if url == "":
+ return
+
+ # Similarly to notify(), let's convert `tosend' to str to prevent
+ # possible issues with json handling by python-requests.
+ tosend = json.bytes_to_str(tosend)
+
+ def worker_webhook(tosend: Dict[str, Any], url: str) -> None:
+ is_shutdown_mode = False
+ try:
+ interval = config.getfloat("verifier", "retry_interval")
+ exponential_backoff = config.getboolean("verifier", "exponential_backoff")
+
+ max_retries = config.getint("verifier", "max_retries")
+ if max_retries <= 0:
+ logger.info("Invalid value found in 'max_retries' option for verifier, using default value")
+ max_retries = 5
+
+ # During shutdown, use fewer retries but still make best effort
+ if self._shutdown_event.is_set():
+ is_shutdown_mode = True
+ max_retries = min(max_retries, 3) # Reduce retries during shutdown but still try
+ logger.info(
+ "Shutdown mode: attempting to send critical revocation notification with %d retries",
+ max_retries,
+ )
+
+ # Get TLS options from the configuration
+ (cert, key, trusted_ca, key_password), verify_server_cert = web_util.get_tls_options(
+ "verifier", is_client=True, logger=logger
+ )
+
+ # Generate the TLS context using the obtained options
+ tls_context = web_util.generate_tls_context(
+ cert, key, trusted_ca, key_password, is_client=True, logger=logger
+ )
+
+ logger.info("Sending revocation event via webhook to %s ...", url)
+ for i in range(max_retries):
+ next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
+
+ with RequestsClient(
+ url,
+ verify_server_cert,
+ tls_context,
+ ) as client:
+ try:
+ res = client.post("", json=tosend, timeout=5)
+ except requests.exceptions.SSLError as ssl_error:
+ if "TLSV1_ALERT_UNKNOWN_CA" in str(ssl_error):
+ logger.warning(
+ "Keylime does not recognize certificate from peer. Check if verifier 'trusted_server_ca' is configured correctly"
+ )
+
+ raise ssl_error from ssl_error
+ except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
+ # During shutdown, only suppress errors on the final attempt after all retries exhausted
+ if is_shutdown_mode and i == max_retries - 1:
+ logger.warning(
+ "Final attempt to send revocation notification failed during shutdown: %s", e
+ )
+ return
+ # Otherwise, let the retry logic handle it
+ raise e
+
+ if res and res.status_code in [200, 202]:
+ if is_shutdown_mode:
+ logger.info("Successfully sent revocation notification during shutdown")
+ break
+
+ logger.debug(
+ "Unable to publish revocation message %d times via webhook, "
+ "trying again in %d seconds. "
+ "Server returned status code: %s",
+ i + 1,
+ next_retry,
+ res.status_code,
+ )
+
+ # During shutdown, use shorter retry intervals to complete faster
+ if is_shutdown_mode:
+ next_retry = min(next_retry, 2.0) # Cap retry interval during shutdown
+
+ time.sleep(next_retry)
+
+ except Exception as e:
+ # Only suppress errors during final shutdown phase and log appropriately
+ if is_shutdown_mode:
+ logger.warning("Failed to send revocation notification during shutdown: %s", e)
+ else:
+ logger.error("Error in webhook worker: %s", e)
+ finally:
+ # Remove this worker from the active set
+ current_thread = threading.current_thread()
+ with self._workers_lock:
+ self._workers.discard(current_thread)
+
+ w = functools.partial(worker_webhook, tosend, url)
+ t = threading.Thread(target=w, daemon=True)
+
+ # Add this worker to the active set
+ with self._workers_lock:
+ self._workers.add(t)
+
+ t.start()
+
+ def shutdown_workers(self) -> None:
+ """Signal webhook workers to shut down gracefully and wait for them to complete.
+
+ This gives workers time to complete their critical revocation notifications
+ before the service shuts down completely.
+ """
+ logger.info("Shutting down webhook workers gracefully...")
+ self._shutdown_event.set()
+
+ # Give workers generous time to complete critical revocation notifications
+ timeout = 30.0 # Increased timeout for critical security notifications
+ end_time = time.time() + timeout
+
+ with self._workers_lock:
+ workers_to_wait = list(self._workers)
+
+ if workers_to_wait:
+ logger.info("Waiting for %d webhook workers to complete revocation notifications...", len(workers_to_wait))
+
+ for worker in workers_to_wait:
+ remaining_time = max(0, end_time - time.time())
+ if remaining_time > 0:
+ logger.debug(
+ "Waiting for webhook worker %s to complete (timeout: %.1f seconds)", worker.name, remaining_time
+ )
+ worker.join(timeout=remaining_time)
+ if worker.is_alive():
+ logger.warning("Webhook worker %s did not complete within timeout", worker.name)
+ else:
+ logger.warning("Timeout exceeded while waiting for webhook workers")
+ break
+
+ # Clean up completed workers
+ with self._workers_lock:
+ self._workers.clear()
+
+ logger.info("Webhook workers shutdown complete")
+
+
+def _get_webhook_manager() -> WebhookNotificationManager:
+ """Get the global webhook manager instance, creating it if needed."""
+ global _webhook_manager
+ if _webhook_manager is None:
+ _webhook_manager = WebhookNotificationManager()
+ return _webhook_manager
+
# return the revocation notification methods for cloud verifier
def get_notifiers() -> Set[str]:
@@ -83,6 +251,12 @@ def stop_broker() -> None:
broker_proc.kill() # pylint: disable=E1101
+def shutdown_webhook_workers() -> None:
+ """Convenience function to shutdown webhook workers using the global manager."""
+ manager = _get_webhook_manager()
+ manager.shutdown_workers()
+
+
def notify(tosend: Dict[str, Any]) -> None:
assert "zeromq" in get_notifiers()
try:
@@ -127,68 +301,9 @@ def notify(tosend: Dict[str, Any]) -> None:
def notify_webhook(tosend: Dict[str, Any]) -> None:
- url = config.get("verifier", "webhook_url", section="revocations", fallback="")
- # Check if a url was specified
- if url == "":
- return
-
- # Similarly to notify(), let's convert `tosend' to str to prevent
- # possible issues with json handling by python-requests.
- tosend = json.bytes_to_str(tosend)
-
- def worker_webhook(tosend: Dict[str, Any], url: str) -> None:
- interval = config.getfloat("verifier", "retry_interval")
- exponential_backoff = config.getboolean("verifier", "exponential_backoff")
-
- max_retries = config.getint("verifier", "max_retries")
- if max_retries <= 0:
- logger.info("Invalid value found in 'max_retries' option for verifier, using default value")
- max_retries = 5
-
- # Get TLS options from the configuration
- (cert, key, trusted_ca, key_password), verify_server_cert = web_util.get_tls_options(
- "verifier", is_client=True, logger=logger
- )
-
- # Generate the TLS context using the obtained options
- tls_context = web_util.generate_tls_context(cert, key, trusted_ca, key_password, is_client=True, logger=logger)
-
- logger.info("Sending revocation event via webhook to %s ...", url)
- for i in range(max_retries):
- next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
-
- with RequestsClient(
- url,
- verify_server_cert,
- tls_context,
- ) as client:
- try:
- res = client.post("", json=tosend, timeout=5)
- except requests.exceptions.SSLError as ssl_error:
- if "TLSV1_ALERT_UNKNOWN_CA" in str(ssl_error):
- logger.warning(
- "Keylime does not recognize certificate from peer. Check if verifier 'trusted_server_ca' is configured correctly"
- )
-
- raise ssl_error from ssl_error
-
- if res and res.status_code in [200, 202]:
- break
-
- logger.debug(
- "Unable to publish revocation message %d times via webhook, "
- "trying again in %d seconds. "
- "Server returned status code: %s",
- i + 1,
- next_retry,
- res.status_code,
- )
-
- time.sleep(next_retry)
-
- w = functools.partial(worker_webhook, tosend, url)
- t = threading.Thread(target=w, daemon=True)
- t.start()
+ """Send webhook notification using the global webhook manager."""
+ manager = _get_webhook_manager()
+ manager.notify_webhook(tosend)
cert_key = None
--
2.47.3

View File

@ -1,59 +0,0 @@
--- a/scripts/create_runtime_policy.sh 2023-10-09 17:04:26.121194607 +0200
+++ b/scripts/create_runtime_policy.sh 2023-10-09 17:06:02.089855614 +0200
@@ -42,7 +42,7 @@
exit $NOARGS;
fi
-ALGO=sha1sum
+ALGO=sha256sum
ALGO_LIST=("sha1sum" "sha256sum" "sha512sum")
@@ -78,7 +78,7 @@
# Where to look for initramfs image
-INITRAMFS_LOC="/boot/"
+INITRAMFS_LOC="/boot"
if [ -d "/ostree" ]; then
# If we are on an ostree system change where we look for initramfs image
loc=$(grep -E "/ostree/[^/]([^/]*)" -o /proc/cmdline | head -n 1 | cut -d / -f 3)
@@ -121,7 +121,7 @@
cp -r /tmp/ima/$i-extracted-unmk/. /tmp/ima/$i-extracted
fi
elif [[ -x "/usr/lib/dracut/skipcpio" ]] ; then
- /usr/lib/dracut/skipcpio $i | gunzip -c | cpio -i -d 2> /dev/null
+ /usr/lib/dracut/skipcpio $i | gunzip -c 2> /dev/null | cpio -i -d 2> /dev/null
else
echo "ERROR: No tools for initramfs image processing found!"
break
@@ -130,9 +130,26 @@
find -type f -exec $ALGO "./{}" \; | sed "s| \./\./| /|" >> $OUTPUT
done
-# Convert to runtime policy
-echo "Converting created allowlist to Keylime runtime policy"
-python3 $WORKING_DIR/../keylime/cmd/convert_runtime_policy.py -a $OUTPUT -o $OUTPUT
+# when ROOTFS_LOC = '/', the path starts on allowlist ends up with double '//'
+#
+# Example:
+#
+# b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c //bar
+#
+# Replace the unwanted '//' with a single '/'
+sed -i 's| /\+| /|g' $ALLOWLIST_DIR/${OUTPUT}
+
+# When the file name contains newlines or backslashes, the output of sha256sum
+# adds a backslash at the beginning of the line.
+#
+# Example:
+#
+# $ echo foo > ba\\r
+# $ sha256sum ba\\r
+# \b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c ba\\r
+#
+# Remove the unwanted backslash prefix
+sed -i 's/^\\//g' $ALLOWLIST_DIR/${OUTPUT}
# Clean up
rm -rf /tmp/ima

View File

@ -0,0 +1,45 @@
From 5fb4484b07a7ba3fcdf451bf816b5f07a40d6d97 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Wed, 4 Jun 2025 19:52:37 +0100
Subject: [PATCH 12/13] requests_client: close the session at the end of the
resource manager
We had an issue in the past in which the webhook worker would not
properly close the opened session. This was fixed in #1456 (Close
session in worker_webhook function).
At some later point, in #1566 (revocation_notifier: Take into account CA
certificates added via configuration), some refactoring around the
webhook_worker() in revocation_notifier happened and it started using
the RequestsClient resource manager.
However, the RequestsClient does not close the session at its end, which
in turns makes that the old issue of not closing properly the session
in the webhook_worker() returned.
We now issue a session.close() at the end of the RequestsClient.
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
keylime/requests_client.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/keylime/requests_client.py b/keylime/requests_client.py
index 16615f7..b7da484 100644
--- a/keylime/requests_client.py
+++ b/keylime/requests_client.py
@@ -40,7 +40,10 @@ class RequestsClient:
return self
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
- pass
+ try:
+ self.session.close()
+ except Exception:
+ pass
def request(self, method: str, url: str, **kwargs: Any) -> requests.Response:
return self.session.request(method, self.base_url + url, **kwargs)
--
2.47.3

View File

@ -1,44 +0,0 @@
diff --git a/keylime/cloud_verifier_common.py b/keylime/cloud_verifier_common.py
index a7399d2..c0f416d 100644
--- a/keylime/cloud_verifier_common.py
+++ b/keylime/cloud_verifier_common.py
@@ -8,7 +8,7 @@ from keylime.agentstates import AgentAttestState, AgentAttestStates, TPMClockInf
from keylime.common import algorithms
from keylime.db.verifier_db import VerfierMain
from keylime.failure import Component, Event, Failure
-from keylime.ima import file_signatures
+from keylime.ima import file_signatures, ima
from keylime.ima.types import RuntimePolicyType
from keylime.tpm import tpm_util
from keylime.tpm.tpm_main import Tpm
@@ -271,7 +271,7 @@ def process_get_status(agent: VerfierMain) -> Dict[str, Any]:
logger.debug('The contents of the agent %s attribute "mb_refstate" are %s', agent.agent_id, agent.mb_refstate)
has_runtime_policy = 0
- if agent.ima_policy.generator and agent.ima_policy.generator > 1:
+ if agent.ima_policy.generator and agent.ima_policy.generator > ima.RUNTIME_POLICY_GENERATOR.EmptyAllowList:
has_runtime_policy = 1
response = {
diff --git a/keylime/cmd/create_policy.py b/keylime/cmd/create_policy.py
index 0841d64..086b92a 100755
--- a/keylime/cmd/create_policy.py
+++ b/keylime/cmd/create_policy.py
@@ -6,6 +6,7 @@ import argparse
import binascii
import collections
import copy
+import datetime
import gzip
import json
import multiprocessing
@@ -580,6 +581,9 @@ def main() -> None:
policy["excludes"] = sorted(list(set(policy["excludes"])))
policy["ima"]["ignored_keyrings"] = sorted(list(set(policy["ima"]["ignored_keyrings"])))
+ policy["meta"]["generator"] = ima.RUNTIME_POLICY_GENERATOR.LegacyAllowList
+ policy["meta"]["timestamp"] = str(datetime.datetime.now())
+
try:
ima.validate_runtime_policy(policy)
except ima.ImaValidationError as ex:

View File

@ -1,80 +0,0 @@
From add9847988e963fd124863736592fc16cc8c716b Mon Sep 17 00:00:00 2001
From: Stefan Berger <stefanb@linux.ibm.com>
Date: Tue, 11 Jul 2023 18:03:28 -0400
Subject: [PATCH 14/14] tpm_util: Replace a logger.error with an Exception in
case of invalid signature
This fixes a possibly severe issue in 7.2.5 & 7.3.0.
Signed-off-by: Stefan Berger <stefanb@linux.ibm.com>
---
keylime/tpm/tpm_util.py | 6 +-----
keylime/tpm/tpm_util_test.py | 21 +++++++++++++++++++++
2 files changed, 22 insertions(+), 5 deletions(-)
diff --git a/keylime/tpm/tpm_util.py b/keylime/tpm/tpm_util.py
index ce2ce0f..58a1a04 100644
--- a/keylime/tpm/tpm_util.py
+++ b/keylime/tpm/tpm_util.py
@@ -3,7 +3,6 @@ import string
import struct
from typing import Any, Dict, List, Optional, Tuple, Union
-from cryptography.exceptions import InvalidSignature
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes, hmac, serialization
from cryptography.hazmat.primitives.asymmetric import ec, padding
@@ -155,10 +154,7 @@ def checkquote(
digest.update(quoteblob)
quote_digest = digest.finalize()
- try:
- verify(pubkey, signature, quote_digest, hashfunc)
- except InvalidSignature:
- logger.error("Invalid quote signature!")
+ verify(pubkey, signature, quote_digest, hashfunc)
# Check that reported nonce is expected one
retDict = tpm2_objects.unmarshal_tpms_attest(quoteblob)
diff --git a/keylime/tpm/tpm_util_test.py b/keylime/tpm/tpm_util_test.py
index aaf16cd..2c73997 100644
--- a/keylime/tpm/tpm_util_test.py
+++ b/keylime/tpm/tpm_util_test.py
@@ -2,6 +2,7 @@ import base64
import unittest
from unittest import mock
+from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.asymmetric.ec import (
SECP256R1,
EllipticCurve,
@@ -60,6 +61,26 @@ class TestTpmUtil(unittest.TestCase):
except Exception as e:
self.fail(f"checkquote failed with {e}")
+ # test bad input
+ bad_quoteblob = bytearray(quoteblob)
+ bad_quoteblob[5] ^= 0x1
+ with self.assertRaises(InvalidSignature):
+ checkquote(aikblob, nonce, sigblob, bad_quoteblob, pcrblob, "sha256")
+
+ l = list(nonce)
+ l[0] = "a"
+ bad_nonce = "".join(l)
+ with self.assertRaises(Exception):
+ checkquote(aikblob, bad_nonce, sigblob, quoteblob, pcrblob, "sha256")
+
+ bad_pcrblob = bytearray(pcrblob)
+ bad_pcrblob[5] ^= 0x1
+ with self.assertRaises(Exception):
+ checkquote(aikblob, nonce, sigblob, quoteblob, bad_pcrblob, "sha256")
+
+ with self.assertRaises(ValueError):
+ checkquote(aikblob, nonce, sigblob, quoteblob, pcrblob, "sha1")
+
@staticmethod
def not_random(numbytes: int) -> bytes:
return b"\x12" * numbytes
--
2.41.0

File diff suppressed because it is too large Load Diff

View File

@ -1,167 +0,0 @@
From 4bd644b74719fdbb6c521d3d5eb2430d8dc18b36 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Wed, 5 Feb 2025 16:16:25 +0000
Subject: [PATCH 16/16] Use TLS on revocation notification webhook
---
keylime/requests_client.py | 5 ++
keylime/revocation_notifier.py | 91 +++++++++++++++++++++++-----------
2 files changed, 68 insertions(+), 28 deletions(-)
diff --git a/keylime/requests_client.py b/keylime/requests_client.py
index 85a175c..e993fbc 100644
--- a/keylime/requests_client.py
+++ b/keylime/requests_client.py
@@ -1,3 +1,4 @@
+import re
import ssl
from typing import Any, Dict, Optional
@@ -15,6 +16,10 @@ class RequestsClient:
ignore_hostname: bool = True,
**kwargs: Any,
) -> None:
+ # Remove eventual "http?://" from the base url
+ if base_url.startswith("http"):
+ base_url = re.sub(r"https?://", "", base_url)
+
if tls_enabled:
self.base_url = f"https://{base_url}"
else:
diff --git a/keylime/revocation_notifier.py b/keylime/revocation_notifier.py
index 5cc8b1a..434bf64 100644
--- a/keylime/revocation_notifier.py
+++ b/keylime/revocation_notifier.py
@@ -9,8 +9,9 @@ from typing import Any, Callable, Dict, Optional, Set
import requests
-from keylime import config, crypto, json, keylime_logging
+from keylime import config, crypto, json, keylime_logging, web_util
from keylime.common import retry
+from keylime.requests_client import RequestsClient
logger = keylime_logging.init_logging("revocation_notifier")
broker_proc: Optional[Process] = None
@@ -109,7 +110,10 @@ def notify(tosend: Dict[str, Any]) -> None:
exponential_backoff = config.getboolean("verifier", "exponential_backoff")
next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
logger.debug(
- "Unable to publish revocation message %d times, trying again in %f seconds: %s", i, next_retry, e
+ "Unable to publish revocation message %d times, trying again in %f seconds: %s",
+ i,
+ next_retry,
+ e,
)
time.sleep(next_retry)
mysock.close()
@@ -132,30 +136,50 @@ def notify_webhook(tosend: Dict[str, Any]) -> None:
def worker_webhook(tosend: Dict[str, Any], url: str) -> None:
interval = config.getfloat("verifier", "retry_interval")
exponential_backoff = config.getboolean("verifier", "exponential_backoff")
- with requests.Session() as session:
- logger.info("Sending revocation event via webhook...")
- for i in range(config.getint("verifier", "max_retries")):
- next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
+
+ max_retries = config.getint("verifier", "max_retries")
+ if max_retries <= 0:
+ logger.info("Invalid value found in 'max_retries' option for verifier, using default value")
+ max_retries = 5
+
+ # Get TLS options from the configuration
+ (cert, key, trusted_ca, key_password), verify_server_cert = web_util.get_tls_options(
+ "verifier", is_client=True, logger=logger
+ )
+
+ # Generate the TLS context using the obtained options
+ tls_context = web_util.generate_tls_context(cert, key, trusted_ca, key_password, is_client=True, logger=logger)
+
+ logger.info("Sending revocation event via webhook to %s ...", url)
+ for i in range(max_retries):
+ next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
+
+ with RequestsClient(
+ url,
+ verify_server_cert,
+ tls_context,
+ ) as client:
try:
- response = session.post(url, json=tosend, timeout=5)
- if response.status_code in [200, 202]:
- break
-
- logger.debug(
- "Unable to publish revocation message %d times via webhook, "
- "trying again in %d seconds. "
- "Server returned status code: %s",
- i,
- next_retry,
- response.status_code,
- )
- except requests.exceptions.RequestException as e:
- logger.debug(
- "Unable to publish revocation message %d times via webhook, trying again in %d seconds: %s",
- i,
- next_retry,
- e,
- )
+ res = client.post("", json=tosend, timeout=5)
+ except requests.exceptions.SSLError as ssl_error:
+ if "TLSV1_ALERT_UNKNOWN_CA" in str(ssl_error):
+ logger.warning(
+ "Keylime does not recognize certificate from peer. Check if verifier 'trusted_server_ca' is configured correctly"
+ )
+
+ raise ssl_error from ssl_error
+
+ if res and res.status_code in [200, 202]:
+ break
+
+ logger.debug(
+ "Unable to publish revocation message %d times via webhook, "
+ "trying again in %d seconds. "
+ "Server returned status code: %s",
+ i + 1,
+ next_retry,
+ res.status_code,
+ )
time.sleep(next_retry)
@@ -167,7 +191,11 @@ def notify_webhook(tosend: Dict[str, Any]) -> None:
cert_key = None
-def process_revocation(revocation: Dict[str, Any], callback: Callable[[Dict[str, Any]], None], cert_path: str) -> None:
+def process_revocation(
+ revocation: Dict[str, Any],
+ callback: Callable[[Dict[str, Any]], None],
+ cert_path: str,
+) -> None:
global cert_key
if cert_key is None:
@@ -179,10 +207,17 @@ def process_revocation(revocation: Dict[str, Any], callback: Callable[[Dict[str,
cert_key = crypto.x509_import_pubkey(certpem)
if cert_key is None:
- logger.warning("Unable to check signature of revocation message: %s not available", cert_path)
+ logger.warning(
+ "Unable to check signature of revocation message: %s not available",
+ cert_path,
+ )
elif "signature" not in revocation or revocation["signature"] == "none":
logger.warning("No signature on revocation message from server")
- elif not crypto.rsa_verify(cert_key, revocation["msg"].encode("utf-8"), revocation["signature"].encode("utf-8")):
+ elif not crypto.rsa_verify(
+ cert_key,
+ revocation["msg"].encode("utf-8"),
+ revocation["signature"].encode("utf-8"),
+ ):
logger.error("Invalid revocation message siganture %s", revocation)
else:
message = json.loads(revocation["msg"])
--
2.47.1

39
SOURCES/keylime.tmpfiles Normal file
View File

@ -0,0 +1,39 @@
d /run/keylime 0700 keylime keylime -
d /var/lib/keylime 0700 keylime keylime -
d /etc/keylime 0500 keylime keylime -
d /etc/keylime/logging.conf.d 0500 keylime keylime -
d /etc/keylime/verifier.conf.d 0500 keylime keylime -
d /etc/keylime/registrar.conf.d 0500 keylime keylime -
d /etc/keylime/tenant.conf.d 0500 keylime keylime -
d /etc/keylime/agent.conf.d 0500 keylime keylime -
# TPM certificate store.
# Copy the cert store from /usr/share/keylime/tpm_cert_store
# to /var/lib/keylime/tpm_cert_store.
# Files inside /var/lib/keylime/tpm_cert_store/ have
# 0400 permission and are owned by keylime/keylime,
# while /var/lib/keylime/tpm_cert_store/ itself has
# permission 0500, also owned by keylime/keylime.
C /var/lib/keylime/tpm_cert_store 0500 keylime keylime - /usr/share/keylime/tpm_cert_store
Z /var/lib/keylime/tpm_cert_store 0400 keylime keylime -
z /var/lib/keylime/tpm_cert_store 0500 keylime keylime -
# Finally, /var/lib/keylime itself has 0700 permission,
# and is owned by keylime/keylime.
z /var/lib/keylime 0700 keylime keylime -
# Keylime configuration in /etc/keylime has permission 0400
# owned by keylime/keylime, while snippet directories and
# the actual /etc/keylime directory have permission 0500,
# also owned by keylime/keylime.
Z /etc/keylime 0400 keylime keylime -
# Now fix the directories:
z /etc/keylime/ca.conf.d 0500 keylime keylime -
z /etc/keylime/logging.conf.d 0500 keylime keylime -
z /etc/keylime/verifier.conf.d 0500 keylime keylime -
z /etc/keylime/registrar.conf.d 0500 keylime keylime -
z /etc/keylime/tenant.conf.d 0500 keylime keylime -
z /etc/keylime/agent.conf.d 0500 keylime keylime -
# And finally, /etc/keylime itself.
z /etc/keylime 0500 keylime keylime -

View File

@ -1,5 +1,5 @@
%global srcname keylime
%global policy_version 1.2.0
%global policy_version 42.1.2
%global with_selinux 1
%global selinuxtype targeted
@ -8,43 +8,59 @@
%global debug_package %{nil}
Name: keylime
Version: 7.3.0
Release: 15%{?dist}
Version: 7.12.1
Release: 11%{?dist}
Summary: Open source TPM software for Bootstrapping and Maintaining Trust
URL: https://github.com/keylime/keylime
Source0: https://github.com/keylime/keylime/archive/refs/tags/v%{version}.tar.gz
Source1: %{srcname}.sysusers
Source2: https://github.com/RedHat-SP-Security/%{name}-selinux/archive/v%{policy_version}/keylime-selinux-%{policy_version}.tar.gz
Source1: https://github.com/RedHat-SP-Security/%{name}-selinux/archive/v%{policy_version}/keylime-selinux-%{policy_version}.tar.gz
Source2: %{srcname}.sysusers
Source3: %{srcname}.tmpfiles
Patch: 0001-Remove-usage-of-Required-NotRequired-typing_ext.patch
Patch: 0002-Allow-keylime_server_t-tcp-connect-to-several-domain.patch
Patch: 0003-Use-version-2.0-as-the-minimum-for-the-configuration.patch
Patch: 0004-Duplicate-str_to_version-for-the-upgrade-tool.patch
Patch: 0005-elchecking-example-add-ignores-for-EV_PLATFORM_CONFI.patch
Patch: 0006-Revert-mapping-changes.patch
Patch: 0007-Handle-session-close-using-a-session-manager.patch
Patch: 0008-verifier-should-read-parameters-from-verifier.conf-o.patch
Patch: 0009-CVE-2023-38201.patch
Patch: 0010-CVE-2023-38200.patch
Patch: 0011-Automatically-update-agent-API-version.patch
Patch: 0012-Restore-create-allowlist.patch
Patch: 0013-Set-generator-and-timestamp-in-create-policy.patch
Patch: 0014-tpm_util-Replace-a-logger.error-with-an-Exception-in.patch
Patch: 0015-Backport-keylime-policy-tool.patch
Patch: 0016-Use-TLS-on-revocation-notification-webhook.patch
Patch: 0001-Make-keylime-compatible-with-python-3.9.patch
Patch: 0002-tests-fix-rpm-repo-tests-from-create-runtime-policy.patch
Patch: 0003-tests-skip-measured-boot-related-tests-for-s390x-and.patch
Patch: 0004-templates-duplicate-str_to_version-in-the-adjust-scr.patch
# RHEL-9 ships a slightly modified version of create_allowlist.sh and
# also a "default" server_key_password for the registrar and verifier.
# DO NOT REMOVE THE FOLLOWING TWO PATCHES IN FOLLOWING RHEL-9.x REBASES.
Patch: 0005-Restore-RHEL-9-version-of-create_allowlist.sh.patch
Patch: 0006-Revert-default-server_key_password-for-verifier-regi.patch
# Backported from https://github.com/keylime/keylime/pull/1782
Patch: 0007-fix_db_connection_leaks.patch
# Backported from https://github.com/keylime/keylime/pull/1791
Patch: 0008-mb-support-EV_EFI_HANDOFF_TABLES-events-on-PCR1.patch
Patch: 0009-mb-support-vendor_db-as-logged-by-newer-shim-version.patch
# Backported from https://github.com/keylime/keylime/pull/1784
# and https://github.com/keylime/keylime/pull/1785.
Patch: 0010-verifier-Gracefully-shutdown-on-signal.patch
Patch: 0011-revocations-Try-to-send-notifications-on-shutdown.patch
Patch: 0012-requests_client-close-the-session-at-the-end-of-the-.patch
License: ASL 2.0 and MIT
BuildRequires: git-core
BuildRequires: swig
BuildRequires: openssl-devel
BuildRequires: python3-devel
BuildRequires: python3-dbus
BuildRequires: python3-jinja2
BuildRequires: python3-cryptography
BuildRequires: python3-pyasn1
BuildRequires: python3-pyasn1-modules
BuildRequires: python3-tornado
BuildRequires: python3-sqlalchemy
BuildRequires: python3-lark-parser
BuildRequires: python3-psutil
BuildRequires: python3-pyyaml
BuildRequires: python3-jsonschema
BuildRequires: python3-setuptools
BuildRequires: systemd-rpm-macros
BuildRequires: tpm2-abrmd-selinux
BuildRequires: rpm-sign
BuildRequires: createrepo_c
BuildRequires: tpm2-tools
Requires: python3-%{srcname} = %{version}-%{release}
Requires: %{srcname}-base = %{version}-%{release}
@ -69,8 +85,9 @@ License: MIT
Requires(pre): python3-jinja2
Requires(pre): shadow-utils
Requires(pre): util-linux
Requires(pre): tpm2-tss
Requires: procps-ng
Requires: tpm2-tss
Requires: openssl
%if 0%{?with_selinux}
# This ensures that the *-selinux package and all its dependencies are not pulled
@ -79,6 +96,7 @@ Recommends: (%{srcname}-selinux if selinux-policy-%{selinuxtype})
%endif
%ifarch %efi
BuildRequires: efivar-libs
Requires: efivar-libs
%endif
@ -161,7 +179,7 @@ Requires: python3-%{srcname} = %{version}-%{release}
The Keylime Tenant can be used to provision a Keylime Agent.
%prep
%autosetup -S git -n %{srcname}-%{version} -a2
%autosetup -S git -n %{srcname}-%{version} -a1
%if 0%{?with_selinux}
# SELinux policy (originally from selinux-policy-contrib)
@ -179,7 +197,6 @@ bzip2 -9 %{srcname}.pp
%py3_install
mkdir -p %{buildroot}/%{_sharedstatedir}/%{srcname}
mkdir -p --mode=0700 %{buildroot}/%{_rundir}/%{srcname}
mkdir -p --mode=0700 %{buildroot}/%{_localstatedir}/log/%{srcname}
mkdir -p --mode=0700 %{buildroot}/%{_sysconfdir}/%{srcname}/
for comp in "verifier" "tenant" "registrar" "ca" "logging"; do
@ -219,22 +236,55 @@ install -Dpm 644 ./services/%{srcname}_verifier.service \
install -Dpm 644 ./services/%{srcname}_registrar.service \
%{buildroot}%{_unitdir}/%{srcname}_registrar.service
cp -r ./tpm_cert_store %{buildroot}%{_sharedstatedir}/%{srcname}/
chmod 400 %{buildroot}%{_sharedstatedir}/%{srcname}/tpm_cert_store/*.pem
# TPM cert store is deployed to both /usr/share/keylime/tpm_cert_store
# and then /var/lib/keylime/tpm_cert_store.
for cert_store_dir in %{_datadir} %{_sharedstatedir}; do
mkdir -p %{buildroot}/"${cert_store_dir}"/%{srcname}
cp -r ./tpm_cert_store %{buildroot}/"${cert_store_dir}"/%{srcname}/
done
install -p -d %{buildroot}/%{_tmpfilesdir}
cat > %{buildroot}/%{_tmpfilesdir}/%{srcname}.conf << EOF
d %{_rundir}/%{srcname} 0700 %{srcname} %{srcname} -
EOF
# Install the sysusers + tmpfiles.d configuration.
install -p -D -m 0644 %{SOURCE2} %{buildroot}/%{_sysusersdir}/%{srcname}.conf
install -p -D -m 0644 %{SOURCE3} %{buildroot}/%{_tmpfilesdir}/%{name}.conf
install -p -D -m 0644 %{SOURCE1} %{buildroot}%{_sysusersdir}/%{srcname}.conf
%check
# Create the default configuration files to be used by the tests.
# Also set the associated environment variables so that the tests
# will actually use them.
CONF_TEMP_DIR="$(mktemp -d)"
%{python3} -m keylime.cmd.convert_config --out "${CONF_TEMP_DIR}" --templates templates/
export KEYLIME_VERIFIER_CONFIG="${CONF_TEMP_DIR}/verifier.conf"
export KEYLIME_TENANT_CONFIG="${CONF_TEMP_DIR}/tenant.conf"
export KEYLIME_REGISTRAR_CONFIG="${CONF_TEMP_DIR}/registrar.conf"
export KEYLIME_CA_CONFIG="${CONF_TEMP_DIR}/ca.conf"
export KEYLIME_LOGGING_CONFIG="${CONF_TEMP_DIR}/logging.conf"
# Run the tests.
%{python3} -m unittest
# Cleanup.
[ "${CONF_TEMP_DIR}" ] && rm -rf "${CONF_TEMP_DIR}"
for e in KEYLIME_VERIFIER_CONFIG \
KEYLIME_TENANT_CONFIG \
KEYLIME_REGISTRAR_CONFIG \
KEYLIME_CA_CONFIG \
KEYLIME_LOGGING_CONFIG; do
unset "${e}"
done
exit 0
%pre base
%sysusers_create_compat %{SOURCE1}
%sysusers_create_compat %{SOURCE2}
exit 0
%post base
/usr/bin/keylime_upgrade_config --component ca --component logging >/dev/null
for c in ca logging; do
[ -e /etc/keylime/"${c}.conf" ] || continue
/usr/bin/keylime_upgrade_config --component "${c}" \
--input /etc/keylime/"${c}.conf" \
>/dev/null
done
exit 0
%posttrans base
@ -254,23 +304,29 @@ fi
[ -d %{_sharedstatedir}/%{srcname}/tpm_cert_store ] && \
chmod 400 %{_sharedstatedir}/%{srcname}/tpm_cert_store/*.pem && \
chmod 500 %{_sharedstatedir}/%{srcname}/tpm_cert_store/
[ -d %{_localstatedir}/log/%{srcname} ] && \
chown -R %{srcname} %{_localstatedir}/log/%{srcname}/
exit 0
%post verifier
/usr/bin/keylime_upgrade_config --component verifier >/dev/null
[ -e /etc/keylime/verifier.conf ] && \
/usr/bin/keylime_upgrade_config --component verifier \
--input /etc/keylime/verifier.conf \
>/dev/null
%systemd_post %{srcname}_verifier.service
exit 0
%post registrar
/usr/bin/keylime_upgrade_config --component registrar >/dev/null
[ -e /etc/keylime/registrar.conf ] && \
/usr/bin/keylime_upgrade_config --component registrar \
--input /etc/keylime/registrar.conf \
>/dev/null
%systemd_post %{srcname}_registrar.service
exit 0
%post tenant
/usr/bin/keylime_upgrade_config --component tenant >/dev/null
[ -e /etc/keylime/tenant.conf ] && \
/usr/bin/keylime_upgrade_config --component tenant \
--input /etc/keylime/tenant.conf \
>/dev/null
exit 0
%preun verifier
@ -356,12 +412,14 @@ fi
%files base
%license LICENSE
%doc README.md
%attr(500,%{srcname},%{srcname}) %dir %{_sysconfdir}/%{srcname}
%attr(500,%{srcname},%{srcname}) %dir %{_sysconfdir}/%{srcname}/{ca,logging}.conf.d
%config(noreplace) %verify(not md5 size mode mtime) %attr(400,%{srcname},%{srcname}) %{_sysconfdir}/%{srcname}/ca.conf
%config(noreplace) %verify(not md5 size mode mtime) %attr(400,%{srcname},%{srcname}) %{_sysconfdir}/%{srcname}/logging.conf
%attr(700,%{srcname},%{srcname}) %dir %{_rundir}/%{srcname}
%attr(700,%{srcname},%{srcname}) %dir %{_localstatedir}/log/%{srcname}
%attr(700,%{srcname},%{srcname}) %dir %{_sharedstatedir}/%{srcname}
%attr(500,%{srcname},%{srcname}) %dir %{_datadir}/%{srcname}/tpm_cert_store
%attr(400,%{srcname},%{srcname}) %{_datadir}/%{srcname}/tpm_cert_store/*.pem
%attr(500,%{srcname},%{srcname}) %dir %{_sharedstatedir}/%{srcname}/tpm_cert_store
%attr(400,%{srcname},%{srcname}) %{_sharedstatedir}/%{srcname}/tpm_cert_store/*.pem
%{_tmpfilesdir}/%{srcname}.conf
@ -375,6 +433,50 @@ fi
%license LICENSE
%changelog
* Mon Aug 18 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-11
- Fix for revocation notifier not closing TLS session correctly
Resolves: RHEL-109656
* Wed Aug 13 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-10
- Support vendor_db: follow-up fix
Related: RHEL-80455
* Tue Aug 12 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-9
- Support vendor_db as logged by newer shim versions
Resolves: RHEL-80455
* Fri Aug 08 2025 Anderson Toshiyuki Sasaki <ansasaki@redhat.com> - 7.12.1-8
- Fix DB connection leaks
Resolves: RHEL-108263
* Tue Jul 22 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-7
- Fix tmpfiles.d configuration related to the cert store
Resolves: RHEL-104572
* Thu Jul 10 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-6
- Populate cert_store_dir with tmpfiles.d
Resolves: RHEL-76926
* Thu Jul 10 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-5
- Use tmpfiles.d for permissions in /var/lib/keylime and /etc/keylime
Resolves: RHEL-77144
* Tue Jul 08 2025 Patrik Koncity <pkoncity@redhat.com> - 7.12.1-4
- Add new keylime-selinux release - removing keylime_var_log_t label
Resolves: RHEL-388
* Fri Jun 20 2025 Anderson Toshiyuki Sasaki <ansasaki@redhat.com> - 7.12.1-3
- Avoid changing ownership of /var/log/keylime
Resolves: RHEL-388
* Tue May 27 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-2
- Revert changes to default server_key_password for verifier/registrar
Resolves: RHEL-93678
* Thu May 22 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-1
- Update to 7.12.1
Resolves: RHEL-78418
* Wed Feb 05 2025 Sergio Correia <scorreia@redhat.com> - 7.3.0-15
- Use TLS on revocation notification webhook
- Include system installed CA certificates when verifying webhook