Update to 7.12.1

Resolves: RHEL-78418

Signed-off-by: Sergio Correia <scorreia@redhat.com>
This commit is contained in:
Sergio Correia 2025-05-22 10:25:20 +00:00
parent 5d41ae0699
commit 3cdf1fe060
No known key found for this signature in database
GPG Key ID: D0D219ED1F7E762C
27 changed files with 1290 additions and 7945 deletions

3
.gitignore vendored
View File

@ -6,3 +6,6 @@
/keylime-selinux-1.0.0.tar.gz
/v7.3.0.tar.gz
/keylime-selinux-1.2.0.tar.gz
/v7.12.0.tar.gz
/keylime-selinux-38.1.0.tar.gz
/v7.12.1.tar.gz

View File

@ -0,0 +1,628 @@
From f7c32aec9c44a176124d982d942391ed3d50e846 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 3 Jun 2025 21:23:09 +0100
Subject: [PATCH 1/6] Make keylime compatible with python 3.9
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
keylime/ima/types.py | 33 ++++----
keylime/models/base/basic_model.py | 4 +-
keylime/models/base/basic_model_meta.py | 4 +-
keylime/models/base/field.py | 4 +-
keylime/models/base/persistable_model.py | 4 +-
keylime/models/base/type.py | 4 +-
keylime/models/base/types/base64_bytes.py | 4 +-
keylime/models/base/types/certificate.py | 92 +++++++++++----------
keylime/models/base/types/dictionary.py | 4 +-
keylime/models/base/types/one_of.py | 6 +-
keylime/models/registrar/registrar_agent.py | 31 +++----
keylime/policy/create_runtime_policy.py | 2 +-
keylime/registrar_client.py | 8 +-
keylime/web/base/action_handler.py | 7 +-
keylime/web/base/controller.py | 78 ++++++++---------
tox.ini | 10 +++
16 files changed, 154 insertions(+), 141 deletions(-)
diff --git a/keylime/ima/types.py b/keylime/ima/types.py
index 99f0aa7..a0fffdf 100644
--- a/keylime/ima/types.py
+++ b/keylime/ima/types.py
@@ -6,11 +6,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import Literal, TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired, Required
-else:
- from typing_extensions import NotRequired, Required
-
### Types for tpm_dm.py
RuleAttributeType = Optional[Union[int, str, bool]]
@@ -51,7 +46,7 @@ class Rule(TypedDict):
class Policies(TypedDict):
- version: Required[int]
+ version: int
match_on: MatchKeyType
rules: Dict[str, Rule]
@@ -60,27 +55,27 @@ class Policies(TypedDict):
class RPMetaType(TypedDict):
- version: Required[int]
- generator: NotRequired[int]
- timestamp: NotRequired[str]
+ version: int
+ generator: int
+ timestamp: str
class RPImaType(TypedDict):
- ignored_keyrings: Required[List[str]]
- log_hash_alg: Required[Literal["sha1", "sha256", "sha384", "sha512"]]
+ ignored_keyrings: List[str]
+ log_hash_alg: Literal["sha1", "sha256", "sha384", "sha512"]
dm_policy: Optional[Policies]
RuntimePolicyType = TypedDict(
"RuntimePolicyType",
{
- "meta": Required[RPMetaType],
- "release": NotRequired[int],
- "digests": Required[Dict[str, List[str]]],
- "excludes": Required[List[str]],
- "keyrings": Required[Dict[str, List[str]]],
- "ima": Required[RPImaType],
- "ima-buf": Required[Dict[str, List[str]]],
- "verification-keys": Required[str],
+ "meta": RPMetaType,
+ "release": int,
+ "digests": Dict[str, List[str]],
+ "excludes": List[str],
+ "keyrings": Dict[str, List[str]],
+ "ima": RPImaType,
+ "ima-buf": Dict[str, List[str]],
+ "verification-keys": str,
},
)
diff --git a/keylime/models/base/basic_model.py b/keylime/models/base/basic_model.py
index 68a126e..6f5de83 100644
--- a/keylime/models/base/basic_model.py
+++ b/keylime/models/base/basic_model.py
@@ -407,7 +407,9 @@ class BasicModel(ABC, metaclass=BasicModelMeta):
if max and length > max:
self._add_error(field, msg or f"should be at most {length} {element_type}(s)")
- def validate_number(self, field: str, *expressions: tuple[str, int | float], msg: Optional[str] = None) -> None:
+ def validate_number(
+ self, field: str, *expressions: tuple[str, Union[int, float]], msg: Optional[str] = None
+ ) -> None:
value = self.values.get(field)
if not value:
diff --git a/keylime/models/base/basic_model_meta.py b/keylime/models/base/basic_model_meta.py
index 353e004..84617d4 100644
--- a/keylime/models/base/basic_model_meta.py
+++ b/keylime/models/base/basic_model_meta.py
@@ -1,6 +1,6 @@
from abc import ABCMeta
from types import MappingProxyType
-from typing import Any, Callable, Mapping, TypeAlias, Union
+from typing import Any, Callable, Mapping, Union
from sqlalchemy.types import TypeEngine
@@ -40,7 +40,7 @@ class BasicModelMeta(ABCMeta):
# pylint: disable=bad-staticmethod-argument, no-value-for-parameter, using-constant-test
- DeclaredFieldType: TypeAlias = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
+ DeclaredFieldType = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
@classmethod
def _is_model_class(mcs, cls: type) -> bool: # type: ignore[reportSelfClassParameterName]
diff --git a/keylime/models/base/field.py b/keylime/models/base/field.py
index 7fb3dcb..d1e3bc3 100644
--- a/keylime/models/base/field.py
+++ b/keylime/models/base/field.py
@@ -1,6 +1,6 @@
import re
from inspect import isclass
-from typing import TYPE_CHECKING, Any, Optional, TypeAlias, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
from sqlalchemy.types import TypeEngine
@@ -23,7 +23,7 @@ class ModelField:
[2] https://docs.python.org/3/library/functions.html#property
"""
- DeclaredFieldType: TypeAlias = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
+ DeclaredFieldType = Union[ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
FIELD_NAME_REGEX = re.compile(r"^[A-Za-z_]+[A-Za-z0-9_]*$")
diff --git a/keylime/models/base/persistable_model.py b/keylime/models/base/persistable_model.py
index 18f7d0d..015d661 100644
--- a/keylime/models/base/persistable_model.py
+++ b/keylime/models/base/persistable_model.py
@@ -1,4 +1,4 @@
-from typing import Any, Mapping, Optional, Sequence
+from typing import Any, Mapping, Optional, Sequence, Union
from keylime.models.base.basic_model import BasicModel
from keylime.models.base.db import db_manager
@@ -165,7 +165,7 @@ class PersistableModel(BasicModel, metaclass=PersistableModelMeta):
else:
return None
- def __init__(self, data: Optional[dict | object] = None, process_associations: bool = True) -> None:
+ def __init__(self, data: Optional[Union[dict, object]] = None, process_associations: bool = True) -> None:
if isinstance(data, type(self).db_mapping):
super().__init__({}, process_associations)
self._init_from_mapping(data, process_associations)
diff --git a/keylime/models/base/type.py b/keylime/models/base/type.py
index 2520f72..e4d924c 100644
--- a/keylime/models/base/type.py
+++ b/keylime/models/base/type.py
@@ -1,7 +1,7 @@
from decimal import Decimal
from inspect import isclass
from numbers import Real
-from typing import Any, TypeAlias, Union
+from typing import Any, Union
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.types import TypeEngine
@@ -99,7 +99,7 @@ class ModelType:
you should instead set ``_type_engine`` to ``None`` and override the ``get_db_type`` method.
"""
- DeclaredTypeEngine: TypeAlias = Union[TypeEngine, type[TypeEngine]]
+ DeclaredTypeEngine = Union[TypeEngine, type[TypeEngine]]
def __init__(self, type_engine: DeclaredTypeEngine) -> None:
if isclass(type_engine) and issubclass(type_engine, TypeEngine):
diff --git a/keylime/models/base/types/base64_bytes.py b/keylime/models/base/types/base64_bytes.py
index b9b4b13..a1eeced 100644
--- a/keylime/models/base/types/base64_bytes.py
+++ b/keylime/models/base/types/base64_bytes.py
@@ -1,6 +1,6 @@
import base64
import binascii
-from typing import Optional, TypeAlias, Union
+from typing import Optional, Union
from sqlalchemy.types import Text
@@ -62,7 +62,7 @@ class Base64Bytes(ModelType):
b64_str = Base64Bytes().cast("MIIE...")
"""
- IncomingValue: TypeAlias = Union[bytes, str, None]
+ IncomingValue = Union[bytes, str, None]
def __init__(self) -> None:
super().__init__(Text)
diff --git a/keylime/models/base/types/certificate.py b/keylime/models/base/types/certificate.py
index 2c27603..0f03169 100644
--- a/keylime/models/base/types/certificate.py
+++ b/keylime/models/base/types/certificate.py
@@ -1,7 +1,7 @@
import base64
import binascii
import io
-from typing import Optional, TypeAlias, Union
+from typing import Optional, Union
import cryptography.x509
from cryptography.hazmat.primitives.serialization import Encoding
@@ -78,7 +78,7 @@ class Certificate(ModelType):
cert = Certificate().cast("-----BEGIN CERTIFICATE-----\nMIIE...")
"""
- IncomingValue: TypeAlias = Union[cryptography.x509.Certificate, bytes, str, None]
+ IncomingValue = Union[cryptography.x509.Certificate, bytes, str, None]
def __init__(self) -> None:
super().__init__(Text)
@@ -195,18 +195,19 @@ class Certificate(ModelType):
"""
try:
- match self.infer_encoding(value):
- case "decoded":
- return None
- case "der":
- cryptography.x509.load_der_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
- case "pem":
- cryptography.x509.load_pem_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
- case "base64":
- der_value = base64.b64decode(value, validate=True) # type: ignore[reportArgumentType, arg-type]
- cryptography.x509.load_der_x509_certificate(der_value)
- case _:
- raise Exception
+ encoding_inf = self.infer_encoding(value)
+ if encoding_inf == "decoded":
+ return None
+
+ if encoding_inf == "der":
+ cryptography.x509.load_der_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
+ elif encoding_inf == "pem":
+ cryptography.x509.load_pem_x509_certificate(value) # type: ignore[reportArgumentType, arg-type]
+ elif encoding_inf == "base64":
+ der_value = base64.b64decode(value, validate=True) # type: ignore[reportArgumentType, arg-type]
+ cryptography.x509.load_der_x509_certificate(der_value)
+ else:
+ raise Exception
except Exception:
return False
@@ -227,37 +228,38 @@ class Certificate(ModelType):
if not value:
return None
- match self.infer_encoding(value):
- case "decoded":
- return value # type: ignore[reportReturnType, return-value]
- case "der":
- try:
- return self._load_der_cert(value) # type: ignore[reportArgumentType, arg-type]
- except PyAsn1Error as err:
- raise ValueError(
- f"value cast to certificate appears DER encoded but cannot be deserialized as such: {value!r}"
- ) from err
- case "pem":
- try:
- return self._load_pem_cert(value) # type: ignore[reportArgumentType, arg-type]
- except PyAsn1Error as err:
- raise ValueError(
- f"value cast to certificate appears PEM encoded but cannot be deserialized as such: "
- f"'{str(value)}'"
- ) from err
- case "base64":
- try:
- return self._load_der_cert(base64.b64decode(value, validate=True)) # type: ignore[reportArgumentType, arg-type]
- except (binascii.Error, PyAsn1Error) as err:
- raise ValueError(
- f"value cast to certificate appears Base64 encoded but cannot be deserialized as such: "
- f"'{str(value)}'"
- ) from err
- case _:
- raise TypeError(
- f"value cast to certificate is of type '{value.__class__.__name__}' but should be one of 'str', "
- f"'bytes' or 'cryptography.x509.Certificate': '{str(value)}'"
- )
+ encoding_inf = self.infer_encoding(value)
+ if encoding_inf == "decoded":
+ return value # type: ignore[reportReturnType, return-value]
+
+ if encoding_inf == "der":
+ try:
+ return self._load_der_cert(value) # type: ignore[reportArgumentType, arg-type]
+ except PyAsn1Error as err:
+ raise ValueError(
+ f"value cast to certificate appears DER encoded but cannot be deserialized as such: {value!r}"
+ ) from err
+ elif encoding_inf == "pem":
+ try:
+ return self._load_pem_cert(value) # type: ignore[reportArgumentType, arg-type]
+ except PyAsn1Error as err:
+ raise ValueError(
+ f"value cast to certificate appears PEM encoded but cannot be deserialized as such: "
+ f"'{str(value)}'"
+ ) from err
+ elif encoding_inf == "base64":
+ try:
+ return self._load_der_cert(base64.b64decode(value, validate=True)) # type: ignore[reportArgumentType, arg-type]
+ except (binascii.Error, PyAsn1Error) as err:
+ raise ValueError(
+ f"value cast to certificate appears Base64 encoded but cannot be deserialized as such: "
+ f"'{str(value)}'"
+ ) from err
+ else:
+ raise TypeError(
+ f"value cast to certificate is of type '{value.__class__.__name__}' but should be one of 'str', "
+ f"'bytes' or 'cryptography.x509.Certificate': '{str(value)}'"
+ )
def generate_error_msg(self, _value: IncomingValue) -> str:
return "must be a valid X.509 certificate in PEM format or otherwise encoded using Base64"
diff --git a/keylime/models/base/types/dictionary.py b/keylime/models/base/types/dictionary.py
index 7d9e811..d9ffec3 100644
--- a/keylime/models/base/types/dictionary.py
+++ b/keylime/models/base/types/dictionary.py
@@ -1,5 +1,5 @@
import json
-from typing import Optional, TypeAlias, Union
+from typing import Optional, Union
from sqlalchemy.types import Text
@@ -50,7 +50,7 @@ class Dictionary(ModelType):
kv_pairs = Dictionary().cast('{"key": "value"}')
"""
- IncomingValue: TypeAlias = Union[dict, str, None]
+ IncomingValue = Union[dict, str, None]
def __init__(self) -> None:
super().__init__(Text)
diff --git a/keylime/models/base/types/one_of.py b/keylime/models/base/types/one_of.py
index 479d417..faf097d 100644
--- a/keylime/models/base/types/one_of.py
+++ b/keylime/models/base/types/one_of.py
@@ -1,6 +1,6 @@
from collections import Counter
from inspect import isclass
-from typing import Any, Optional, TypeAlias, Union
+from typing import Any, Optional, Union
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.types import Float, Integer, String, TypeEngine
@@ -65,8 +65,8 @@ class OneOf(ModelType):
incoming PEM value would not be cast to a certificate object and remain a string.
"""
- Declaration: TypeAlias = Union[str, int, float, ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
- PermittedList: TypeAlias = list[Union[str, int, float, ModelType]]
+ Declaration = Union[str, int, float, ModelType, TypeEngine, type[ModelType], type[TypeEngine]]
+ PermittedList = list[Union[str, int, float, ModelType]]
def __init__(self, *args: Declaration) -> None:
# pylint: disable=super-init-not-called
diff --git a/keylime/models/registrar/registrar_agent.py b/keylime/models/registrar/registrar_agent.py
index 560c188..b232049 100644
--- a/keylime/models/registrar/registrar_agent.py
+++ b/keylime/models/registrar/registrar_agent.py
@@ -153,21 +153,22 @@ class RegistrarAgent(PersistableModel):
names = ", ".join(non_compliant_certs)
names = " and".join(names.rsplit(",", 1))
- match config.get("registrar", "malformed_cert_action"):
- case "ignore":
- return
- case "reject":
- logger.error(
- "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were rejected due to "
- "config ('malformed_cert_action = reject')",
- names,
- )
- case _:
- logger.warning(
- "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were re-encoded before "
- "parsing by python-cryptography",
- names,
- )
+ cfg = config.get("registrar", "malformed_cert_action")
+ if cfg == "ignore":
+ return
+
+ if cfg == "reject":
+ logger.error(
+ "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were rejected due to "
+ "config ('malformed_cert_action = reject')",
+ names,
+ )
+ else:
+ logger.warning(
+ "Certificate(s) %s may not conform to strict ASN.1 DER encoding rules and were re-encoded before "
+ "parsing by python-cryptography",
+ names,
+ )
def _bind_ak_to_iak(self, iak_attest, iak_sign):
# The ak-iak binding should only be verified when either aik_tpm or iak_tpm is changed
diff --git a/keylime/policy/create_runtime_policy.py b/keylime/policy/create_runtime_policy.py
index 6a412c4..8e1c687 100644
--- a/keylime/policy/create_runtime_policy.py
+++ b/keylime/policy/create_runtime_policy.py
@@ -972,7 +972,7 @@ def create_runtime_policy(args: argparse.Namespace) -> Optional[RuntimePolicyTyp
)
abort = True
else:
- if a not in algorithms.Hash:
+ if a not in set(algorithms.Hash):
if a == SHA256_OR_SM3:
algo = a
else:
diff --git a/keylime/registrar_client.py b/keylime/registrar_client.py
index 705ff12..97fbc2a 100644
--- a/keylime/registrar_client.py
+++ b/keylime/registrar_client.py
@@ -13,12 +13,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired
-else:
- from typing_extensions import NotRequired
-
-
class RegistrarData(TypedDict):
ip: Optional[str]
port: Optional[str]
@@ -27,7 +21,7 @@ class RegistrarData(TypedDict):
aik_tpm: str
ek_tpm: str
ekcert: Optional[str]
- provider_keys: NotRequired[Dict[str, str]]
+ provider_keys: Dict[str, str]
logger = keylime_logging.init_logging("registrar_client")
diff --git a/keylime/web/base/action_handler.py b/keylime/web/base/action_handler.py
index b20de89..e7b5888 100644
--- a/keylime/web/base/action_handler.py
+++ b/keylime/web/base/action_handler.py
@@ -1,4 +1,5 @@
import re
+import sys
import time
import traceback
from inspect import iscoroutinefunction
@@ -48,7 +49,11 @@ class ActionHandler(RequestHandler):
# Take the list of strings returned by format_exception, where each string ends in a newline and may contain
# internal newlines, and split the concatenation of all the strings by newline
- message = "".join(traceback.format_exception(err))
+ if sys.version_info < (3, 10):
+ message = "".join(traceback.format_exception(err, None, None))
+ else:
+ message = "".join(traceback.format_exception(err))
+
lines = message.split("\n")
for line in lines:
diff --git a/keylime/web/base/controller.py b/keylime/web/base/controller.py
index f1ac3c5..153535e 100644
--- a/keylime/web/base/controller.py
+++ b/keylime/web/base/controller.py
@@ -2,7 +2,7 @@ import http.client
import json
import re
from types import MappingProxyType
-from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence, TypeAlias, Union
+from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence, Union
from tornado.escape import parse_qs_bytes
from tornado.httputil import parse_body_arguments
@@ -15,14 +15,16 @@ if TYPE_CHECKING:
from keylime.models.base.basic_model import BasicModel
from keylime.web.base.action_handler import ActionHandler
-PathParams: TypeAlias = Mapping[str, str]
-QueryParams: TypeAlias = Mapping[str, str | Sequence[str]]
-MultipartParams: TypeAlias = Mapping[str, Union[str, bytes, Sequence[str | bytes]]]
-FormParams: TypeAlias = Union[QueryParams, MultipartParams]
-JSONConvertible: TypeAlias = Union[str, int, float, bool, None, "JSONObjectConvertible", "JSONArrayConvertible"]
-JSONObjectConvertible: TypeAlias = Mapping[str, JSONConvertible]
-JSONArrayConvertible: TypeAlias = Sequence[JSONConvertible] # pyright: ignore[reportInvalidTypeForm]
-Params: TypeAlias = Mapping[str, Union[str, bytes, Sequence[str | bytes], JSONObjectConvertible, JSONArrayConvertible]]
+PathParams = Mapping[str, str]
+QueryParams = Mapping[str, Union[str, Sequence[str]]]
+MultipartParams = Mapping[str, Union[str, bytes, Union[Sequence[str], Sequence[bytes]]]]
+FormParams = Union[QueryParams, MultipartParams]
+JSONConvertible = Union[str, int, float, bool, None, "JSONObjectConvertible", "JSONArrayConvertible"]
+JSONObjectConvertible = Mapping[str, JSONConvertible]
+JSONArrayConvertible = Sequence[JSONConvertible] # pyright: ignore[reportInvalidTypeForm]
+Params = Mapping[
+ str, Union[str, bytes, Union[Sequence[str], Sequence[bytes]], JSONObjectConvertible, JSONArrayConvertible]
+]
class Controller:
@@ -77,7 +79,7 @@ class Controller:
VERSION_REGEX = re.compile("^\\/v(\\d+)(?:\\.(\\d+))*")
@staticmethod
- def decode_url_query(query: str | bytes) -> QueryParams:
+ def decode_url_query(query: Union[str, bytes]) -> QueryParams:
"""Parses a binary query string (whether from a URL or HTTP body) into a dict of Unicode strings. If multiple
instances of the same key are present in the string, their values are collected into a list.
@@ -135,8 +137,8 @@ class Controller:
@staticmethod
def prepare_http_body(
- body: Union[str, JSONObjectConvertible | JSONArrayConvertible, Any], content_type: Optional[str] = None
- ) -> tuple[Optional[bytes | Any], Optional[str]]:
+ body: Union[str, Union[JSONObjectConvertible, JSONArrayConvertible], Any], content_type: Optional[str] = None
+ ) -> tuple[Optional[Union[bytes, Any]], Optional[str]]:
"""Prepares an object to be included in the body of an HTTP request or response and infers the appropriate
media type unless provided. ``body`` will be serialised into JSON if it contains a ``dict`` or ``list`` which is
serialisable unless a ``content_type`` other than ``"application/json"`` is provided.
@@ -155,32 +157,34 @@ class Controller:
if content_type:
content_type = content_type.lower().strip()
- body_out: Optional[bytes | Any]
- content_type_out: Optional[str]
-
- match (body, content_type):
- case (None, _):
- body_out = None
- content_type_out = content_type
- case ("", _):
- body_out = b""
- content_type_out = "text/plain; charset=utf-8"
- case (_, "text/plain"):
+ body_out: Optional[bytes | Any] = None
+ content_type_out: Optional[str] = None
+
+ if body is None:
+ body_out = None
+ content_type_out = content_type
+ elif body == "":
+ body_out = b""
+ content_type_out = "text/plain; charset=utf-8"
+ else:
+ if content_type == "text/plain":
body_out = str(body).encode("utf-8")
content_type_out = "text/plain; charset=utf-8"
- case (_, "application/json") if isinstance(body, str):
- body_out = body.encode("utf-8")
- content_type_out = "application/json"
- case (_, "application/json"):
- body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
- content_type_out = "application/json"
- case (_, None) if isinstance(body, str):
- body_out = body.encode("utf-8")
- content_type_out = "text/plain; charset=utf-8"
- case (_, None) if isinstance(body, (dict, list)):
- body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
- content_type_out = "application/json"
- case (_, _):
+ elif content_type == "application/json":
+ if isinstance(body, str):
+ body_out = body.encode("utf-8")
+ content_type_out = "application/json"
+ else:
+ body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
+ content_type_out = "application/json"
+ elif content_type is None:
+ if isinstance(body, str):
+ body_out = body.encode("utf-8")
+ content_type_out = "text/plain; charset=utf-8"
+ elif isinstance(body, (dict, list)):
+ body_out = json.dumps(body, allow_nan=False, indent=4).encode("utf-8")
+ content_type_out = "application/json"
+ else:
body_out = body
content_type_out = content_type
@@ -248,7 +252,7 @@ class Controller:
self,
code: int = 200,
status: Optional[str] = None,
- data: Optional[JSONObjectConvertible | JSONArrayConvertible] = None,
+ data: Optional[Union[JSONObjectConvertible, JSONArrayConvertible]] = None,
) -> None:
"""Converts a Python data structure to JSON and wraps it in the following boilerplate JSON object which is
returned by all v2 endpoints:
diff --git a/tox.ini b/tox.ini
index 031ac54..ce3974c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -51,3 +51,13 @@ commands = black --diff ./keylime ./test
deps =
isort
commands = isort --diff --check ./keylime ./test
+
+
+[testenv:pylint39]
+basepython = python3.9
+deps =
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+ pylint
+commands = bash scripts/check_codestyle.sh
+allowlist_externals = bash
--
2.47.1

View File

@ -1,104 +0,0 @@
Subject: [PATCH] Remove usage of Required/NotRequired from typing_extensions
Since we do not yet have typing_extensions packaged, let us not
use its functionality yet.
---
keylime/ima/types.py | 33 ++++++++++++++-------------------
keylime/registrar_client.py | 8 +-------
2 files changed, 15 insertions(+), 26 deletions(-)
diff --git a/keylime/ima/types.py b/keylime/ima/types.py
index 99f0aa7..a0fffdf 100644
--- a/keylime/ima/types.py
+++ b/keylime/ima/types.py
@@ -6,11 +6,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import Literal, TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired, Required
-else:
- from typing_extensions import NotRequired, Required
-
### Types for tpm_dm.py
RuleAttributeType = Optional[Union[int, str, bool]]
@@ -51,7 +46,7 @@ class Rule(TypedDict):
class Policies(TypedDict):
- version: Required[int]
+ version: int
match_on: MatchKeyType
rules: Dict[str, Rule]
@@ -60,27 +55,27 @@ class Policies(TypedDict):
class RPMetaType(TypedDict):
- version: Required[int]
- generator: NotRequired[int]
- timestamp: NotRequired[str]
+ version: int
+ generator: int
+ timestamp: str
class RPImaType(TypedDict):
- ignored_keyrings: Required[List[str]]
- log_hash_alg: Required[Literal["sha1", "sha256", "sha384", "sha512"]]
+ ignored_keyrings: List[str]
+ log_hash_alg: Literal["sha1", "sha256", "sha384", "sha512"]
dm_policy: Optional[Policies]
RuntimePolicyType = TypedDict(
"RuntimePolicyType",
{
- "meta": Required[RPMetaType],
- "release": NotRequired[int],
- "digests": Required[Dict[str, List[str]]],
- "excludes": Required[List[str]],
- "keyrings": Required[Dict[str, List[str]]],
- "ima": Required[RPImaType],
- "ima-buf": Required[Dict[str, List[str]]],
- "verification-keys": Required[str],
+ "meta": RPMetaType,
+ "release": int,
+ "digests": Dict[str, List[str]],
+ "excludes": List[str],
+ "keyrings": Dict[str, List[str]],
+ "ima": RPImaType,
+ "ima-buf": Dict[str, List[str]],
+ "verification-keys": str,
},
)
diff --git a/keylime/registrar_client.py b/keylime/registrar_client.py
index ab28977..ea5341b 100644
--- a/keylime/registrar_client.py
+++ b/keylime/registrar_client.py
@@ -13,12 +13,6 @@ if sys.version_info >= (3, 8):
else:
from typing_extensions import TypedDict
-if sys.version_info >= (3, 11):
- from typing import NotRequired
-else:
- from typing_extensions import NotRequired
-
-
class RegistrarData(TypedDict):
ip: Optional[str]
port: Optional[str]
@@ -27,7 +21,7 @@ class RegistrarData(TypedDict):
aik_tpm: str
ek_tpm: str
ekcert: Optional[str]
- provider_keys: NotRequired[Dict[str, str]]
+ provider_keys: Dict[str, str]
logger = keylime_logging.init_logging("registrar_client")
--
2.41.0

View File

@ -1,27 +0,0 @@
From e8a1fa55ff0892ee2380e832ac94abc629b401d6 Mon Sep 17 00:00:00 2001
From: Patrik Koncity <pkoncity@redhat.com>
Date: Thu, 10 Aug 2023 07:47:04 -0400
Subject: [PATCH 2/2] Allow keylime_server_t tcp connect to several domains
---
keylime-selinux-1.2.0/keylime.te | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/keylime-selinux-1.2.0/keylime.te b/keylime-selinux-1.2.0/keylime.te
index 8d47d26..8e6487b 100644
--- a/keylime-selinux-1.2.0/keylime.te
+++ b/keylime-selinux-1.2.0/keylime.te
@@ -83,6 +83,10 @@ allow keylime_server_t self:udp_socket create_stream_socket_perms;
manage_dirs_pattern(keylime_server_t, keylime_log_t, keylime_log_t)
manage_files_pattern(keylime_server_t, keylime_log_t, keylime_log_t)
+corenet_tcp_connect_http_cache_port(keylime_server_t)
+corenet_tcp_connect_mysqld_port(keylime_server_t)
+corenet_tcp_connect_postgresql_port(keylime_server_t)
+
fs_getattr_all_fs(keylime_server_t)
fs_rw_inherited_tmpfs_files(keylime_server_t)
--
2.39.3

View File

@ -0,0 +1,58 @@
From 5c5c7f7f7180111485b24061af4c0395476958b5 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Thu, 22 May 2025 11:25:15 -0400
Subject: [PATCH 2/6] tests: fix rpm repo tests from create-runtime-policy
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
.../create-runtime-policy/setup-rpm-tests | 28 +++++++++++++------
1 file changed, 20 insertions(+), 8 deletions(-)
diff --git a/test/data/create-runtime-policy/setup-rpm-tests b/test/data/create-runtime-policy/setup-rpm-tests
index 708438c..b62729b 100755
--- a/test/data/create-runtime-policy/setup-rpm-tests
+++ b/test/data/create-runtime-policy/setup-rpm-tests
@@ -217,20 +217,32 @@ create_rpm() {
# https://github.com/rpm-software-management/rpm/commit/96467dce18f264b278e17ffe1859c88d9b5aa4b6
_pkgname="DUMMY-${_name}-${_version}-${_rel}.noarch.rpm"
- _expected_pkg="${RPMSDIR}/noarch/${_pkgname}"
- [ -e "${_expected_pkg}" ] && return 0
+ # For some reason, it may not store the built package within the
+ # noarch directory, but directly in RPMS, so let's check both
+ # locations.
+ _expected_pkg="${RPMSDIR}/noarch/${_pkgname} ${RPMSDIR}/${_pkgname}"
+ for _expected in ${_expected_pkg}; do
+ if [ -e "${_expected}" ]; then
+ echo "(create_rpm) CREATED RPM: ${_expected}" >&2
+ return 0
+ fi
+ done
# OK, the package was not built where it should. Let us see if
# it was built in ~/rpmbuild instead, and if that is the case,
# copy it to the expected location.
- _bad_location_pkg="${HOME}/rpmbuild/RPMS/noarch/${_pkgname}"
- if [ -e "${_bad_location_pkg}" ]; then
- echo "WARNING: the package ${_pkgname} was built into ~/rpmbuild despite rpmbuild being instructed to build it at a different location. Probably a fallout from https://github.com/rpm-software-management/rpm/commit/96467dce" >&2
- install -D -m644 "${_bad_location_pkg}" "${_expected_pkg}"
- return 0
- fi
+ _bad_location_pkg="${HOME}/rpmbuild/RPMS/noarch/${_pkgname} ${HOME}/rpmbuild/RPMS/${_pkgname}"
+ for _bad_l in ${_bad_location_pkg}; do
+ if [ -e "${_bad_l}" ]; then
+ echo "WARNING: the package ${_pkgname} was built into ~/rpmbuild despite rpmbuild being instructed to build it at a different location. Probably a fallout from https://github.com/rpm-software-management/rpm/commit/96467dce" >&2
+ install -D -m644 "${_bad_l}" "${RPMSDIR}/noarch/${_pkgname}"
+ echo "(create_rpm) CREATED RPM: ${RPMSDIR}/noarch/${_pkgname}" >&2
+ return 0
+ fi
+ done
# Should not be here.
+ echo "create_rpm() ended with error; probably an issue with the location where the RPMs were built" >&2
return 1
}
--
2.47.1

View File

@ -1,51 +0,0 @@
From b8e26ca5e98e1b842db2fc21411962d40f27c557 Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Tue, 15 Aug 2023 07:19:28 -0400
Subject: [PATCH 3/4] Use version 2.0 as the minimum for the configuration
---
keylime/cmd/convert_config.py | 16 +++++++++++-----
1 file changed, 11 insertions(+), 5 deletions(-)
diff --git a/keylime/cmd/convert_config.py b/keylime/cmd/convert_config.py
index ac28151..1d71b99 100755
--- a/keylime/cmd/convert_config.py
+++ b/keylime/cmd/convert_config.py
@@ -191,7 +191,13 @@ def output(components: List[str], config: RawConfigParser, templates: str, outdi
# Check that there are templates for all components
for component in components:
- version = config[component]["version"].strip('" ')
+ # Minimum version.
+ version = '2.0'
+ if "version" in config[component]:
+ version = config[component]["version"].strip('" ')
+ else:
+ config[component]["version"] = version
+
version_dir = os.path.join(templates, version)
if not os.path.isdir(version_dir):
raise Exception(f"Could not find directory {version_dir}")
@@ -292,15 +298,15 @@ def process_mapping(
raise Exception("Invalid version number found in old configuration")
except (configparser.NoOptionError, configparser.NoSectionError):
- print(f"No version found in old configuration for {component}, using '1.0'")
- old_version = (1, 0)
+ print(f"No version found in old configuration for {component}, using '2.0'")
+ old_version = (2, 0)
else:
# If the old_version does not contain the component from the
# mapping, use the minimum version to use defaults
- old_version = (1, 0)
+ old_version = (2, 0)
# Skip versions lower than the current version
- if old_version >= new_version:
+ if old_version >= new_version and component in old_config:
new[component] = old_config[component]
continue
--
2.39.3

View File

@ -0,0 +1,52 @@
From 4e7cd6b75de27897ecc8e7329732cd945f7adfd0 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Thu, 22 May 2025 18:27:04 +0100
Subject: [PATCH 3/6] tests: skip measured-boot related tests for s390x and
ppc64le
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
test/test_create_mb_policy.py | 2 ++
test/test_mba_parsing.py | 2 ++
2 files changed, 4 insertions(+)
diff --git a/test/test_create_mb_policy.py b/test/test_create_mb_policy.py
index eaed0e3..b00d8e7 100644
--- a/test/test_create_mb_policy.py
+++ b/test/test_create_mb_policy.py
@@ -5,6 +5,7 @@ Copyright 2024 Red Hat, Inc.
import argparse
import os
+import platform
import unittest
from keylime.policy import create_mb_policy
@@ -12,6 +13,7 @@ from keylime.policy import create_mb_policy
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "data", "create-mb-policy"))
+@unittest.skipIf(platform.machine() in ["ppc64le", "s390x"], "ppc64le and s390x are not supported")
class CreateMeasuredBootPolicy_Test(unittest.TestCase):
def test_event_to_sha256(self):
test_cases = [
diff --git a/test/test_mba_parsing.py b/test/test_mba_parsing.py
index 670a602..e157116 100644
--- a/test/test_mba_parsing.py
+++ b/test/test_mba_parsing.py
@@ -1,10 +1,12 @@
import os
+import platform
import unittest
from keylime.common.algorithms import Hash
from keylime.mba import mba
+@unittest.skipIf(platform.machine() in ["ppc64le", "s390x"], "ppc64le and s390x are not supported")
class TestMBAParsing(unittest.TestCase):
def test_parse_bootlog(self):
"""Test parsing binary measured boot event log"""
--
2.47.1

View File

@ -1,88 +0,0 @@
From dbd521e8e8f0ffd9ace79c7b9b888f4cb89488f9 Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Tue, 15 Aug 2023 06:09:37 -0400
Subject: [PATCH 4/4] Duplicate str_to_version for the upgrade tool
So it does not depend on python-keylime
---
keylime/cmd/convert_config.py | 24 ++++++++++++++++++++++--
templates/2.0/adjust.py | 22 ++++++++++++++++++++--
2 files changed, 42 insertions(+), 4 deletions(-)
diff --git a/keylime/cmd/convert_config.py b/keylime/cmd/convert_config.py
index c1c6180..cad5e31 100755
--- a/keylime/cmd/convert_config.py
+++ b/keylime/cmd/convert_config.py
@@ -84,13 +84,33 @@ import importlib.util
import itertools
import json
import os
+import re
import shutil
from configparser import RawConfigParser
-from typing import List, Optional, Tuple
+from typing import List, Optional, Tuple, Union
from jinja2 import Template
-from keylime.common.version import str_to_version
+
+def str_to_version(v_str: str) -> Union[Tuple[int, int], None]:
+ """
+ Validates the string format and converts the provided string to a tuple of
+ ints which can be sorted and compared.
+
+ :returns: Tuple with version number parts converted to int. In case of
+ invalid version string, returns None
+ """
+
+ # Strip to remove eventual quotes and spaces
+ v_str = v_str.strip('" ')
+
+ m = re.match(r"^(\d+)\.(\d+)$", v_str)
+
+ if not m:
+ return None
+
+ return (int(m.group(1)), int(m.group(2)))
+
COMPONENTS = ["agent", "verifier", "tenant", "registrar", "ca", "logging"]
diff --git a/templates/2.0/adjust.py b/templates/2.0/adjust.py
index 312b790..c1e582a 100644
--- a/templates/2.0/adjust.py
+++ b/templates/2.0/adjust.py
@@ -2,9 +2,27 @@ import ast
import configparser
import re
from configparser import RawConfigParser
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Optional, Tuple, Union
-from keylime.common.version import str_to_version
+
+def str_to_version(v_str: str) -> Union[Tuple[int, int], None]:
+ """
+ Validates the string format and converts the provided string to a tuple of
+ ints which can be sorted and compared.
+
+ :returns: Tuple with version number parts converted to int. In case of
+ invalid version string, returns None
+ """
+
+ # Strip to remove eventual quotes and spaces
+ v_str = v_str.strip('" ')
+
+ m = re.match(r"^(\d+)\.(\d+)$", v_str)
+
+ if not m:
+ return None
+
+ return (int(m.group(1)), int(m.group(2)))
def adjust(config: RawConfigParser, mapping: Dict) -> None: # pylint: disable=unused-argument
--
2.39.3

View File

@ -0,0 +1,52 @@
From 7ca86e1c0d68f45915d9f583ffaf149285905005 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 3 Jun 2025 10:50:48 +0100
Subject: [PATCH 4/6] templates: duplicate str_to_version() in the adjust
script
As a follow-up of upstream PR#1486, duplicate the str_to_version()
method in adjust.py so that we do not need the keylime modules in
order for the configuration upgrade script to run.
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
templates/2.0/adjust.py | 22 ++++++++++++++++++++--
1 file changed, 20 insertions(+), 2 deletions(-)
diff --git a/templates/2.0/adjust.py b/templates/2.0/adjust.py
index 6008e4c..24ba898 100644
--- a/templates/2.0/adjust.py
+++ b/templates/2.0/adjust.py
@@ -4,9 +4,27 @@ import logging
import re
from configparser import RawConfigParser
from logging import Logger
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, Tuple, Union
-from keylime.common.version import str_to_version
+
+def str_to_version(v_str: str) -> Union[Tuple[int, int], None]:
+ """
+ Validates the string format and converts the provided string to a tuple of
+ ints which can be sorted and compared.
+
+ :returns: Tuple with version number parts converted to int. In case of
+ invalid version string, returns None
+ """
+
+ # Strip to remove eventual quotes and spaces
+ v_str = v_str.strip('" ')
+
+ m = re.match(r"^(\d+)\.(\d+)$", v_str)
+
+ if not m:
+ return None
+
+ return (int(m.group(1)), int(m.group(2)))
def adjust(
--
2.47.1

View File

@ -0,0 +1,404 @@
From c60460eccab93863dbd1fd0b748e5a275c8e6737 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 3 Jun 2025 21:29:15 +0100
Subject: [PATCH 5/6] Restore RHEL-9 version of create_allowlist.sh
Signed-off-by: Sergio Correia <scorreia@redhat.com>
---
scripts/create_runtime_policy.sh | 335 ++++++++++---------------------
1 file changed, 104 insertions(+), 231 deletions(-)
diff --git a/scripts/create_runtime_policy.sh b/scripts/create_runtime_policy.sh
index 90ba50b..c0b641d 100755
--- a/scripts/create_runtime_policy.sh
+++ b/scripts/create_runtime_policy.sh
@@ -1,282 +1,155 @@
-#!/usr/bin/env bash
+#!/usr/bin/bash
################################################################################
# SPDX-License-Identifier: Apache-2.0
# Copyright 2017 Massachusetts Institute of Technology.
################################################################################
-
-if [ $0 != "-bash" ] ; then
- pushd `dirname "$0"` > /dev/null 2>&1
-fi
-KCRP_BASE_DIR=$(pwd)
-if [ $0 != "-bash" ] ; then
- popd 2>&1 > /dev/null
-fi
-KCRP_BASE_DIR=$KCRP_BASE_DIR/..
-
-function detect_hash {
- local hashstr=$1
-
- case "${#hashstr}" in
- 32) hashalgo=md5sum ;;
- 40) hashalgo=sha1sum ;;
- 64) hashalgo=sha256sum ;;
- 128) hashalgo=sha512sum ;;
- *) hashalgo="na";;
- esac
-
- echo $hashalgo
-}
-
-function announce {
- # 1 - MESSAGE
-
- MESSAGE=$(echo "${1}" | tr '\n' ' ')
- MESSAGE=$(echo $MESSAGE | sed "s/\t\t*/ /g")
-
- echo "==> $(date) - ${0} - $MESSAGE"
-}
-
-function valid_algo {
- local algo=$1
-
- [[ " ${ALGO_LIST[@]} " =~ " ${algo} " ]]
-}
-
# Configure the installer here
INITRAMFS_TOOLS_GIT=https://salsa.debian.org/kernel-team/initramfs-tools.git
INITRAMFS_TOOLS_VER="master"
-# All defaults
-ALGO=sha1sum
-WORK_DIR=/tmp/kcrp
-OUTPUT_DIR=${WORK_DIR}/output
-ALLOWLIST_DIR=${WORK_DIR}/allowlist
-INITRAMFS_LOC="/boot/"
-INITRAMFS_STAGING_DIR=${WORK_DIR}/ima_ramfs/
-INITRAMFS_TOOLS_DIR=${WORK_DIR}/initramfs-tools
-BOOT_AGGREGATE_LOC="/sys/kernel/security/ima/ascii_runtime_measurements"
-ROOTFS_LOC="/"
-EXCLUDE_LIST="none"
-SKIP_PATH="none"
-ALGO_LIST=("sha1sum" "sha256sum" "sha512sum")
+WORKING_DIR=$(readlink -f "$0")
+WORKING_DIR=$(dirname "$WORKING_DIR")
# Grabs Debian's initramfs_tools from Git repo if no other options exist
if [[ ! `command -v unmkinitramfs` && ! -x "/usr/lib/dracut/skipcpio" ]] ; then
# Create temp dir for pulling in initramfs-tools
- announce "INFO: Downloading initramfs-tools: $INITRAMFS_TOOLS_DIR"
+ TMPDIR=`mktemp -d` || exit 1
+ echo "INFO: Downloading initramfs-tools: $TMPDIR"
- mkdir -p $INITRAMFS_TOOLS_DIR
# Clone initramfs-tools repo
- pushd $INITRAMFS_TOOLS_DIR > /dev/null 2>&1
- git clone $INITRAMFS_TOOLS_GIT initramfs-tools > /dev/null 2>&1
- pushd initramfs-tools > /dev/null 2>&1
- git checkout $INITRAMFS_TOOLS_VER > /dev/null 2>&1
- popd > /dev/null 2>&1
- popd > /dev/null 2>&1
+ pushd $TMPDIR
+ git clone $INITRAMFS_TOOLS_GIT initramfs-tools
+ pushd initramfs-tools
+ git checkout $INITRAMFS_TOOLS_VER
+ popd # $TMPDIR
+ popd
shopt -s expand_aliases
- alias unmkinitramfs=$INITRAMFS_TOOLS_DIR/initramfs-tools/unmkinitramfs
-
- which unmkinitramfs > /dev/null 2>&1 || exit 1
+ alias unmkinitramfs=$TMPDIR/initramfs-tools/unmkinitramfs
fi
+
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" 1>&2
exit 1
fi
-USAGE=$(cat <<-END
- Usage: $0 -o/--output_file FILENAME [-a/--algo ALGO] [-x/--ramdisk-location PATH] [-y/--boot_aggregate-location PATH] [-z/--rootfs-location PATH] [-e/--exclude_list FILENAME] [-s/--skip-path PATH] [-h/--help]
+if [ $# -lt 1 ]
+then
+ echo "No arguments provided" >&2
+ echo "Usage: `basename $0` -o [filename] -h [hash-algo]" >&2
+ exit $NOARGS;
+fi
- optional arguments:
- -a/--algo (checksum algorithm to be used, default: $ALGO)
- -x/--ramdisk-location (path to initramdisk, default: $INITRAMFS_LOC, set to "none" to skip)
- -y/--boot_aggregate-location (path for IMA log, used for boot aggregate extraction, default: $BOOT_AGGREGATE_LOC, set to "none" to skip)
- -z/--rootfs-location (path to root filesystem, default: $ROOTFS_LOC, cannot be skipped)
- -e/--exclude_list (filename containing a list of paths to be excluded (i.e., verifier will not try to match checksums, default: $EXCLUDE_LIST)
- -s/--skip-path (comma-separated path list, files found there will not have checksums calculated, default: $SKIP_PATH)
- -h/--help (show this message and exit)
-END
-)
+ALGO=sha256sum
-while [[ $# -gt 0 ]]
-do
- key="$1"
+ALGO_LIST=("sha1sum" "sha256sum" "sha512sum")
+
+valid_algo() {
+ local algo=$1
+
+ [[ " ${ALGO_LIST[@]} " =~ " ${algo} " ]]
+}
- case $key in
- -a|--algo)
- ALGO="$2"
- shift
- ;;
- -a=*|--algo=*)
- ALGO=$(echo $key | cut -d '=' -f 2)
- ;;
- -x|--ramdisk-location)
- INITRAMFS_LOC="$2"
- shift
- ;;
- -x=*|--ramdisk-location=*)
- INITRAMFS_LOC=$(echo $key | cut -d '=' -f 2)
- ;;
- -y|--boot_aggregate-location)
- BOOT_AGGREGATE_LOC=$2
- shift
- ;;
- -y=*|--boot_aggregate-location=*)
- BOOT_AGGREGATE_LOC=$(echo $key | cut -d '=' -f 2)
- ;;
- -z|--rootfs-location)
- ROOTFS_LOC=$2
- shift
- ;;
- -z=*|--rootfs-location=*)
- ROOTFS_LOC=$(echo $key | cut -d '=' -f 2)
- ;;
- -e|--exclude_list)
- EXCLUDE_LIST=$2
- shift
- ;;
- -e=*|--exclude_list=*)
- EXCLUDE_LIST=$(echo $key | cut -d '=' -f 2)
- ;;
- -o=*|--output_file=*)
- OUTPUT=$(echo $key | cut -d '=' -f 2)
- ;;
- -o|--output_file)
- OUTPUT=$2
- shift
- ;;
- -s=*|--skip-path=*)
- SKIP_PATH=$(echo $key | cut -d '=' -f 2)
- ;;
- -s|--skip-path)
- SKIP_PATH=$2
- shift
- ;;
- -h|--help)
- printf "%s\n" "$USAGE"
- exit 0
- shift
- ;;
- *)
- # unknown option
- ;;
- esac
- shift
+while getopts ":o:h:" opt; do
+ case $opt in
+ o)
+ OUTPUT=$(readlink -f $OPTARG)
+ rm -f $OUTPUT
+ ;;
+ h)
+ if valid_algo $OPTARG; then
+ ALGO=$OPTARG
+ else
+ echo "Invalid hash function argument: use sha1sum, sha256sum, or sha512sum"
+ exit 1
+ fi
+ ;;
+ esac
done
-if ! valid_algo $ALGO
+if [ ! "$OUTPUT" ]
then
- echo "Invalid hash function argument: pick from \"${ALGO_LIST[@]}\""
+ echo "Missing argument for -o" >&2;
+ echo "Usage: $0 -o [filename] -h [hash-algo]" >&2;
exit 1
fi
-if [[ -z $OUTPUT ]]
-then
- printf "%s\n" "$USAGE"
- exit 1
+
+# Where to look for initramfs image
+INITRAMFS_LOC="/boot"
+if [ -d "/ostree" ]; then
+ # If we are on an ostree system change where we look for initramfs image
+ loc=$(grep -E "/ostree/[^/]([^/]*)" -o /proc/cmdline | head -n 1 | cut -d / -f 3)
+ INITRAMFS_LOC="/boot/ostree/${loc}/"
fi
-rm -rf $ALLOWLIST_DIR
-rm -rf $INITRAMFS_STAGING_DIR
-rm -rf $OUTPUT_DIR
-announce "Writing allowlist $ALLOWLIST_DIR/${OUTPUT} with $ALGO..."
-mkdir -p $ALLOWLIST_DIR
+echo "Writing allowlist to $OUTPUT with $ALGO..."
-if [[ $BOOT_AGGREGATE_LOC != "none" ]]
-then
- announce "--- Adding boot agregate from $BOOT_AGGREGATE_LOC on allowlist $ALLOWLIST_DIR/${OUTPUT} ..."
# Add boot_aggregate from /sys/kernel/security/ima/ascii_runtime_measurements (IMA Log) file.
# The boot_aggregate measurement is always the first line in the IMA Log file.
# The format of the log lines is the following:
# <PCR_ID> <PCR_Value> <IMA_Template> <File_Digest> <File_Name> <File_Signature>
# File_Digest may start with the digest algorithm specified (e.g "sha1:", "sha256:") depending on the template used.
- head -n 1 $BOOT_AGGREGATE_LOC | awk '{ print $4 " boot_aggregate" }' | sed 's/.*://' >> $ALLOWLIST_DIR/${OUTPUT}
+head -n 1 /sys/kernel/security/ima/ascii_runtime_measurements | awk '{ print $4 " boot_aggregate" }' | sed 's/.*://' >> $OUTPUT
- bagghash=$(detect_hash $(cat $ALLOWLIST_DIR/${OUTPUT} | cut -d ' ' -f 1))
- if [[ $ALGO != $bagghash ]]
- then
- announce "ERROR: \"boot aggregate\" has was calculated with $bagghash, but files will be calculated with $ALGO. Use option -a $bagghash"
- exit 1
- fi
-else
- announce "--- Skipping boot aggregate..."
-fi
-
-announce "--- Adding all appropriate files from $ROOTFS_LOC on allowlist $ALLOWLIST_DIR/${OUTPUT} ..."
# Add all appropriate files under root FS to allowlist
-pushd $ROOTFS_LOC > /dev/null 2>&1
-BASE_EXCLUDE_DIRS="\bsys\b\|\brun\b\|\bproc\b\|\blost+found\b\|\bdev\b\|\bmedia\b\|\bsnap\b\|\bmnt\b\|\bvar\b\|\btmp\b"
-ROOTFS_FILE_LIST=$(ls | grep -v $BASE_EXCLUDE_DIRS)
-if [[ $SKIP_PATH != "none" ]]
-then
- SKIP_PATH=$(echo $SKIP_PATH | sed -e "s#^$ROOTFS_LOC##g" -e "s#,$ROOTFS_LOC##g" -e "s#,#\\\|#g")
- ROOTFS_FILE_LIST=$(echo "$ROOTFS_FILE_LIST" | grep -v "$SKIP_PATH")
-fi
-find $ROOTFS_FILE_LIST \( -fstype rootfs -o -xtype f -type l -o -type f \) -uid 0 -exec $ALGO "$ROOTFS_LOC/{}" >> $ALLOWLIST_DIR/${OUTPUT} \;
-popd > /dev/null 2>&1
+cd /
+find `ls / | grep -v "\bsys\b\|\brun\b\|\bproc\b\|\blost+found\b\|\bdev\b\|\bmedia\b\|\bsnap\b\|mnt"` \( -fstype rootfs -o -xtype f -type l -o -type f \) -uid 0 -exec $ALGO '/{}' >> $OUTPUT \;
# Create staging area for init ram images
-mkdir -p $INITRAMFS_STAGING_DIR
+rm -rf /tmp/ima/
+mkdir -p /tmp/ima
-if [[ $INITRAMFS_LOC != "none" ]]
-then
- # Where to look for initramfs image
- if [[ -d "/ostree" ]]
- then
- X=$INITRAMFS_LOC
- # If we are on an ostree system change where we look for initramfs image
- loc=$(grep -E "/ostree/[^/]([^/]*)" -o /proc/cmdline | head -n 1 | cut -d / -f 3)
- INITRAMFS_LOC="/boot/ostree/${loc}/"
- announce "--- The location of initramfs was overriden from \"${X}\" to \"$INITRAMFS_LOC\""
- fi
-
- announce "--- Creating allowlist for init ram disks found under \"$INITRAMFS_LOC\" to $ALLOWLIST_DIR/${OUTPUT} ..."
- for i in $(ls ${INITRAMFS_LOC}/initr* 2> /dev/null)
- do
- announce " extracting $i"
- mkdir -p $INITRAMFS_STAGING_DIR/$i-extracted
- cd $INITRAMFS_STAGING_DIR/$i-extracted
-
- # platform-specific handling of init ram disk images
- if [[ `command -v unmkinitramfs` ]] ; then
- mkdir -p $INITRAMFS_STAGING_DIR/$i-extracted-unmk
- unmkinitramfs $i $INITRAMFS_STAGING_DIR/$i-extracted-unmk
- if [[ -d "$INITRAMFS_STAGING_DIR/$i-extracted-unmk/main/" ]] ; then
- cp -r $INITRAMFS_STAGING_DIR/$i-extracted-unmk/main/. /tmp/ima/$i-extracted
- else
- cp -r $INITRAMFS_STAGING_DIR/$i-extracted-unmk/. /tmp/ima/$i-extracted
- fi
- elif [[ -x "/usr/lib/dracut/skipcpio" ]] ; then
- /usr/lib/dracut/skipcpio $i | gunzip -c | cpio -i -d 2> /dev/null
+# Iterate through init ram disks and add files to allowlist
+echo "Creating allowlist for init ram disk"
+for i in `ls ${INITRAMFS_LOC}/initr*`
+do
+ echo "extracting $i"
+ mkdir -p /tmp/ima/$i-extracted
+ cd /tmp/ima/$i-extracted
+
+ # platform-specific handling of init ram disk images
+ if [[ `command -v unmkinitramfs` ]] ; then
+ mkdir -p /tmp/ima/$i-extracted-unmk
+ unmkinitramfs $i /tmp/ima/$i-extracted-unmk
+ if [[ -d "/tmp/ima/$i-extracted-unmk/main/" ]] ; then
+ cp -r /tmp/ima/$i-extracted-unmk/main/. /tmp/ima/$i-extracted
else
- announce "ERROR: No tools for initramfs image processing found!"
- exit 1
+ cp -r /tmp/ima/$i-extracted-unmk/. /tmp/ima/$i-extracted
fi
+ elif [[ -x "/usr/lib/dracut/skipcpio" ]] ; then
+ /usr/lib/dracut/skipcpio $i | gunzip -c 2> /dev/null | cpio -i -d 2> /dev/null
+ else
+ echo "ERROR: No tools for initramfs image processing found!"
+ break
+ fi
- find -type f -exec $ALGO "./{}" \; | sed "s| \./\./| /|" >> $ALLOWLIST_DIR/${OUTPUT}
- done
-fi
-
-# Non-critical cleanup on the resulting file (when ROOTFS_LOC = '/', the path starts on allowlist ends up with double '//' )
-sed -i "s^ //^ /^g" $ALLOWLIST_DIR/${OUTPUT}
-# A bit of cleanup on the resulting file (among other problems, sha256sum might output a hash with the prefix '\\')
-sed -i "s/^\\\//g" $ALLOWLIST_DIR/${OUTPUT}
-
-# Convert to runtime policy
-mkdir -p $OUTPUT_DIR
-announce "Converting created allowlist ($ALLOWLIST_DIR/${OUTPUT}) to Keylime runtime policy ($OUTPUT_DIR/${OUTPUT}) ..."
-CONVERT_CMD_OPTS="--allowlist $ALLOWLIST_DIR/${OUTPUT} --output_file $OUTPUT_DIR/${OUTPUT}"
-[ -f $EXCLUDE_LIST ] && CONVERT_CMD_OPTS="$CONVERT_CMD_OPTS --excludelist "$(readlink -f -- "${EXCLUDE_LIST}")""
+ find -type f -exec $ALGO "./{}" \; | sed "s| \./\./| /|" >> $OUTPUT
+done
-pushd $KCRP_BASE_DIR > /dev/null 2>&1
-export PYTHONPATH=$KCRP_BASE_DIR:$PYTHONPATH
-# only 3 dependencies required: pip3 install cryptography lark packaging
-python3 ./keylime/cmd/convert_runtime_policy.py $CONVERT_CMD_OPTS; echo " "
-if [[ $? -eq 0 ]]
-then
- announce "Done, new runtime policy file present at ${OUTPUT_DIR}/$OUTPUT. It can be used on the tenant keylime host with \"keylime_tenant -c add --runtime-policy ${OUTPUT_DIR}/$OUTPUT <other options>"
-fi
-popd > /dev/null 2>&1
+# when ROOTFS_LOC = '/', the path starts on allowlist ends up with double '//'
+#
+# Example:
+#
+# b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c //bar
+#
+# Replace the unwanted '//' with a single '/'
+sed -i 's| /\+| /|g' $ALLOWLIST_DIR/${OUTPUT}
+
+# When the file name contains newlines or backslashes, the output of sha256sum
+# adds a backslash at the beginning of the line.
+#
+# Example:
+#
+# $ echo foo > ba\\r
+# $ sha256sum ba\\r
+# \b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c ba\\r
+#
+# Remove the unwanted backslash prefix
+sed -i 's/^\\//g' $ALLOWLIST_DIR/${OUTPUT}
+
+# Clean up
+rm -rf /tmp/ima
--
2.47.1

View File

@ -1,50 +0,0 @@
From f2432efbeb7b6305067111bb3a77ef5d7da4eb5b Mon Sep 17 00:00:00 2001
From: Thore Sommer <mail@thson.de>
Date: Thu, 10 Aug 2023 16:15:57 +0300
Subject: [PATCH 5/6] elchecking/example: add ignores for
EV_PLATFORM_CONFIG_FLAGS
These are generated by edk2 when used with QEMU, but we do not have a
reference for them.
Signed-off-by: Thore Sommer <mail@thson.de>
---
keylime/mba/elchecking/example.py | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/keylime/mba/elchecking/example.py b/keylime/mba/elchecking/example.py
index 8885227..921db4e 100644
--- a/keylime/mba/elchecking/example.py
+++ b/keylime/mba/elchecking/example.py
@@ -75,7 +75,6 @@ shim_authcode_sha256_no_secureboot = tests.obj_test(
kernel_cmdline=tests.type_test(str),
)
-
allowed_kernel_list_test_no_secureboot = tests.list_test(shim_authcode_sha256_no_secureboot)
@@ -303,6 +302,20 @@ class Example(policies.Policy):
),
),
)
+ # edk2 measures up to 4 of those events, where we do not have a good way to get a reference
+ # See:
+ # - https://github.com/keylime/keylime/issues/1393
+ # - https://github.com/tianocore/edk2/commit/935343cf1639a28530904a1e8d73d6517a07cbff
+ dispatcher.set(
+ (1, "EV_PLATFORM_CONFIG_FLAGS"),
+ tests.Or(
+ tests.OnceTest(tests.AcceptAll()),
+ tests.OnceTest(tests.AcceptAll()),
+ tests.OnceTest(tests.AcceptAll()),
+ tests.OnceTest(tests.AcceptAll()),
+ ),
+ )
+
dispatcher.set((4, "EV_EFI_ACTION"), tests.EvEfiActionTest(4))
for pcr in range(8):
dispatcher.set((pcr, "EV_SEPARATOR"), tests.EvSeperatorTest())
--
2.39.3

View File

@ -1,43 +0,0 @@
From ed213b9533535ceae5026b2fab274f80bcc58cb8 Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Tue, 15 Aug 2023 09:18:32 -0400
Subject: [PATCH 6/6] Revert mapping changes
---
templates/2.0/mapping.json | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/templates/2.0/mapping.json b/templates/2.0/mapping.json
index 66addbc..0036b63 100644
--- a/templates/2.0/mapping.json
+++ b/templates/2.0/mapping.json
@@ -207,7 +207,7 @@
"registrar_port": {
"section": "cloud_verifier",
"option": "registrar_port",
- "default": "8881"
+ "default": "8891"
},
"tls_dir": {
"section": "cloud_verifier",
@@ -232,7 +232,7 @@
"server_key_password": {
"section": "cloud_verifier",
"option": "private_key_pw",
- "default": ""
+ "default": "default"
},
"enable_agent_mtls": {
"section": "cloud_verifier",
@@ -558,7 +558,7 @@
"server_key_password": {
"section": "registrar",
"option": "private_key_pw",
- "default": ""
+ "default": "default"
},
"server_cert": {
"section": "registrar",
--
2.39.3

View File

@ -1,90 +0,0 @@
From 3dc40e8b1878d84045ee80cb6d216348713c048a Mon Sep 17 00:00:00 2001
From: Karel Srot <ksrot@redhat.com>
Date: Tue, 15 Aug 2023 10:00:50 +0200
Subject: [PATCH 7/7] Handle session close using a session manager
Resolves https://github.com/keylime/keylime/issues/1455
Signed-off-by: Karel Srot <ksrot@redhat.com>
---
keylime/revocation_notifier.py | 50 +++++++++++++++++-----------------
packit-ci.fmf | 1 +
2 files changed, 26 insertions(+), 25 deletions(-)
diff --git a/keylime/revocation_notifier.py b/keylime/revocation_notifier.py
index 31a3095..5cc8b1a 100644
--- a/keylime/revocation_notifier.py
+++ b/keylime/revocation_notifier.py
@@ -132,32 +132,32 @@ def notify_webhook(tosend: Dict[str, Any]) -> None:
def worker_webhook(tosend: Dict[str, Any], url: str) -> None:
interval = config.getfloat("verifier", "retry_interval")
exponential_backoff = config.getboolean("verifier", "exponential_backoff")
- session = requests.session()
- logger.info("Sending revocation event via webhook...")
- for i in range(config.getint("verifier", "max_retries")):
- next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
- try:
- response = session.post(url, json=tosend, timeout=5)
- if response.status_code in [200, 202]:
- break
-
- logger.debug(
- "Unable to publish revocation message %d times via webhook, "
- "trying again in %d seconds. "
- "Server returned status code: %s",
- i,
- next_retry,
- response.status_code,
- )
- except requests.exceptions.RequestException as e:
- logger.debug(
- "Unable to publish revocation message %d times via webhook, trying again in %d seconds: %s",
- i,
- next_retry,
- e,
- )
+ with requests.Session() as session:
+ logger.info("Sending revocation event via webhook...")
+ for i in range(config.getint("verifier", "max_retries")):
+ next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
+ try:
+ response = session.post(url, json=tosend, timeout=5)
+ if response.status_code in [200, 202]:
+ break
+
+ logger.debug(
+ "Unable to publish revocation message %d times via webhook, "
+ "trying again in %d seconds. "
+ "Server returned status code: %s",
+ i,
+ next_retry,
+ response.status_code,
+ )
+ except requests.exceptions.RequestException as e:
+ logger.debug(
+ "Unable to publish revocation message %d times via webhook, trying again in %d seconds: %s",
+ i,
+ next_retry,
+ e,
+ )
- time.sleep(next_retry)
+ time.sleep(next_retry)
w = functools.partial(worker_webhook, tosend, url)
t = threading.Thread(target=w, daemon=True)
diff --git a/packit-ci.fmf b/packit-ci.fmf
index f4d2dae..7abe313 100644
--- a/packit-ci.fmf
+++ b/packit-ci.fmf
@@ -108,6 +108,7 @@ adjust:
- /setup/configure_tpm_emulator
- /setup/install_upstream_keylime
- /setup/install_rust_keylime_from_copr
+ - /setup/configure_kernel_ima_module/ima_policy_simple
- /functional/basic-attestation-on-localhost
- /functional/basic-attestation-with-custom-certificates
- /functional/basic-attestation-without-mtls
--
2.41.0

View File

@ -1,31 +0,0 @@
From aa891f456d5cf0fc23e16d87fb28efc79a0d8073 Mon Sep 17 00:00:00 2001
From: Marcio Silva <marcio.a.silva@ibm.com>
Date: Wed, 23 Aug 2023 11:24:59 -0300
Subject: [PATCH 8/8] verifier: should read parameters from verifier.conf only
Single-line fix for #1446
The verifier should read "durable attestation" backend imports from
verifier.conf (and NOT from registrar.conf)
Signed-off-by: Marcio Silva <marcio.a.silva@ibm.com>
---
keylime/cloud_verifier_tornado.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/keylime/cloud_verifier_tornado.py b/keylime/cloud_verifier_tornado.py
index d65cb63..261022a 100644
--- a/keylime/cloud_verifier_tornado.py
+++ b/keylime/cloud_verifier_tornado.py
@@ -51,7 +51,7 @@ except SQLAlchemyError as err:
sys.exit(1)
try:
- rmc = record.get_record_mgt_class(config.get("registrar", "durable_attestation_import", fallback=""))
+ rmc = record.get_record_mgt_class(config.get("verifier", "durable_attestation_import", fallback=""))
if rmc:
rmc = rmc("verifier")
except record.RecordManagementException as rme:
--
2.41.0

View File

@ -1,48 +0,0 @@
From 9e5ac9f25cd400b16d5969f531cee28290543f2a Mon Sep 17 00:00:00 2001
From: Marcio Silva <marcio.a.silva@ibm.com>
Date: Wed, 12 Jul 2023 12:05:47 -0300
Subject: [PATCH] Fix for CVE-2023-38201 (Security Advisory
GHSA-f4r5-q63f-gcww)
In addition to removing the offending message, this patch also ensures
deletion of an agent's record from the database in case of failure after
a single attempt.
Signed-off-by: Marcio Silva <marcio.a.silva@ibm.com>
---
keylime/registrar_common.py | 15 +++++++++++++--
1 file changed, 13 insertions(+), 2 deletions(-)
diff --git a/keylime/registrar_common.py b/keylime/registrar_common.py
index 1fd97cd0c..7f15ae430 100644
--- a/keylime/registrar_common.py
+++ b/keylime/registrar_common.py
@@ -250,7 +250,9 @@ def get_network_params(
try:
port = int(port)
if port < 1 or port > 65535:
- logger.warning("Contact port for agent %s is not a number between 1 and got: %s.", agent_id, port)
+ logger.warning(
+ "Contact port for agent %s is not a number between 1 and 65535 got: %s.", agent_id, port
+ )
port = None
except ValueError:
logger.warning("Contact port for agent %s is not a valid number got: %s.", agent_id, port)
@@ -447,7 +449,16 @@ def do_PUT(self) -> None:
logger.error("SQLAlchemy Error: %s", e)
raise
else:
- raise Exception(f"Auth tag {auth_tag} does not match expected value {ex_mac}")
+ if agent_id and session.query(RegistrarMain).filter_by(agent_id=agent_id).delete():
+ try:
+ session.commit()
+ except SQLAlchemyError as e:
+ logger.error("SQLAlchemy Error: %s", e)
+ raise
+
+ raise Exception(
+ f"Auth tag {auth_tag} for agent {agent_id} does not match expected value. The agent has been deleted from database, and a restart of it will be required"
+ )
web_util.echo_json_response(self, 200, "Success")
logger.info("PUT activated: %s", agent_id)

View File

@ -1,69 +0,0 @@
From e17d5a6a47c1405a799a06754d3e905856e3035d Mon Sep 17 00:00:00 2001
From: florian <264356+flozilla@users.noreply.github.com>
Date: Tue, 11 Jul 2023 21:31:27 +0200
Subject: [PATCH 10/10] CVE-2023-38200
Extend Registrar SSL socket to be non-blocking
Fixes: CVE-2023-38200
Upstream:
- https://github.com/keylime/keylime/commit/c68d8f0b7
- https://github.com/keylime/keylime/commit/27d515f4b
---
keylime/registrar_common.py | 23 ++++++++++++++++++++++-
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/keylime/registrar_common.py b/keylime/registrar_common.py
index d1d20dd..6441e3b 100644
--- a/keylime/registrar_common.py
+++ b/keylime/registrar_common.py
@@ -2,8 +2,10 @@ import base64
import http.server
import ipaddress
import os
+import select
import signal
import socket
+import ssl
import sys
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer
@@ -77,6 +79,25 @@ class BaseHandler(BaseHTTPRequestHandler, SessionManager):
class ProtectedHandler(BaseHandler):
+ def handle(self) -> None:
+ """Need to perform SSL handshake here, as
+ do_handshake_on_connect=False for non-blocking SSL socket"""
+ while True:
+ try:
+ self.request.do_handshake()
+ break
+ except ssl.SSLWantReadError:
+ select.select([self.request], [], [])
+ except ssl.SSLWantWriteError:
+ select.select([], [self.request], [])
+ except ssl.SSLError as e:
+ logger.error("SSL connection error: %s", e)
+ return
+ except Exception as e:
+ logger.error("General communication failure: %s", e)
+ return
+ BaseHTTPRequestHandler.handle(self)
+
def do_HEAD(self) -> None:
"""HEAD not supported"""
web_util.echo_json_response(self, 405, "HEAD not supported")
@@ -494,7 +515,7 @@ def start(host: str, tlsport: int, port: int) -> None:
protected_server = RegistrarServer((host, tlsport), ProtectedHandler)
context = web_util.init_mtls("registrar", logger=logger)
if context is not None:
- protected_server.socket = context.wrap_socket(protected_server.socket, server_side=True)
+ protected_server.socket = context.wrap_socket(protected_server.socket, server_side=True, do_handshake_on_connect=False)
thread_protected_server = threading.Thread(target=protected_server.serve_forever)
# Set up the unprotected registrar server
--
2.41.0

View File

@ -1,244 +0,0 @@
From b0cf69c9db20eb319ea2e90c22f500e09b704224 Mon Sep 17 00:00:00 2001
From: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
Date: Wed, 23 Aug 2023 16:24:15 +0200
Subject: [PATCH] Implement automatic agent API version bump
Automatically update the agent supported API version in the database if
the agent is updated and its API version is bumped.
Previously, if an agent was added to a verifier while it used an old API
version, and then it is updated with an API version bump, the
attestation would fail as the verifier would try to reach the agent
using the old API version.
Fixes #1457
Signed-off-by: Anderson Toshiyuki Sasaki <ansasaki@redhat.com>
---
keylime/cloud_verifier_tornado.py | 185 +++++++++++++++++++++++++++---
1 file changed, 167 insertions(+), 18 deletions(-)
diff --git a/keylime/cloud_verifier_tornado.py b/keylime/cloud_verifier_tornado.py
index 261022ac6..31e6f7159 100644
--- a/keylime/cloud_verifier_tornado.py
+++ b/keylime/cloud_verifier_tornado.py
@@ -32,6 +32,7 @@
)
from keylime.agentstates import AgentAttestState, AgentAttestStates
from keylime.common import retry, states, validators
+from keylime.common.version import str_to_version
from keylime.da import record
from keylime.db.keylime_db import DBEngineManager, SessionManager
from keylime.db.verifier_db import VerfierMain, VerifierAllowlist
@@ -998,6 +999,80 @@ def data_received(self, chunk: Any) -> None:
raise NotImplementedError()
+async def update_agent_api_version(agent: Dict[str, Any], timeout: float = 60.0) -> Union[Dict[str, Any], None]:
+ agent_id = agent["agent_id"]
+
+ logger.info("Agent %s API version bump detected, trying to update stored API version", agent_id)
+ kwargs = {}
+ if agent["ssl_context"]:
+ kwargs["context"] = agent["ssl_context"]
+
+ res = tornado_requests.request(
+ "GET",
+ f"http://{agent['ip']}:{agent['port']}/version",
+ **kwargs,
+ timeout=timeout,
+ )
+ response = await res
+
+ if response.status_code != 200:
+ logger.warning(
+ "Could not get agent %s supported API version, Error: %s",
+ agent["agent_id"],
+ response.status_code,
+ )
+ return None
+
+ try:
+ json_response = json.loads(response.body)
+ new_version = json_response["results"]["supported_version"]
+ old_version = agent["supported_version"]
+
+ # Only update the API version to use if it is supported by the verifier
+ if new_version in keylime_api_version.all_versions():
+ new_version_tuple = str_to_version(new_version)
+ old_version_tuple = str_to_version(old_version)
+
+ assert new_version_tuple, f"Agent {agent_id} version {new_version} is invalid"
+ assert old_version_tuple, f"Agent {agent_id} version {old_version} is invalid"
+
+ # Check that the new version is greater than current version
+ if new_version_tuple <= old_version_tuple:
+ logger.warning(
+ "Agent %s API version %s is lower or equal to previous version %s",
+ agent_id,
+ new_version,
+ old_version,
+ )
+ return None
+
+ logger.info("Agent %s new API version %s is supported", agent_id, new_version)
+ session = get_session()
+ agent["supported_version"] = new_version
+
+ # Remove keys that should not go to the DB
+ agent_db = dict(agent)
+ for key in exclude_db:
+ if key in agent_db:
+ del agent_db[key]
+
+ session.query(VerfierMain).filter_by(agent_id=agent_id).update(agent_db) # pyright: ignore
+ session.commit()
+ else:
+ logger.warning("Agent %s new API version %s is not supported", agent_id, new_version)
+ return None
+
+ except SQLAlchemyError as e:
+ logger.error("SQLAlchemy Error updating API version for agent %s: %s", agent_id, e)
+ return None
+ except Exception as e:
+ logger.exception(e)
+ return None
+
+ logger.info("Agent %s API version updated to %s", agent["agent_id"], agent["supported_version"])
+ return agent
+
+
async def invoke_get_quote(
agent: Dict[str, Any], runtime_policy: str, need_pubkey: bool, timeout: float = 60.0
) -> None:
@@ -1028,15 +1103,43 @@ async def invoke_get_quote(
# this is a connection error, retry get quote
if response.status_code in [408, 500, 599]:
asyncio.ensure_future(process_agent(agent, states.GET_QUOTE_RETRY))
- else:
- # catastrophic error, do not continue
- logger.critical(
- "Unexpected Get Quote response error for cloud agent %s, Error: %s",
- agent["agent_id"],
- response.status_code,
- )
- failure.add_event("no_quote", "Unexpected Get Quote reponse from agent", False)
- asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ if response.status_code == 400:
+ try:
+ json_response = json.loads(response.body)
+ if "API version not supported" in json_response["status"]:
+ update = update_agent_api_version(agent)
+ updated = await update
+
+ if updated:
+ asyncio.ensure_future(process_agent(updated, states.GET_QUOTE_RETRY))
+ else:
+ logger.warning("Could not update stored agent %s API version", agent["agent_id"])
+ failure.add_event(
+ "version_not_supported",
+ {"context": "Agent API version not supported", "data": json_response},
+ False,
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ except Exception as e:
+ logger.exception(e)
+ failure.add_event(
+ "exception", {"context": "Agent caused the verifier to throw an exception", "data": str(e)}, False
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ # catastrophic error, do not continue
+ logger.critical(
+ "Unexpected Get Quote response error for cloud agent %s, Error: %s",
+ agent["agent_id"],
+ response.status_code,
+ )
+ failure.add_event("no_quote", "Unexpected Get Quote reponse from agent", False)
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
else:
try:
json_response = json.loads(response.body)
@@ -1100,15 +1203,43 @@ async def invoke_provide_v(agent: Dict[str, Any], timeout: float = 60.0) -> None
if response.status_code != 200:
if response.status_code in [408, 500, 599]:
asyncio.ensure_future(process_agent(agent, states.PROVIDE_V_RETRY))
- else:
- # catastrophic error, do not continue
- logger.critical(
- "Unexpected Provide V response error for cloud agent %s, Error: %s",
- agent["agent_id"],
- response.status_code,
- )
- failure.add_event("no_v", {"message": "Unexpected provide V response", "data": response.status_code}, False)
- asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ if response.status_code == 400:
+ try:
+ json_response = json.loads(response.body)
+ if "API version not supported" in json_response["status"]:
+ update = update_agent_api_version(agent)
+ updated = await update
+
+ if updated:
+ asyncio.ensure_future(process_agent(updated, states.PROVIDE_V_RETRY))
+ else:
+ logger.warning("Could not update stored agent %s API version", agent["agent_id"])
+ failure.add_event(
+ "version_not_supported",
+ {"context": "Agent API version not supported", "data": json_response},
+ False,
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ except Exception as e:
+ logger.exception(e)
+ failure.add_event(
+ "exception", {"context": "Agent caused the verifier to throw an exception", "data": str(e)}, False
+ )
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
+ return
+
+ # catastrophic error, do not continue
+ logger.critical(
+ "Unexpected Provide V response error for cloud agent %s, Error: %s",
+ agent["agent_id"],
+ response.status_code,
+ )
+ failure.add_event("no_v", {"message": "Unexpected provide V response", "data": response.status_code}, False)
+ asyncio.ensure_future(process_agent(agent, states.FAILED, failure))
else:
asyncio.ensure_future(process_agent(agent, states.GET_QUOTE))
@@ -1134,6 +1265,24 @@ async def invoke_notify_error(agent: Dict[str, Any], tosend: Dict[str, Any], tim
agent["agent_id"],
)
elif response.status_code != 200:
+ if response.status_code == 400:
+ try:
+ json_response = json.loads(response.body)
+ if "API version not supported" in json_response["status"]:
+ update = update_agent_api_version(agent)
+ updated = await update
+
+ if updated:
+ asyncio.ensure_future(invoke_notify_error(updated, tosend))
+ else:
+ logger.warning("Could not update stored agent %s API version", agent["agent_id"])
+
+ return
+
+ except Exception as e:
+ logger.exception(e)
+ return
+
logger.warning(
"Unexpected Notify Revocation response error for cloud agent %s, Error: %s",
agent["agent_id"],

View File

@ -1,59 +0,0 @@
--- a/scripts/create_runtime_policy.sh 2023-10-09 17:04:26.121194607 +0200
+++ b/scripts/create_runtime_policy.sh 2023-10-09 17:06:02.089855614 +0200
@@ -42,7 +42,7 @@
exit $NOARGS;
fi
-ALGO=sha1sum
+ALGO=sha256sum
ALGO_LIST=("sha1sum" "sha256sum" "sha512sum")
@@ -78,7 +78,7 @@
# Where to look for initramfs image
-INITRAMFS_LOC="/boot/"
+INITRAMFS_LOC="/boot"
if [ -d "/ostree" ]; then
# If we are on an ostree system change where we look for initramfs image
loc=$(grep -E "/ostree/[^/]([^/]*)" -o /proc/cmdline | head -n 1 | cut -d / -f 3)
@@ -121,7 +121,7 @@
cp -r /tmp/ima/$i-extracted-unmk/. /tmp/ima/$i-extracted
fi
elif [[ -x "/usr/lib/dracut/skipcpio" ]] ; then
- /usr/lib/dracut/skipcpio $i | gunzip -c | cpio -i -d 2> /dev/null
+ /usr/lib/dracut/skipcpio $i | gunzip -c 2> /dev/null | cpio -i -d 2> /dev/null
else
echo "ERROR: No tools for initramfs image processing found!"
break
@@ -130,9 +130,26 @@
find -type f -exec $ALGO "./{}" \; | sed "s| \./\./| /|" >> $OUTPUT
done
-# Convert to runtime policy
-echo "Converting created allowlist to Keylime runtime policy"
-python3 $WORKING_DIR/../keylime/cmd/convert_runtime_policy.py -a $OUTPUT -o $OUTPUT
+# when ROOTFS_LOC = '/', the path starts on allowlist ends up with double '//'
+#
+# Example:
+#
+# b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c //bar
+#
+# Replace the unwanted '//' with a single '/'
+sed -i 's| /\+| /|g' $ALLOWLIST_DIR/${OUTPUT}
+
+# When the file name contains newlines or backslashes, the output of sha256sum
+# adds a backslash at the beginning of the line.
+#
+# Example:
+#
+# $ echo foo > ba\\r
+# $ sha256sum ba\\r
+# \b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c ba\\r
+#
+# Remove the unwanted backslash prefix
+sed -i 's/^\\//g' $ALLOWLIST_DIR/${OUTPUT}
# Clean up
rm -rf /tmp/ima

View File

@ -1,44 +0,0 @@
diff --git a/keylime/cloud_verifier_common.py b/keylime/cloud_verifier_common.py
index a7399d2..c0f416d 100644
--- a/keylime/cloud_verifier_common.py
+++ b/keylime/cloud_verifier_common.py
@@ -8,7 +8,7 @@ from keylime.agentstates import AgentAttestState, AgentAttestStates, TPMClockInf
from keylime.common import algorithms
from keylime.db.verifier_db import VerfierMain
from keylime.failure import Component, Event, Failure
-from keylime.ima import file_signatures
+from keylime.ima import file_signatures, ima
from keylime.ima.types import RuntimePolicyType
from keylime.tpm import tpm_util
from keylime.tpm.tpm_main import Tpm
@@ -271,7 +271,7 @@ def process_get_status(agent: VerfierMain) -> Dict[str, Any]:
logger.debug('The contents of the agent %s attribute "mb_refstate" are %s', agent.agent_id, agent.mb_refstate)
has_runtime_policy = 0
- if agent.ima_policy.generator and agent.ima_policy.generator > 1:
+ if agent.ima_policy.generator and agent.ima_policy.generator > ima.RUNTIME_POLICY_GENERATOR.EmptyAllowList:
has_runtime_policy = 1
response = {
diff --git a/keylime/cmd/create_policy.py b/keylime/cmd/create_policy.py
index 0841d64..086b92a 100755
--- a/keylime/cmd/create_policy.py
+++ b/keylime/cmd/create_policy.py
@@ -6,6 +6,7 @@ import argparse
import binascii
import collections
import copy
+import datetime
import gzip
import json
import multiprocessing
@@ -580,6 +581,9 @@ def main() -> None:
policy["excludes"] = sorted(list(set(policy["excludes"])))
policy["ima"]["ignored_keyrings"] = sorted(list(set(policy["ima"]["ignored_keyrings"])))
+ policy["meta"]["generator"] = ima.RUNTIME_POLICY_GENERATOR.LegacyAllowList
+ policy["meta"]["timestamp"] = str(datetime.datetime.now())
+
try:
ima.validate_runtime_policy(policy)
except ima.ImaValidationError as ex:

View File

@ -1,80 +0,0 @@
From add9847988e963fd124863736592fc16cc8c716b Mon Sep 17 00:00:00 2001
From: Stefan Berger <stefanb@linux.ibm.com>
Date: Tue, 11 Jul 2023 18:03:28 -0400
Subject: [PATCH 14/14] tpm_util: Replace a logger.error with an Exception in
case of invalid signature
This fixes a possibly severe issue in 7.2.5 & 7.3.0.
Signed-off-by: Stefan Berger <stefanb@linux.ibm.com>
---
keylime/tpm/tpm_util.py | 6 +-----
keylime/tpm/tpm_util_test.py | 21 +++++++++++++++++++++
2 files changed, 22 insertions(+), 5 deletions(-)
diff --git a/keylime/tpm/tpm_util.py b/keylime/tpm/tpm_util.py
index ce2ce0f..58a1a04 100644
--- a/keylime/tpm/tpm_util.py
+++ b/keylime/tpm/tpm_util.py
@@ -3,7 +3,6 @@ import string
import struct
from typing import Any, Dict, List, Optional, Tuple, Union
-from cryptography.exceptions import InvalidSignature
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes, hmac, serialization
from cryptography.hazmat.primitives.asymmetric import ec, padding
@@ -155,10 +154,7 @@ def checkquote(
digest.update(quoteblob)
quote_digest = digest.finalize()
- try:
- verify(pubkey, signature, quote_digest, hashfunc)
- except InvalidSignature:
- logger.error("Invalid quote signature!")
+ verify(pubkey, signature, quote_digest, hashfunc)
# Check that reported nonce is expected one
retDict = tpm2_objects.unmarshal_tpms_attest(quoteblob)
diff --git a/keylime/tpm/tpm_util_test.py b/keylime/tpm/tpm_util_test.py
index aaf16cd..2c73997 100644
--- a/keylime/tpm/tpm_util_test.py
+++ b/keylime/tpm/tpm_util_test.py
@@ -2,6 +2,7 @@ import base64
import unittest
from unittest import mock
+from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.asymmetric.ec import (
SECP256R1,
EllipticCurve,
@@ -60,6 +61,26 @@ class TestTpmUtil(unittest.TestCase):
except Exception as e:
self.fail(f"checkquote failed with {e}")
+ # test bad input
+ bad_quoteblob = bytearray(quoteblob)
+ bad_quoteblob[5] ^= 0x1
+ with self.assertRaises(InvalidSignature):
+ checkquote(aikblob, nonce, sigblob, bad_quoteblob, pcrblob, "sha256")
+
+ l = list(nonce)
+ l[0] = "a"
+ bad_nonce = "".join(l)
+ with self.assertRaises(Exception):
+ checkquote(aikblob, bad_nonce, sigblob, quoteblob, pcrblob, "sha256")
+
+ bad_pcrblob = bytearray(pcrblob)
+ bad_pcrblob[5] ^= 0x1
+ with self.assertRaises(Exception):
+ checkquote(aikblob, nonce, sigblob, quoteblob, bad_pcrblob, "sha256")
+
+ with self.assertRaises(ValueError):
+ checkquote(aikblob, nonce, sigblob, quoteblob, pcrblob, "sha1")
+
@staticmethod
def not_random(numbytes: int) -> bytes:
return b"\x12" * numbytes
--
2.41.0

File diff suppressed because it is too large Load Diff

View File

@ -1,167 +0,0 @@
From 4bd644b74719fdbb6c521d3d5eb2430d8dc18b36 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Wed, 5 Feb 2025 16:16:25 +0000
Subject: [PATCH 16/16] Use TLS on revocation notification webhook
---
keylime/requests_client.py | 5 ++
keylime/revocation_notifier.py | 91 +++++++++++++++++++++++-----------
2 files changed, 68 insertions(+), 28 deletions(-)
diff --git a/keylime/requests_client.py b/keylime/requests_client.py
index 85a175c..e993fbc 100644
--- a/keylime/requests_client.py
+++ b/keylime/requests_client.py
@@ -1,3 +1,4 @@
+import re
import ssl
from typing import Any, Dict, Optional
@@ -15,6 +16,10 @@ class RequestsClient:
ignore_hostname: bool = True,
**kwargs: Any,
) -> None:
+ # Remove eventual "http?://" from the base url
+ if base_url.startswith("http"):
+ base_url = re.sub(r"https?://", "", base_url)
+
if tls_enabled:
self.base_url = f"https://{base_url}"
else:
diff --git a/keylime/revocation_notifier.py b/keylime/revocation_notifier.py
index 5cc8b1a..434bf64 100644
--- a/keylime/revocation_notifier.py
+++ b/keylime/revocation_notifier.py
@@ -9,8 +9,9 @@ from typing import Any, Callable, Dict, Optional, Set
import requests
-from keylime import config, crypto, json, keylime_logging
+from keylime import config, crypto, json, keylime_logging, web_util
from keylime.common import retry
+from keylime.requests_client import RequestsClient
logger = keylime_logging.init_logging("revocation_notifier")
broker_proc: Optional[Process] = None
@@ -109,7 +110,10 @@ def notify(tosend: Dict[str, Any]) -> None:
exponential_backoff = config.getboolean("verifier", "exponential_backoff")
next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
logger.debug(
- "Unable to publish revocation message %d times, trying again in %f seconds: %s", i, next_retry, e
+ "Unable to publish revocation message %d times, trying again in %f seconds: %s",
+ i,
+ next_retry,
+ e,
)
time.sleep(next_retry)
mysock.close()
@@ -132,30 +136,50 @@ def notify_webhook(tosend: Dict[str, Any]) -> None:
def worker_webhook(tosend: Dict[str, Any], url: str) -> None:
interval = config.getfloat("verifier", "retry_interval")
exponential_backoff = config.getboolean("verifier", "exponential_backoff")
- with requests.Session() as session:
- logger.info("Sending revocation event via webhook...")
- for i in range(config.getint("verifier", "max_retries")):
- next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
+
+ max_retries = config.getint("verifier", "max_retries")
+ if max_retries <= 0:
+ logger.info("Invalid value found in 'max_retries' option for verifier, using default value")
+ max_retries = 5
+
+ # Get TLS options from the configuration
+ (cert, key, trusted_ca, key_password), verify_server_cert = web_util.get_tls_options(
+ "verifier", is_client=True, logger=logger
+ )
+
+ # Generate the TLS context using the obtained options
+ tls_context = web_util.generate_tls_context(cert, key, trusted_ca, key_password, is_client=True, logger=logger)
+
+ logger.info("Sending revocation event via webhook to %s ...", url)
+ for i in range(max_retries):
+ next_retry = retry.retry_time(exponential_backoff, interval, i, logger)
+
+ with RequestsClient(
+ url,
+ verify_server_cert,
+ tls_context,
+ ) as client:
try:
- response = session.post(url, json=tosend, timeout=5)
- if response.status_code in [200, 202]:
- break
-
- logger.debug(
- "Unable to publish revocation message %d times via webhook, "
- "trying again in %d seconds. "
- "Server returned status code: %s",
- i,
- next_retry,
- response.status_code,
- )
- except requests.exceptions.RequestException as e:
- logger.debug(
- "Unable to publish revocation message %d times via webhook, trying again in %d seconds: %s",
- i,
- next_retry,
- e,
- )
+ res = client.post("", json=tosend, timeout=5)
+ except requests.exceptions.SSLError as ssl_error:
+ if "TLSV1_ALERT_UNKNOWN_CA" in str(ssl_error):
+ logger.warning(
+ "Keylime does not recognize certificate from peer. Check if verifier 'trusted_server_ca' is configured correctly"
+ )
+
+ raise ssl_error from ssl_error
+
+ if res and res.status_code in [200, 202]:
+ break
+
+ logger.debug(
+ "Unable to publish revocation message %d times via webhook, "
+ "trying again in %d seconds. "
+ "Server returned status code: %s",
+ i + 1,
+ next_retry,
+ res.status_code,
+ )
time.sleep(next_retry)
@@ -167,7 +191,11 @@ def notify_webhook(tosend: Dict[str, Any]) -> None:
cert_key = None
-def process_revocation(revocation: Dict[str, Any], callback: Callable[[Dict[str, Any]], None], cert_path: str) -> None:
+def process_revocation(
+ revocation: Dict[str, Any],
+ callback: Callable[[Dict[str, Any]], None],
+ cert_path: str,
+) -> None:
global cert_key
if cert_key is None:
@@ -179,10 +207,17 @@ def process_revocation(revocation: Dict[str, Any], callback: Callable[[Dict[str,
cert_key = crypto.x509_import_pubkey(certpem)
if cert_key is None:
- logger.warning("Unable to check signature of revocation message: %s not available", cert_path)
+ logger.warning(
+ "Unable to check signature of revocation message: %s not available",
+ cert_path,
+ )
elif "signature" not in revocation or revocation["signature"] == "none":
logger.warning("No signature on revocation message from server")
- elif not crypto.rsa_verify(cert_key, revocation["msg"].encode("utf-8"), revocation["signature"].encode("utf-8")):
+ elif not crypto.rsa_verify(
+ cert_key,
+ revocation["msg"].encode("utf-8"),
+ revocation["signature"].encode("utf-8"),
+ ):
logger.error("Invalid revocation message siganture %s", revocation)
else:
message = json.loads(revocation["msg"])
--
2.47.1

17
ci_tests.fmf Normal file
View File

@ -0,0 +1,17 @@
/e2e:
plan:
import:
url: https://github.com/RedHat-SP-Security/keylime-plans.git
name: /generic/e2e
/package-update:
plan:
import:
url: https://github.com/RedHat-SP-Security/keylime-plans.git
name: /generic/package-update
/rpmverify:
plan:
import:
url: https://github.com/RedHat-SP-Security/keylime-plans.git
name: /generic/rpmverify

View File

@ -1,80 +0,0 @@
# define context to filter out all test requiring TPM device
context:
swtpm: yes
agent: rust
execute:
how: tmt
/functional:
summary: run keylime e2e tests
discover:
how: fmf
url: https://github.com/RedHat-SP-Security/keylime-tests
ref: "@.tmt/dynamic_ref.fmf"
test:
- /setup/configure_tpm_emulator
- /setup/inject_SELinux_AVC_check
# change IMA policy to simple and run one attestation scenario
# this is to utilize also a different parser
- /setup/configure_kernel_ima_module/ima_policy_simple
- /functional/basic-attestation-on-localhost
# now change IMA policy to signing and run all tests
- /setup/configure_kernel_ima_module/ima_policy_signing
- "^/functional/.*"
- "^/compatibility/.*"
/package-update:
summary: package update scenario
prepare:
- how: shell
order: 90
script:
# remove installed (tested) keylime and any leftovers
- dnf -y remove '*keylime*'
- rm -rf /var/lib/keylime /etc/keylime
# install older keylime
- dnf -y install keylime --disablerepo test-artifacts
discover:
- name: Update_scenario_setup
how: fmf
url: https://github.com/RedHat-SP-Security/keylime-tests
ref: "@.tmt/dynamic_ref.fmf"
test:
- /setup/configure_tpm_emulator
- /setup/inject_SELinux_AVC_check
- /setup/enable_keylime_debug_messages
- /setup/configure_kernel_ima_module/ima_policy_signing
# do the actual keylime test setup
- /update/basic-attestation-on-localhost/setup
- name: Update_keylime_package
how: shell
tests:
- name: keylime_update
test: dnf -y update '*keylime*'
duration: 2m
- name: Test_scenario_post-update
how: fmf
url: https://github.com/RedHat-SP-Security/keylime-tests
ref: "@.tmt/dynamic_ref.fmf"
test:
# run the post-update test scenario
- /update/basic-attestation-on-localhost/test
/rpmverify:
summary: rpmverify test
discover:
- name: test
how: shell
tests:
- name: rpmverify
test: 'rpmverify $(rpm -qa | grep keylime)'
duration: 2m

View File

@ -3,6 +3,4 @@ product_versions:
- rhel-9
decision_context: osci_compose_gate
rules:
- !PassingTestCaseRule {test_case_name: baseos-ci.brew-build.openstack-swtpm.functional}
- !PassingTestCaseRule {test_case_name: baseos-ci.brew-build.beaker-tpm-ima.functional}
- !PassingTestCaseRule {test_case_name: baseos-ci.brew-build.beaker-swtpm-multihost.functional}
- !PassingTestCaseRule {test_case_name: osci.brew-build.tier0.functional}

View File

@ -1,5 +1,5 @@
%global srcname keylime
%global policy_version 1.2.0
%global policy_version 38.1.0
%global with_selinux 1
%global selinuxtype targeted
@ -8,8 +8,8 @@
%global debug_package %{nil}
Name: keylime
Version: 7.3.0
Release: 15%{?dist}
Version: 7.12.1
Release: 1%{?dist}
Summary: Open source TPM software for Bootstrapping and Maintaining Trust
URL: https://github.com/keylime/keylime
@ -17,34 +17,35 @@ Source0: https://github.com/keylime/keylime/archive/refs/tags/v%{version}
Source1: %{srcname}.sysusers
Source2: https://github.com/RedHat-SP-Security/%{name}-selinux/archive/v%{policy_version}/keylime-selinux-%{policy_version}.tar.gz
Patch: 0001-Remove-usage-of-Required-NotRequired-typing_ext.patch
Patch: 0002-Allow-keylime_server_t-tcp-connect-to-several-domain.patch
Patch: 0003-Use-version-2.0-as-the-minimum-for-the-configuration.patch
Patch: 0004-Duplicate-str_to_version-for-the-upgrade-tool.patch
Patch: 0005-elchecking-example-add-ignores-for-EV_PLATFORM_CONFI.patch
Patch: 0006-Revert-mapping-changes.patch
Patch: 0007-Handle-session-close-using-a-session-manager.patch
Patch: 0008-verifier-should-read-parameters-from-verifier.conf-o.patch
Patch: 0009-CVE-2023-38201.patch
Patch: 0010-CVE-2023-38200.patch
Patch: 0011-Automatically-update-agent-API-version.patch
Patch: 0012-Restore-create-allowlist.patch
Patch: 0013-Set-generator-and-timestamp-in-create-policy.patch
Patch: 0014-tpm_util-Replace-a-logger.error-with-an-Exception-in.patch
Patch: 0015-Backport-keylime-policy-tool.patch
Patch: 0016-Use-TLS-on-revocation-notification-webhook.patch
Patch: 0001-Make-keylime-compatible-with-python-3.9.patch
Patch: 0002-tests-fix-rpm-repo-tests-from-create-runtime-policy.patch
Patch: 0003-tests-skip-measured-boot-related-tests-for-s390x-and.patch
Patch: 0004-templates-duplicate-str_to_version-in-the-adjust-scr.patch
# RHEL-9 ships a slightly modified version of create_allowlist.sh.
# DO NOT REMOVE THE FOLLOWING PATCH IN FOLLOWING RHEL-9.x REBASES.
Patch: 0005-Restore-RHEL-9-version-of-create_allowlist.sh.patch
License: ASL 2.0 and MIT
BuildRequires: git-core
BuildRequires: swig
BuildRequires: openssl-devel
BuildRequires: python3-devel
BuildRequires: python3-dbus
BuildRequires: python3-jinja2
BuildRequires: python3-cryptography
BuildRequires: python3-pyasn1
BuildRequires: python3-pyasn1-modules
BuildRequires: python3-tornado
BuildRequires: python3-sqlalchemy
BuildRequires: python3-lark-parser
BuildRequires: python3-psutil
BuildRequires: python3-pyyaml
BuildRequires: python3-jsonschema
BuildRequires: python3-setuptools
BuildRequires: systemd-rpm-macros
BuildRequires: tpm2-abrmd-selinux
BuildRequires: rpm-sign
BuildRequires: createrepo_c
BuildRequires: tpm2-tools
Requires: python3-%{srcname} = %{version}-%{release}
Requires: %{srcname}-base = %{version}-%{release}
@ -71,6 +72,7 @@ Requires(pre): shadow-utils
Requires(pre): util-linux
Requires: procps-ng
Requires: tpm2-tss
Requires: openssl
%if 0%{?with_selinux}
# This ensures that the *-selinux package and all its dependencies are not pulled
@ -79,6 +81,7 @@ Recommends: (%{srcname}-selinux if selinux-policy-%{selinuxtype})
%endif
%ifarch %efi
BuildRequires: efivar-libs
Requires: efivar-libs
%endif
@ -179,7 +182,6 @@ bzip2 -9 %{srcname}.pp
%py3_install
mkdir -p %{buildroot}/%{_sharedstatedir}/%{srcname}
mkdir -p --mode=0700 %{buildroot}/%{_rundir}/%{srcname}
mkdir -p --mode=0700 %{buildroot}/%{_localstatedir}/log/%{srcname}
mkdir -p --mode=0700 %{buildroot}/%{_sysconfdir}/%{srcname}/
for comp in "verifier" "tenant" "registrar" "ca" "logging"; do
@ -229,12 +231,44 @@ EOF
install -p -D -m 0644 %{SOURCE1} %{buildroot}%{_sysusersdir}/%{srcname}.conf
%check
# Create the default configuration files to be used by the tests.
# Also set the associated environment variables so that the tests
# will actually use them.
CONF_TEMP_DIR="$(mktemp -d)"
%{python3} -m keylime.cmd.convert_config --out "${CONF_TEMP_DIR}" --templates templates/
export KEYLIME_VERIFIER_CONFIG="${CONF_TEMP_DIR}/verifier.conf"
export KEYLIME_TENANT_CONFIG="${CONF_TEMP_DIR}/tenant.conf"
export KEYLIME_REGISTRAR_CONFIG="${CONF_TEMP_DIR}/registrar.conf"
export KEYLIME_CA_CONFIG="${CONF_TEMP_DIR}/ca.conf"
export KEYLIME_LOGGING_CONFIG="${CONF_TEMP_DIR}/logging.conf"
# Run the tests.
%{python3} -m unittest
# Cleanup.
[ "${CONF_TEMP_DIR}" ] && rm -rf "${CONF_TEMP_DIR}"
for e in KEYLIME_VERIFIER_CONFIG \
KEYLIME_TENANT_CONFIG \
KEYLIME_REGISTRAR_CONFIG \
KEYLIME_CA_CONFIG \
KEYLIME_LOGGING_CONFIG; do
unset "${e}"
done
exit 0
%pre base
%sysusers_create_compat %{SOURCE1}
exit 0
%post base
/usr/bin/keylime_upgrade_config --component ca --component logging >/dev/null
for c in ca logging; do
[ -e /etc/keylime/"${c}.conf" ] || continue
/usr/bin/keylime_upgrade_config --component "${c}" \
--input /etc/keylime/"${c}.conf" \
>/dev/null
done
exit 0
%posttrans base
@ -260,17 +294,26 @@ fi
exit 0
%post verifier
/usr/bin/keylime_upgrade_config --component verifier >/dev/null
[ -e /etc/keylime/verifier.conf ] && \
/usr/bin/keylime_upgrade_config --component verifier \
--input /etc/keylime/verifier.conf \
>/dev/null
%systemd_post %{srcname}_verifier.service
exit 0
%post registrar
/usr/bin/keylime_upgrade_config --component registrar >/dev/null
[ -e /etc/keylime/registrar.conf ] && \
/usr/bin/keylime_upgrade_config --component registrar \
--input /etc/keylime/registrar.conf /
>/dev/null
%systemd_post %{srcname}_registrar.service
exit 0
%post tenant
/usr/bin/keylime_upgrade_config --component tenant >/dev/null
[ -e /etc/keylime/tenant.conf ] && \
/usr/bin/keylime_upgrade_config --component tenant \
--input /etc/keylime/tenant.conf \
>/dev/null
exit 0
%preun verifier
@ -360,7 +403,6 @@ fi
%config(noreplace) %verify(not md5 size mode mtime) %attr(400,%{srcname},%{srcname}) %{_sysconfdir}/%{srcname}/ca.conf
%config(noreplace) %verify(not md5 size mode mtime) %attr(400,%{srcname},%{srcname}) %{_sysconfdir}/%{srcname}/logging.conf
%attr(700,%{srcname},%{srcname}) %dir %{_rundir}/%{srcname}
%attr(700,%{srcname},%{srcname}) %dir %{_localstatedir}/log/%{srcname}
%attr(700,%{srcname},%{srcname}) %dir %{_sharedstatedir}/%{srcname}
%attr(500,%{srcname},%{srcname}) %dir %{_sharedstatedir}/%{srcname}/tpm_cert_store
%attr(400,%{srcname},%{srcname}) %{_sharedstatedir}/%{srcname}/tpm_cert_store/*.pem
@ -375,6 +417,10 @@ fi
%license LICENSE
%changelog
* Thu May 22 2025 Sergio Correia <scorreia@redhat.com> - 7.12.1-1
- Update to 7.12.1
Resolves: RHEL-78418
* Wed Feb 05 2025 Sergio Correia <scorreia@redhat.com> - 7.3.0-15
- Use TLS on revocation notification webhook
- Include system installed CA certificates when verifying webhook

View File

@ -1,2 +1,2 @@
SHA512 (v7.3.0.tar.gz) = 6a5ee3e642015b4c09058ab84db9c1c132d94b387284cb363285fb43a875921fdf0e88ef4b67ab886ceed4e6a5a49aeef0334d42d9662d27f865287d3e9e000b
SHA512 (keylime-selinux-1.2.0.tar.gz) = 6557738add1cebbc962f8366657a028f4092a36aea0d8a624aa0568a50ff49a516d34f16d699366ac039352d219c522c8ee2ab3a8eea69bd72c616cc4e9a9a7c
SHA512 (v7.12.1.tar.gz) = c1297ebfc659102d73283255cfda4a977dfbff9bdd3748e05de405dadb70f752ad39aa5848edda9143d8ec620d07c21f1551fa4a914c99397620ab1682e58458
SHA512 (keylime-selinux-38.1.0.tar.gz) = cbb54511b14a0352e1c2679909b0dcbc00924bacf8f783b230a782d0fae6e3b0168704ea4896c273199163e04a26bcb6217cf30dc480fc300e1fdcb7e39d00a8