diff --git a/.gitignore b/.gitignore index 3c75e1e..9de9a82 100644 --- a/.gitignore +++ b/.gitignore @@ -51,3 +51,4 @@ /pcs-web-ui-0.1.22.tar.gz /pcs-web-ui-node-modules-0.1.22.tar.xz /pcs-0.12.0.tar.gz +/rack-3.1.10.gem diff --git a/RHEL-78654-fix-filter-clones-by-agent-name-in-resource-tree.patch b/RHEL-78654-fix-filter-clones-by-agent-name-in-resource-tree.patch new file mode 100644 index 0000000..2ddaf01 --- /dev/null +++ b/RHEL-78654-fix-filter-clones-by-agent-name-in-resource-tree.patch @@ -0,0 +1,82 @@ +From e949ae0e2fc350ed1e74ce48b50ac812efd92f30 Mon Sep 17 00:00:00 2001 +From: Ivan Devat +Date: Mon, 20 Jan 2025 12:13:25 +0100 +Subject: [PATCH] fix: filter clones by agent name in resource tree + +--- + .../src/app/view/cluster/resources/tree/filter.ts | 13 +++++++------ + .../test/src/test/scenes/resources/tree.test.ts | 14 +++++++++++--- + 2 files changed, 18 insertions(+), 9 deletions(-) + +diff --git a/packages/app/src/app/view/cluster/resources/tree/filter.ts b/packages/app/src/app/view/cluster/resources/tree/filter.ts +index 118e6cc2..b047a094 100644 +--- a/packages/app/src/app/view/cluster/resources/tree/filter.ts ++++ b/packages/app/src/app/view/cluster/resources/tree/filter.ts +@@ -1,4 +1,4 @@ +-import {Resource} from "app/view/cluster/types"; ++import type {FenceDevice, Resource} from "app/view/cluster/types"; + + type Group = Extract; + type Clone = Extract; +@@ -14,13 +14,14 @@ const createResourceFilter = (filter: string) => { + const match = (searchable: string) => + searchable.toLowerCase().includes(filter.toLowerCase()); + ++ const matchPrimitive = (primitive: Primitive | FenceDevice) => ++ match(primitive.id) || match(primitive.agentName); ++ + const filterPrimitive = (primitive: Primitive) => +- match(primitive.id) || match(primitive.agentName) ? primitive : null; ++ matchPrimitive(primitive) ? 
primitive : null; + + const filterGroup = (group: Group): Group | null => { +- const primitives = group.resources.filter( +- primitive => match(primitive.id) || match(primitive.agentName), +- ); ++ const primitives = group.resources.filter(matchPrimitive); + return match(group.id) || primitives.length > 0 + ? {...group, resources: primitives} + : null; +@@ -34,7 +35,7 @@ const createResourceFilter = (filter: string) => { + : null; + } + +- if (match(clone.member.id)) { ++ if (matchPrimitive(clone.member)) { + return clone; + } + +diff --git a/packages/test/src/test/scenes/resources/tree.test.ts b/packages/test/src/test/scenes/resources/tree.test.ts +index f9b118f8..19190955 100644 +--- a/packages/test/src/test/scenes/resources/tree.test.ts ++++ b/packages/test/src/test/scenes/resources/tree.test.ts +@@ -19,7 +19,15 @@ const resourceList = [ + "Clone1", + cs.group("Group2", [cs.primitive("ResourceD"), cs.primitive("ResourceE")]), + ), +- cs.clone("Clone2", cs.primitive("ResourceF")), ++ cs.clone( ++ "Clone2", ++ cs.primitive("ResourceF", { ++ agentname: "ocf:heartbeat:apache", ++ provider: "heartbeat", ++ class: "ocf", ++ type: "apache", ++ }), ++ ), + ]; + + const primitiveItem = (id: string) => +@@ -129,7 +137,7 @@ describe("Resource tree filter", () => { + await isAbsent(groupItem("Group2")); + await isAbsent(primitiveItem("ResourceD")); + await isAbsent(primitiveItem("ResourceE")); +- await isAbsent(cloneItem("Clone2")); +- await isAbsent(primitiveItem("ResourceF")); ++ await isVisible(cloneItem("Clone2")); ++ await isVisible(primitiveItem("ResourceF")); + }); + }); +-- +2.48.1 + diff --git a/RHEL-79033-fix-restarting-bundle-instances.patch b/RHEL-79033-fix-restarting-bundle-instances.patch new file mode 100644 index 0000000..3828c5f --- /dev/null +++ b/RHEL-79033-fix-restarting-bundle-instances.patch @@ -0,0 +1,169 @@ +From df6e16235702f3d5f2bb8eb24e633a75c27e1e70 Mon Sep 17 00:00:00 2001 +From: Tomas Jelinek +Date: Wed, 12 Feb 2025 14:00:26 +0100 +Subject: 
[PATCH 1/2] fix restarting bundle instances + +* fixes a regression introduced in 557ea61b7cfb7948fb2f60916dda84651c4ef4f8 +--- + pcs/lib/commands/resource.py | 98 ++++++++++--------- + .../lib/commands/resource/test_restart.py | 16 +-- + 2 files changed, 54 insertions(+), 60 deletions(-) + +diff --git a/pcs/lib/commands/resource.py b/pcs/lib/commands/resource.py +index db9be21fe..10a43e1cc 100644 +--- a/pcs/lib/commands/resource.py ++++ b/pcs/lib/commands/resource.py +@@ -2665,63 +2665,69 @@ def restart( + timeout -- abort if the command doesn't finish in this time (integer + unit) + """ + cib = env.get_cib() ++ ++ # To be able to restart bundle instances, which are not to be found in CIB, ++ # do not fail if specified ID is not found in CIB. Pacemaker provides ++ # reasonable messages when the ID to be restarted is not a resource or ++ # doesn't exist. We only search for the resource in order to provide hints ++ # when the user attempts to restart bundle's or clone's inner resources. ++ resource_found = False + try: + resource_el = get_element_by_id(cib, resource_id) +- except ElementNotFound as e: +- env.report_processor.report( +- ReportItem.error( +- reports.messages.IdNotFound( +- resource_id, expected_types=["resource"] ++ resource_found = True ++ except ElementNotFound: ++ pass ++ ++ if resource_found: ++ if not resource.common.is_resource(resource_el): ++ env.report_processor.report( ++ ReportItem.error( ++ reports.messages.IdBelongsToUnexpectedType( ++ resource_id, ++ expected_types=["resource"], ++ current_type=resource_el.tag, ++ ) + ) + ) +- ) +- raise LibraryError() from e +- if not resource.common.is_resource(resource_el): +- env.report_processor.report( +- ReportItem.error( +- reports.messages.IdBelongsToUnexpectedType( +- resource_id, +- expected_types=["resource"], +- current_type=resource_el.tag, ++ raise LibraryError() ++ ++ if resource.stonith.is_stonith(resource_el): ++ env.report_processor.report( ++ reports.ReportItem.error( ++ 
reports.messages.CommandArgumentTypeMismatch( ++ "stonith resource" ++ ) + ) + ) +- ) +- raise LibraryError() +- if resource.stonith.is_stonith(resource_el): +- env.report_processor.report( +- reports.ReportItem.error( +- reports.messages.CommandArgumentTypeMismatch("stonith resource") +- ) +- ) +- raise LibraryError() ++ raise LibraryError() + +- parent_resource_el = resource.clone.get_parent_any_clone(resource_el) +- if parent_resource_el is None: +- parent_resource_el = resource.bundle.get_parent_bundle(resource_el) +- if parent_resource_el is not None: +- env.report_processor.report( +- reports.ReportItem.warning( +- reports.messages.ResourceRestartUsingParentRersource( +- str(resource_el.attrib["id"]), +- str(parent_resource_el.attrib["id"]), ++ parent_resource_el = resource.clone.get_parent_any_clone(resource_el) ++ if parent_resource_el is None: ++ parent_resource_el = resource.bundle.get_parent_bundle(resource_el) ++ if parent_resource_el is not None: ++ env.report_processor.report( ++ reports.ReportItem.warning( ++ reports.messages.ResourceRestartUsingParentRersource( ++ str(resource_el.attrib["id"]), ++ str(parent_resource_el.attrib["id"]), ++ ) + ) + ) +- ) +- resource_el = parent_resource_el ++ resource_el = parent_resource_el + +- if node and not ( +- resource.clone.is_any_clone(resource_el) +- or resource.bundle.is_bundle(resource_el) +- ): +- env.report_processor.report( +- reports.ReportItem.error( +- reports.messages.ResourceRestartNodeIsForMultiinstanceOnly( +- str(resource_el.attrib["id"]), +- resource_el.tag, +- node, ++ if node and not ( ++ resource.clone.is_any_clone(resource_el) ++ or resource.bundle.is_bundle(resource_el) ++ ): ++ env.report_processor.report( ++ reports.ReportItem.error( ++ reports.messages.ResourceRestartNodeIsForMultiinstanceOnly( ++ str(resource_el.attrib["id"]), ++ resource_el.tag, ++ node, ++ ) + ) + ) +- ) + + if timeout is not None: + env.report_processor.report_list( +@@ -2733,7 +2739,7 @@ def restart( + + 
resource_restart( + env.cmd_runner(), +- str(resource_el.attrib["id"]), ++ str(resource_el.attrib["id"]) if resource_found else resource_id, + node=node, + timeout=timeout, + ) +diff --git a/pcs_test/tier0/lib/commands/resource/test_restart.py b/pcs_test/tier0/lib/commands/resource/test_restart.py +index d3c8fae92..7c072dc5f 100644 +--- a/pcs_test/tier0/lib/commands/resource/test_restart.py ++++ b/pcs_test/tier0/lib/commands/resource/test_restart.py +@@ -104,20 +104,8 @@ class ResourceRestart(TestCase): + ) + + def test_resource_not_found(self): +- self.env_assist.assert_raise_library_error( +- lambda: resource.restart(self.env_assist.get_env(), "RX") +- ) +- self.env_assist.assert_reports( +- [ +- fixture.error( +- reports.codes.ID_NOT_FOUND, +- id="RX", +- expected_types=["resource"], +- context_type="", +- context_id="", +- ) +- ] +- ) ++ self.config.runner.pcmk.resource_restart("RX") ++ resource.restart(self.env_assist.get_env(), "RX") + + def test_not_a_resource(self): + self.env_assist.assert_raise_library_error( +-- +2.48.1 + diff --git a/RHEL-79090-fix-deletion-of-misconfigured-bundles.patch b/RHEL-79090-fix-deletion-of-misconfigured-bundles.patch new file mode 100644 index 0000000..2278eec --- /dev/null +++ b/RHEL-79090-fix-deletion-of-misconfigured-bundles.patch @@ -0,0 +1,280 @@ +From ae5fbbefab98618c644befe80c8791d513bdd711 Mon Sep 17 00:00:00 2001 +From: Peter Romancik +Date: Thu, 13 Feb 2025 11:03:20 +0100 +Subject: [PATCH 2/2] fix deletion of misconfigured bundles + +--- + pcs/common/reports/codes.py | 3 + + pcs/common/reports/messages.py | 30 +++++++++ + pcs/lib/cib/remove_elements.py | 59 +++++++++++------ + .../tier0/common/reports/test_messages.py | 23 +++++++ + pcs_test/tier0/lib/commands/test_cib.py | 66 +++++++++++++++++++ + 5 files changed, 162 insertions(+), 19 deletions(-) + +diff --git a/pcs/common/reports/codes.py b/pcs/common/reports/codes.py +index cde8365f6..0da1aa08b 100644 +--- a/pcs/common/reports/codes.py ++++ 
b/pcs/common/reports/codes.py +@@ -182,6 +182,9 @@ CLUSTER_UUID_ALREADY_SET = M("CLUSTER_UUID_ALREADY_SET") + CLUSTER_WILL_BE_DESTROYED = M("CLUSTER_WILL_BE_DESTROYED") + COMMAND_INVALID_PAYLOAD = M("COMMAND_INVALID_PAYLOAD") + COMMAND_UNKNOWN = M("COMMAND_UNKNOWN") ++CONFIGURED_RESOURCE_MISSING_IN_STATUS = M( ++ "CONFIGURED_RESOURCE_MISSING_IN_STATUS" ++) + LIVE_ENVIRONMENT_NOT_CONSISTENT = M("LIVE_ENVIRONMENT_NOT_CONSISTENT") + LIVE_ENVIRONMENT_REQUIRED = M("LIVE_ENVIRONMENT_REQUIRED") + LIVE_ENVIRONMENT_REQUIRED_FOR_LOCAL_NODE = M( +diff --git a/pcs/common/reports/messages.py b/pcs/common/reports/messages.py +index b8e151ef4..23bebd1f5 100644 +--- a/pcs/common/reports/messages.py ++++ b/pcs/common/reports/messages.py +@@ -31,6 +31,7 @@ from pcs.common.resource_agent.dto import ( + ResourceAgentNameDto, + get_resource_agent_full_name, + ) ++from pcs.common.resource_status import ResourceState + from pcs.common.str_tools import ( + format_list, + format_list_custom_last_separator, +@@ -6430,6 +6431,35 @@ class CannotStopResourcesBeforeDeleting(ReportItemMessage): + ) + + ++@dataclass(frozen=True) ++class ConfiguredResourceMissingInStatus(ReportItemMessage): ++ """ ++ Cannot check status of resource, because the resource is missing in cluster ++ status despite being configured in CIB. This happens for misconfigured ++ resources, e.g. bundle with primitive resource inside and no IP address ++ for the bundle specified. 
++ ++ resource_id -- id of the resource ++ checked_state -- expected state of the resource ++ """ ++ ++ resource_id: str ++ checked_state: Optional[ResourceState] = None ++ _code = codes.CONFIGURED_RESOURCE_MISSING_IN_STATUS ++ ++ @property ++ def message(self) -> str: ++ return ( ++ "Cannot check if the resource '{resource_id}' is in expected " ++ "state{state}, since the resource is missing in cluster status" ++ ).format( ++ resource_id=self.resource_id, ++ state=format_optional( ++ self.checked_state and self.checked_state.name.lower(), " ({})" ++ ), ++ ) ++ ++ + @dataclass(frozen=True) + class ResourceBanPcmkError(ReportItemMessage): + """ +diff --git a/pcs/lib/cib/remove_elements.py b/pcs/lib/cib/remove_elements.py +index d9596a679..d601f85c1 100644 +--- a/pcs/lib/cib/remove_elements.py ++++ b/pcs/lib/cib/remove_elements.py +@@ -251,17 +251,27 @@ def warn_resource_unmanaged( + report_list.extend(parser.get_warnings()) + + status = ResourcesStatusFacade.from_resources_status_dto(status_dto) +- report_list.extend( +- reports.ReportItem.warning( +- reports.messages.ResourceIsUnmanaged(resource_id) +- ) +- for resource_id in resource_ids +- if status.is_state( +- resource_id, +- None, +- ResourceState.UNMANAGED, +- ) +- ) ++ for r_id in resource_ids: ++ if not status.exists(r_id, None): ++ # Pacemaker does not put misconfigured resources into cluster ++ # status and we are unable to check state of such resources. ++ # This happens for e.g. bundle with primitive resource inside and ++ # no IP address for the bundle specified. We expect the resource ++ # to be stopped since it is misconfigured. Stopping it again ++ # even when it is unmanaged should not break anything. 
++ report_list.append( ++ reports.ReportItem.debug( ++ reports.messages.ConfiguredResourceMissingInStatus( ++ r_id, ResourceState.UNMANAGED ++ ) ++ ) ++ ) ++ elif status.is_state(r_id, None, ResourceState.UNMANAGED): ++ report_list.append( ++ reports.ReportItem.warning( ++ reports.messages.ResourceIsUnmanaged(r_id) ++ ) ++ ) + except NotImplementedError: + # TODO remove when issue with bundles in status is fixed + report_list.extend( +@@ -310,20 +320,31 @@ def ensure_resources_stopped( + report_list.extend(parser.get_warnings()) + + status = ResourcesStatusFacade.from_resources_status_dto(status_dto) +- not_stopped_ids = [ +- resource_id +- for resource_id in resource_ids +- if not status.is_state( +- resource_id, ++ for r_id in resource_ids: ++ if not status.exists(r_id, None): ++ # Pacemaker does not put misconfigured resources into cluster ++ # status and we are unable to check state of such resources. ++ # This happens for e.g. bundle with primitive resource inside and ++ # no IP address for the bundle specified. We expect the resource ++ # to be stopped since it is misconfigured. 
++ report_list.append( ++ reports.ReportItem.debug( ++ reports.messages.ConfiguredResourceMissingInStatus( ++ r_id, ResourceState.STOPPED ++ ) ++ ) ++ ) ++ elif not status.is_state( ++ r_id, + None, + ResourceState.STOPPED, + instances_quantifier=( + MoreChildrenQuantifierType.ALL +- if status.can_have_multiple_instances(resource_id) ++ if status.can_have_multiple_instances(r_id) + else None + ), +- ) +- ] ++ ): ++ not_stopped_ids.append(r_id) + except NotImplementedError: + # TODO remove when issue with bundles in status is fixed + not_stopped_ids = [ +diff --git a/pcs_test/tier0/common/reports/test_messages.py b/pcs_test/tier0/common/reports/test_messages.py +index f2e2fb70c..b4eff2a20 100644 +--- a/pcs_test/tier0/common/reports/test_messages.py ++++ b/pcs_test/tier0/common/reports/test_messages.py +@@ -11,6 +11,7 @@ from pcs.common.file import RawFileError + from pcs.common.reports import const + from pcs.common.reports import messages as reports + from pcs.common.resource_agent.dto import ResourceAgentNameDto ++from pcs.common.resource_status import ResourceState + from pcs.common.types import CibRuleExpressionType + + # pylint: disable=too-many-lines +@@ -6101,3 +6102,25 @@ class CannotStopResourcesBeforeDeleting(NameBuildTest): + ["resourceId1", "resourceId2"] + ), + ) ++ ++ ++class ConfiguredResourceMissingInStatus(NameBuildTest): ++ def test_only_resource_id(self): ++ self.assert_message_from_report( ++ ( ++ "Cannot check if the resource 'id' is in expected state, " ++ "since the resource is missing in cluster status" ++ ), ++ reports.ConfiguredResourceMissingInStatus("id"), ++ ) ++ ++ def test_with_expected_state(self): ++ self.assert_message_from_report( ++ ( ++ "Cannot check if the resource 'id' is in expected state " ++ "(stopped), since the resource is missing in cluster status" ++ ), ++ reports.ConfiguredResourceMissingInStatus( ++ "id", ResourceState.STOPPED ++ ), ++ ) +diff --git a/pcs_test/tier0/lib/commands/test_cib.py 
b/pcs_test/tier0/lib/commands/test_cib.py +index 603447689..78bdaa85a 100644 +--- a/pcs_test/tier0/lib/commands/test_cib.py ++++ b/pcs_test/tier0/lib/commands/test_cib.py +@@ -5,6 +5,7 @@ from unittest import ( + ) + + from pcs.common import reports ++from pcs.common.resource_status import ResourceState + from pcs.lib.commands import cib as lib + + from pcs_test.tools import fixture +@@ -949,3 +950,68 @@ class RemoveElementsStopResources(TestCase, StopResourcesWaitMixin): + ), + ] + ) ++ ++ def test_skip_state_check_on_missing_from_status(self): ++ self.config.runner.cib.load( ++ resources=""" ++ ++ ++ ++ ++ ++ ++ """ ++ ) ++ self.fixture_stop_resources_wait_calls( ++ self.config.calls.get("runner.cib.load").stdout, ++ initial_state_modifiers={"resources": ""}, ++ after_disable_cib_modifiers={ ++ "resources": """ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ """ ++ }, ++ after_disable_state_modifiers={"resources": ""}, ++ ) ++ self.fixture_push_cib_after_stopping( ++ resources=""" ++ ++ ++ ++ ++ ++ """ ++ ) ++ lib.remove_elements(self.env_assist.get_env(), ["apa"]) ++ self.env_assist.assert_reports( ++ [ ++ fixture.info( ++ reports.codes.STOPPING_RESOURCES_BEFORE_DELETING, ++ resource_id_list=["apa"], ++ ), ++ fixture.debug( ++ reports.codes.CONFIGURED_RESOURCE_MISSING_IN_STATUS, ++ resource_id="apa", ++ checked_state=ResourceState.UNMANAGED, ++ ), ++ fixture.info(reports.codes.WAIT_FOR_IDLE_STARTED, timeout=0), ++ fixture.debug( ++ reports.codes.CONFIGURED_RESOURCE_MISSING_IN_STATUS, ++ resource_id="apa", ++ checked_state=ResourceState.STOPPED, ++ ), ++ fixture.info( ++ reports.codes.CIB_REMOVE_REFERENCES, ++ id_tag_map={"apa": "primitive", "test-bundle": "bundle"}, ++ removing_references_from={"apa": {"test-bundle"}}, ++ ), ++ ] ++ ) +-- +2.48.1 + diff --git a/pcs.spec b/pcs.spec index da51ef6..a4427ed 100644 --- a/pcs.spec +++ b/pcs.spec @@ -1,6 +1,6 @@ Name: pcs Version: 0.12.0 -Release: 2%{?dist} +Release: 3%{?dist} # 
https://docs.fedoraproject.org/en-US/packaging-guidelines/LicensingGuidelines/ # https://fedoraproject.org/wiki/Licensing:Main?rd=Licensing#Good_Licenses # GPL-2.0-only: pcs @@ -66,7 +66,7 @@ ExclusiveArch: x86_64 s390x ppc64le aarch64 %global version_rubygem_mustermann 3.0.3 %global version_rubygem_nio4r 2.7.4 %global version_rubygem_puma 6.4.3 -%global version_rubygem_rack 3.1.8 +%global version_rubygem_rack 3.1.10 %global version_rubygem_rack_protection 4.0.0 %global version_rubygem_rack_session 2.0.0 %global version_rubygem_rack_test 2.1.0 @@ -126,9 +126,12 @@ Source101: https://github.com/ClusterLabs/pcs-web-ui/releases/download/%{ui_vers # Patch1: name.patch Patch1: do-not-support-cluster-setup-with-udp-u-transport.patch Patch2: show-info-page-instead-of-webui.patch +Patch3: RHEL-79033-fix-restarting-bundle-instances.patch +Patch4: RHEL-79090-fix-deletion-of-misconfigured-bundles.patch # ui patches: >200 # Patch201: name-web-ui.patch +Patch201: RHEL-78654-fix-filter-clones-by-agent-name-in-resource-tree.patch # git for patches @@ -354,6 +357,7 @@ update_times_patch(){ %autopatch -p1 -m 201 # update_times_patch %%{PATCH201} +update_times_patch %{PATCH201} # patch pcs sources %autosetup -S git -n %{pcs_source_name} -N @@ -361,6 +365,8 @@ update_times_patch(){ # update_times_patch %%{PATCH1} update_times_patch %{PATCH1} update_times_patch %{PATCH2} +update_times_patch %{PATCH3} +update_times_patch %{PATCH4} # generate .tarball-version if building from an untagged commit, not a released version # autogen uses git-version-gen which uses .tarball-version for generating version number @@ -651,6 +657,16 @@ run_all_tests %changelog +* Fri Feb 14 2025 Michal Pospíšil - 0.12.0-3 +- Fixed restarting bundles + Resolves: RHEL-79033 +- Fixed deletion of misconfigured bundles + Resolves: RHEL-79090 +- Fixed filtering of resource clones in web console + Resolves: RHEL-78654 +- Updated bundled rubygem rack + Resolves: RHEL-79676 + * Fri Jan 10 2025 Michal Pospíšil - 0.12.0-2 - 
Rebased pcs to the latest sources (see CHANGELOG.md) Resolves: RHEL-44421 diff --git a/sources b/sources index 98a89f0..8a5fc6c 100644 --- a/sources +++ b/sources @@ -13,9 +13,9 @@ SHA512 (ffi-1.17.0.gem) = 5cdaf19eaa499127607de7389f69b4927c7bd8a154a53071c53906 SHA512 (mustermann-3.0.3.gem) = d205985a5da83d83248899642ed359056b0cdb511e77d51309319c2f8d8b6c84040e9e1d3a56b7f83a0b26aed4b344f4df371b310e419c20170f0a486e89ba6f SHA512 (nio4r-2.7.4.gem) = 6c8280484066ffc39e98b99dfaf5a69fe2a28cafb380924f448673fda8b69b5d97f8b75b8345b91d92f6186f0664e09bec8e1c7c19c070219b030f554824d2ac SHA512 (puma-6.4.3.gem) = e8baf137c5164f11b8563561405fc4218210707bfb15d0f21118d4be0fd0d071050c46357337a9c6fdda7ce230f3ae09ebfe9976f0a7a0243824bda7871d7a18 -SHA512 (rack-3.1.8.gem) = 7d7b3d8d40c3afc184de90c7602385854bc890ec13c98029e31db0134dbebd7655425334249c908bdb9e5fd1f15754872064611bbf1317a46c6082fd7cdf9b9d SHA512 (rackup-2.2.1.gem) = e63c4dee6f1a677d507df0ae7bcebec88673e7a0a8d6621997949045db60801907038a148a0608f6e62864cb2ac056fca382f3438dc227b0fa7a3be52d56ea66 SHA512 (tilt-2.4.0.gem) = 8cf5036017f501da8843340a9c574ee647074782dcb27ee0aa906fb96ad1e66b90dfb80159aa4c5e7605490058c5ae478bd0fe09f17ae50a2697327d02c814cc SHA512 (pcs-web-ui-0.1.22.tar.gz) = cb97ffba625326ab3857b9c22b4400907177a5ea88769ae611cc9315758c1ceca7e16a1b92d43fc594911fcde6003d3beadb388c6bdd5c1bcc67d699c49c9b2c SHA512 (pcs-web-ui-node-modules-0.1.22.tar.xz) = b1db7d8c04e942baf8a99f115cbda31a84f562c7deeee5f1371e5ddf5fb5e73ce084ede1f0da7083b2d0114228f58006599f1b9a29dd3a5202b3119e41f74d69 SHA512 (pcs-0.12.0.tar.gz) = f9b93bd39ce18898aa3bab7d4da86535e7351b75f15a0bc333598e2dae03ebf0f0f0638b1b441533af7f32909902353c3cf3ed5a219d0ce3b67a3a0e2c63b194 +SHA512 (rack-3.1.10.gem) = 2da18867543b7c536e152cbb48a1680473b9b53c405f71c51c3263ae7d40435dfcc66b99caee0e14d901affa36bfcedb6fa5defe9ecbd2ce82687f71f8d2ef43