From 9073af646407d2830cda3370fe4fa4ce5ff7b75c Mon Sep 17 00:00:00 2001
From: CentOS Sources
Date: Tue, 28 Feb 2023 18:12:39 +0000
Subject: [PATCH] import curl-7.61.1-30.el8

---
 SOURCES/0047-curl-7.61.1-CVE-2023-23916.patch | 331 ++++++++++++++++++
 SPECS/curl.spec                               |   9 +-
 2 files changed, 339 insertions(+), 1 deletion(-)
 create mode 100644 SOURCES/0047-curl-7.61.1-CVE-2023-23916.patch

diff --git a/SOURCES/0047-curl-7.61.1-CVE-2023-23916.patch b/SOURCES/0047-curl-7.61.1-CVE-2023-23916.patch
new file mode 100644
index 0000000..d3c4c7d
--- /dev/null
+++ b/SOURCES/0047-curl-7.61.1-CVE-2023-23916.patch
@@ -0,0 +1,331 @@
+From 95f873ff983a1ae57415b3c16a881e74432cf8b8 Mon Sep 17 00:00:00 2001
+From: Fabian Keil
+Date: Tue, 9 Feb 2021 14:04:32 +0100
+Subject: [PATCH 1/2] runtests.pl: support the nonewline attribute for the data
+ part
+
+Closes #8239
+
+Upstream-commit: 736847611a40c01e7c290407e22e2f0f5f8efd6a
+Signed-off-by: Kamil Dudka
+---
+ tests/runtests.pl      |  7 +++++++
+ tests/server/getpart.c | 11 ++++++++++-
+ 2 files changed, 17 insertions(+), 1 deletion(-)
+
+diff --git a/tests/runtests.pl b/tests/runtests.pl
+index 40315aa..2e1500d 100755
+--- a/tests/runtests.pl
++++ b/tests/runtests.pl
+@@ -3817,6 +3817,13 @@ sub singletest {
+     else {
+         # check against the data section
+         @reply = getpart("reply", "data");
++        if(@reply) {
++            my %hash = getpartattr("reply", "data");
++            if($hash{'nonewline'}) {
++                # cut off the final newline from the final line of the data
++                chomp($reply[$#reply]);
++            }
++        }
+         # get the mode attribute
+         my $filemode=$replyattr{'mode'};
+         if($filemode && ($filemode eq "text") && $has_textaware) {
+diff --git a/tests/server/getpart.c b/tests/server/getpart.c
+index 32b55bc..f8fe3f6 100644
+--- a/tests/server/getpart.c
++++ b/tests/server/getpart.c
+@@ -5,7 +5,7 @@
+  *                            | (__| |_| |  _ <| |___
+  *                             \___|\___/|_| \_\_____|
+  *
+- * Copyright (C) 1998 - 2017, Daniel Stenberg, , et al.
++ * Copyright (C) 1998 - 2022, Daniel Stenberg, , et al.
+  *
+  * This software is licensed as described in the file COPYING, which
+  * you should have received as part of this distribution. The terms
+@@ -295,6 +295,7 @@ int getpart(char **outbuf, size_t *outlen,
+   size_t outalloc = 256;
+   int in_wanted_part = 0;
+   int base64 = 0;
++  int nonewline = 0;
+   int error;
+ 
+   enum {
+@@ -360,6 +361,8 @@ int getpart(char **outbuf, size_t *outlen,
+             if(error)
+               return error;
+           }
++          if(nonewline)
++            (*outlen)--;
+           break;
+         }
+       }
+@@ -377,6 +380,8 @@ int getpart(char **outbuf, size_t *outlen,
+             if(error)
+               return error;
+           }
++          if(nonewline)
++            (*outlen)--;
+           break;
+         }
+       }
+@@ -451,6 +456,10 @@ int getpart(char **outbuf, size_t *outlen,
+         /* bit rough test, but "mostly" functional, */
+         /* treat wanted part data as base64 encoded */
+         base64 = 1;
++        if(strstr(patt, "nonewline=")) {
++          show(("* setting nonewline\n"));
++          nonewline = 1;
++        }
+       }
+       continue;
+     }
+-- 
+2.39.1
+
+
+From bc5fc958b017895728962c9d44c469418cbec1a0 Mon Sep 17 00:00:00 2001
+From: Patrick Monnerat
+Date: Mon, 13 Feb 2023 08:33:09 +0100
+Subject: [PATCH 2/2] content_encoding: do not reset stage counter for each
+ header
+
+Test 418 verifies
+
+Closes #10492
+
+Upstream-commit: 119fb187192a9ea13dc90d9d20c215fc82799ab9
+Signed-off-by: Kamil Dudka
+---
+ lib/content_encoding.c  |   7 +-
+ lib/urldata.h           |   1 +
+ tests/data/Makefile.inc |   1 +
+ tests/data/test387      |   2 +-
+ tests/data/test418      | 152 ++++++++++++++++++++++++++++++++++++++++
+ 5 files changed, 158 insertions(+), 5 deletions(-)
+ create mode 100644 tests/data/test418
+
+diff --git a/lib/content_encoding.c b/lib/content_encoding.c
+index bfc13e2..94344d6 100644
+--- a/lib/content_encoding.c
++++ b/lib/content_encoding.c
+@@ -944,7 +944,6 @@ CURLcode Curl_build_unencoding_stack(struct connectdata *conn,
+ {
+   struct Curl_easy *data = conn->data;
+   struct SingleRequest *k = &data->req;
+-  int counter = 0;
+ 
+   do {
+     const char *name;
+@@ -979,9 +978,9 @@ CURLcode Curl_build_unencoding_stack(struct connectdata *conn,
+     if(!encoding)
+       encoding = &error_encoding;  /* Defer error at stack use. */
+ 
+-    if(++counter >= MAX_ENCODE_STACK) {
+-      failf(data, "Reject response due to %u content encodings",
+-            counter);
++    if(k->writer_stack_depth++ >= MAX_ENCODE_STACK) {
++      failf(data, "Reject response due to more than %u content encodings",
++            MAX_ENCODE_STACK);
+       return CURLE_BAD_CONTENT_ENCODING;
+     }
+     /* Stack the unencoding stage. */
+diff --git a/lib/urldata.h b/lib/urldata.h
+index 5b4b34f..8c8c20b 100644
+--- a/lib/urldata.h
++++ b/lib/urldata.h
+@@ -539,6 +539,7 @@ struct SingleRequest {
+ 
+   struct curltime start;    /* transfer started at this time */
+   struct curltime now;      /* current time */
++  unsigned char writer_stack_depth; /* Unencoding stack depth. */
+   bool header;        /* incoming data has HTTP header */
+   enum {
+     HEADER_NORMAL,    /* no bad header at all */
+diff --git a/tests/data/Makefile.inc b/tests/data/Makefile.inc
+index fb51cd6..86b6f85 100644
+--- a/tests/data/Makefile.inc
++++ b/tests/data/Makefile.inc
+@@ -66,6 +66,7 @@ test393 test394 test395 \
+ \
+ test400 test401 test402 test403 test404 test405 test406 test407 test408 \
+ test409 \
++test418 \
+ \
+ test500 test501 test502 test503 test504 test505 test506 test507 test508 \
+ test509 test510 test511 test512 test513 test514 test515 test516 test517 \
+diff --git a/tests/data/test387 b/tests/data/test387
+index 015ec25..644fc7f 100644
+--- a/tests/data/test387
++++ b/tests/data/test387
+@@ -47,7 +47,7 @@ Accept: */*
+ 61
+ 
+ 
+-curl: (61) Reject response due to 5 content encodings
++curl: (61) Reject response due to more than 5 content encodings
+ 
+ 
+ 
+diff --git a/tests/data/test418 b/tests/data/test418
+new file mode 100644
+index 0000000..50e974e
+--- /dev/null
++++ b/tests/data/test418
+@@ -0,0 +1,152 @@
++
++
++
++HTTP
++gzip
++
++
++
++#
++# Server-side
++
++
++HTTP/1.1 200 OK
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++Transfer-Encoding: gzip
++
++-foo-
++
++
++
++#
++# Client-side
++
++
++http
++
++
++Response with multiple Transfer-Encoding headers
++
++
++http://%HOSTIP:%HTTPPORT/%TESTNUMBER -sS
++
++
++
++#
++# Verify data after the test has been "shot"
++
++
++GET /%TESTNUMBER HTTP/1.1
++Host: %HOSTIP:%HTTPPORT
++User-Agent: curl/7.61.1
++Accept: */*
++
++
++
++# CURLE_BAD_CONTENT_ENCODING is 61
++
++61
++
++
++curl: (61) Reject response due to more than 5 content encodings
++
++
++
+-- 
+2.39.1
+
diff --git a/SPECS/curl.spec b/SPECS/curl.spec
index 7d100ba..e3d5c98 100644
--- a/SPECS/curl.spec
+++ b/SPECS/curl.spec
@@ -1,7 +1,7 @@
 Summary: A utility for getting files from remote servers (FTP, HTTP, and others)
 Name: curl
 Version: 7.61.1
-Release: 29%{?dist}
+Release: 30%{?dist}
 License: MIT
 Source: https://curl.haxx.se/download/%{name}-%{version}.tar.xz
 
@@ -133,6 +133,9 @@ Patch45: 0045-curl-7.61.1-CVE-2022-43552.patch
 # h2: lower initial window size to 32 MiB (#2166254)
 Patch46: 0046-curl-7.61.1-h2-window-size.patch
 
+# fix HTTP multi-header compression denial of service (CVE-2023-23916)
+Patch47: 0047-curl-7.61.1-CVE-2023-23916.patch
+
 # patch making libcurl multilib ready
 Patch101: 0101-curl-7.32.0-multilib.patch
 
@@ -352,6 +355,7 @@ sed -e 's|:8992/|:%{?__isa_bits}92/|g' -i tests/data/test97{3..6}
 %patch44 -p1
 %patch45 -p1
 %patch46 -p1
+%patch47 -p1
 
 # make tests/*.py use Python 3
 sed -e '1 s|^#!/.*python|#!%{__python3}|' -i tests/*.py
@@ -514,6 +518,9 @@ rm -f ${RPM_BUILD_ROOT}%{_libdir}/libcurl.la
 %{_libdir}/libcurl.so.4.[0-9].[0-9].minimal
 
 %changelog
+* Wed Feb 15 2023 Kamil Dudka - 7.61.1-30
+- fix HTTP multi-header compression denial of service (CVE-2023-23916)
+
 * Tue Feb 07 2023 Kamil Dudka - 7.61.1-29
 - h2: lower initial window size to 32 MiB (#2166254)
 
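The heart of the fix carried in 0047-curl-7.61.1-CVE-2023-23916.patch is that the encoding-stack counter moves out of Curl_build_unencoding_stack(), where a local variable restarted at zero for every Content-Encoding/Transfer-Encoding header line, into the per-request SingleRequest state (k->writer_stack_depth), so the MAX_ENCODE_STACK limit is enforced across all headers of one response. The standalone C sketch below only illustrates that idea; it is not libcurl code, and struct request and stack_encoding() are hypothetical names made up for this example.

/* Minimal sketch of the CVE-2023-23916 fix idea: keep the encoding-stack
 * depth in per-request state so repeated Transfer-Encoding/Content-Encoding
 * headers cannot reset it. Not libcurl code; struct request and
 * stack_encoding() are hypothetical names used only for illustration. */
#include <stdio.h>

#define MAX_ENCODE_STACK 5

struct request {
  unsigned char writer_stack_depth; /* persists for the whole response */
};

/* called once per encoding token; returns 0 on success, -1 on reject */
static int stack_encoding(struct request *req)
{
  if(req->writer_stack_depth++ >= MAX_ENCODE_STACK) {
    fprintf(stderr,
            "Reject response due to more than %u content encodings\n",
            (unsigned int)MAX_ENCODE_STACK);
    return -1;
  }
  return 0;
}

int main(void)
{
  struct request req = {0};
  int i;
  /* 100 separate "Transfer-Encoding: gzip" headers, as in test 418.
   * With the old per-header local counter each header started from zero
   * and the limit never triggered; with per-request state the response
   * is rejected after MAX_ENCODE_STACK + 1 encodings. */
  for(i = 0; i < 100; i++) {
    if(stack_encoding(&req)) {
      printf("rejected after %d encodings\n", i + 1);
      return 0;
    }
  }
  printf("never rejected\n");
  return 1;
}

On a patched build, the same behaviour can be exercised with the new test case, roughly: cd tests && ./runtests.pl 418 from the curl source tree.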