From 9152c92e753646b0ee09096c58747c82903459e7 Mon Sep 17 00:00:00 2001
From: CentOS Sources
Date: Wed, 4 Nov 2020 15:09:46 -0500
Subject: [PATCH] import squid-4.4-8.module+el8.2.0+7834+b8ecaeef.2

---
 SOURCES/squid-4.4.0-CVE-2020-15810.patch |  56 +++++++++
 SOURCES/squid-4.4.0-CVE-2020-15811.patch | 139 +++++++++++++++++++++++
 SPECS/squid.spec                         |  14 ++-
 3 files changed, 208 insertions(+), 1 deletion(-)
 create mode 100644 SOURCES/squid-4.4.0-CVE-2020-15810.patch
 create mode 100644 SOURCES/squid-4.4.0-CVE-2020-15811.patch

diff --git a/SOURCES/squid-4.4.0-CVE-2020-15810.patch b/SOURCES/squid-4.4.0-CVE-2020-15810.patch
new file mode 100644
index 0000000..8c8176c
--- /dev/null
+++ b/SOURCES/squid-4.4.0-CVE-2020-15810.patch
@@ -0,0 +1,56 @@
+diff --git a/src/HttpHeader.cc b/src/HttpHeader.cc
+index 284a057..cd9d71f 100644
+--- a/src/HttpHeader.cc
++++ b/src/HttpHeader.cc
+@@ -446,18 +446,6 @@ HttpHeader::parse(const char *header_start, size_t hdrLen)
+             return 0;
+         }
+ 
+-        if (e->id == Http::HdrType::OTHER && stringHasWhitespace(e->name.termedBuf())) {
+-            debugs(55, warnOnError, "WARNING: found whitespace in HTTP header name {" <<
+-                   getStringPrefix(field_start, field_end-field_start) << "}");
+-
+-            if (!Config.onoff.relaxed_header_parser) {
+-                delete e;
+-                PROF_stop(HttpHeaderParse);
+-                clean();
+-                return 0;
+-            }
+-        }
+-
+         addEntry(e);
+     }
+ 
+@@ -1418,6 +1406,20 @@ HttpHeaderEntry::parse(const char *field_start, const char *field_end)
+         return NULL;
+     }
+ 
++    /* RFC 7230 section 3.2:
++     *
++     *  header-field   = field-name ":" OWS field-value OWS
++     *  field-name     = token
++     *  token          = 1*TCHAR
++     */
++    for (const char *pos = field_start; pos < (field_start+name_len); ++pos) {
++        if (!CharacterSet::TCHAR[*pos]) {
++            debugs(55, 2, "found header with invalid characters in " <<
++                   Raw("field-name", field_start, min(name_len,100)) << "...");
++            return nullptr;
++        }
++    }
++
+     /* now we know we can parse it */
+ 
+     debugs(55, 9, "parsing HttpHeaderEntry: near '" << getStringPrefix(field_start, field_end-field_start) << "'");
+diff --git a/src/HttpHeader.cc b/src/HttpHeader.cc
+index adeea9c..85c1c00 100644
+--- a/src/HttpHeader.cc
++++ b/src/HttpHeader.cc
+@@ -13,6 +13,7 @@
+ #include "base64.h"
+ #include "globals.h"
+ #include "http/ContentLengthInterpreter.h"
++#include "base/CharacterSet.h"
+ #include "HttpHdrCc.h"
+ #include "HttpHdrContRange.h"
+ #include "HttpHdrScTarget.h" // also includes HttpHdrSc.h
diff --git a/SOURCES/squid-4.4.0-CVE-2020-15811.patch b/SOURCES/squid-4.4.0-CVE-2020-15811.patch
new file mode 100644
index 0000000..09e2b70
--- /dev/null
+++ b/SOURCES/squid-4.4.0-CVE-2020-15811.patch
@@ -0,0 +1,139 @@
+diff --git a/src/HttpHeader.cc b/src/HttpHeader.cc
+index cd9d71f..adeea9c 100644
+--- a/src/HttpHeader.cc
++++ b/src/HttpHeader.cc
+@@ -174,6 +174,7 @@ HttpHeader::operator =(const HttpHeader &other)
+         update(&other); // will update the mask as well
+         len = other.len;
+         conflictingContentLength_ = other.conflictingContentLength_;
++        teUnsupported_ = other.teUnsupported_;
+     }
+     return *this;
+ }
+@@ -222,6 +223,7 @@ HttpHeader::clean()
+     httpHeaderMaskInit(&mask, 0);
+     len = 0;
+     conflictingContentLength_ = false;
++    teUnsupported_ = false;
+     PROF_stop(HttpHeaderClean);
+ }
+ 
+@@ -455,11 +457,23 @@ HttpHeader::parse(const char *header_start, size_t hdrLen)
+                Raw("header", header_start, hdrLen));
+     }
+ 
+-    if (chunked()) {
++    String rawTe;
++    if (getByIdIfPresent(Http::HdrType::TRANSFER_ENCODING, &rawTe)) {
+         // RFC 2616 section 4.4: ignore Content-Length with Transfer-Encoding
+         // RFC 7230 section 3.3.3 #3: Transfer-Encoding overwrites Content-Length
+         delById(Http::HdrType::CONTENT_LENGTH);
+         // and clen state becomes irrelevant
++
++        if (rawTe == "chunked") {
++            ; // leave header present for chunked() method
++        } else if (rawTe == "identity") { // deprecated. no coding
++            delById(Http::HdrType::TRANSFER_ENCODING);
++        } else {
++            // This also rejects multiple encodings until we support them properly.
++            debugs(55, warnOnError, "WARNING: unsupported Transfer-Encoding used by client: " << rawTe);
++            teUnsupported_ = true;
++        }
++
+     } else if (clen.sawBad) {
+         // ensure our callers do not accidentally see bad Content-Length values
+         delById(Http::HdrType::CONTENT_LENGTH);
+diff --git a/src/HttpHeader.h b/src/HttpHeader.h
+index 3b262be..2a73af4 100644
+--- a/src/HttpHeader.h
++++ b/src/HttpHeader.h
+@@ -140,7 +140,13 @@ public:
+     int hasListMember(Http::HdrType id, const char *member, const char separator) const;
+     int hasByNameListMember(const char *name, const char *member, const char separator) const;
+     void removeHopByHopEntries();
+-    inline bool chunked() const; ///< whether message uses chunked Transfer-Encoding
++
++    /// whether the message uses chunked Transfer-Encoding
++    /// optimized implementation relies on us rejecting/removing other codings
++    bool chunked() const { return has(Http::HdrType::TRANSFER_ENCODING); }
++
++    /// whether message used an unsupported and/or invalid Transfer-Encoding
++    bool unsupportedTe() const { return teUnsupported_; }
+ 
+     /* protected, do not use these, use interface functions instead */
+     std::vector<HttpHeaderEntry *> entries; /**< parsed fields in raw format */
+@@ -158,6 +164,9 @@ protected:
+ private:
+     HttpHeaderEntry *findLastEntry(Http::HdrType id) const;
+     bool conflictingContentLength_; ///< found different Content-Length fields
++    /// unsupported encoding, unnecessary syntax characters, and/or
++    /// invalid field-value found in Transfer-Encoding header
++    bool teUnsupported_ = false;
+ };
+ 
+ int httpHeaderParseQuotedString(const char *start, const int len, String *val);
+@@ -167,13 +176,6 @@ SBuf httpHeaderQuoteString(const char *raw);
+ 
+ void httpHeaderCalcMask(HttpHeaderMask * mask, Http::HdrType http_hdr_type_enums[], size_t count);
+ 
+-inline bool
+-HttpHeader::chunked() const
+-{
+-    return has(Http::HdrType::TRANSFER_ENCODING) &&
+-           hasListMember(Http::HdrType::TRANSFER_ENCODING, "chunked", ',');
+-}
+-
+ void httpHeaderInitModule(void);
+ 
+ #endif /* SQUID_HTTPHEADER_H */
+diff --git a/src/client_side.cc b/src/client_side.cc
+index d61e278..429ce7f 100644
+--- a/src/client_side.cc
++++ b/src/client_side.cc
+@@ -1552,9 +1552,7 @@ void
+ clientProcessRequest(ConnStateData *conn, const Http1::RequestParserPointer &hp, Http::Stream *context)
+ {
+     ClientHttpRequest *http = context->http;
+-    bool chunked = false;
+     bool mustReplyToOptions = false;
+-    bool unsupportedTe = false;
+     bool expectBody = false;
+ 
+     // We already have the request parsed and checked, so we
+@@ -1611,13 +1609,7 @@ clientProcessRequest(ConnStateData *conn, const Http1::RequestParserPointer &hp,
+         request->http_ver.minor = http_ver.minor;
+     }
+ 
+-    if (request->header.chunked()) {
+-        chunked = true;
+-    } else if (request->header.has(Http::HdrType::TRANSFER_ENCODING)) {
+-        const String te = request->header.getList(Http::HdrType::TRANSFER_ENCODING);
+-        // HTTP/1.1 requires chunking to be the last encoding if there is one
+-        unsupportedTe = te.size() && te != "identity";
+-    } // else implied identity coding
++    const auto unsupportedTe = request->header.unsupportedTe();
+ 
+     mustReplyToOptions = (request->method == Http::METHOD_OPTIONS) &&
+                          (request->header.getInt64(Http::HdrType::MAX_FORWARDS) == 0);
+@@ -1634,6 +1626,7 @@ clientProcessRequest(ConnStateData *conn, const Http1::RequestParserPointer &hp,
+         return;
+     }
+ 
++    const auto chunked = request->header.chunked();
+     if (!chunked && !clientIsContentLengthValid(request.getRaw())) {
+         clientStreamNode *node = context->getClientReplyContext();
+         clientReplyContext *repContext = dynamic_cast<clientReplyContext *>(node->data.getRaw());
+diff --git a/src/http.cc b/src/http.cc
+index 1ed98ca..68594aa 100644
+--- a/src/http.cc
++++ b/src/http.cc
+@@ -1292,6 +1292,9 @@ HttpStateData::continueAfterParsingHeader()
+         } else if (vrep->header.conflictingContentLength()) {
+             fwd->dontRetry(true);
+             error = ERR_INVALID_RESP;
++        } else if (vrep->header.unsupportedTe()) {
++            fwd->dontRetry(true);
++            error = ERR_INVALID_RESP;
+         } else {
+             return true; // done parsing, got reply, and no error
+         }
diff --git a/SPECS/squid.spec b/SPECS/squid.spec
index 7c507cf..7c0b7fa 100644
--- a/SPECS/squid.spec
+++ b/SPECS/squid.spec
@@ -2,7 +2,7 @@
 Name: squid
 Version: 4.4
-Release: 8%{?dist}.1
+Release: 8%{?dist}.2
 Summary: The Squid proxy caching server
 Epoch: 7
 # See CREDITS for breakdown of non GPLv2+ code
@@ -49,6 +49,10 @@ Patch502: squid-4.4.0-CVE-2019-12519.patch
 Patch503: squid-4.4.0-CVE-2020-11945.patch
 # https://bugzilla.redhat.com/show_bug.cgi?id=1829402
 Patch504: squid-4.4.0-CVE-2019-12525.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1871700
+Patch505: squid-4.4.0-CVE-2020-15810.patch
+# https://bugzilla.redhat.com/show_bug.cgi?id=1871702
+Patch506: squid-4.4.0-CVE-2020-15811.patch
 
 Requires: bash >= 2.0
 Requires(pre): shadow-utils
@@ -113,6 +117,8 @@ lookup program (dnsserver), a program for retrieving FTP data
 %patch502 -p1 -b .CVE-2019-12519
 %patch503 -p1 -b .CVE-2020-11945
 %patch504 -p1 -b .CVE-2019-12525
+%patch505 -p1 -b .CVE-2020-15810
+%patch506 -p1 -b .CVE-2020-15811
 
 # https://bugzilla.redhat.com/show_bug.cgi?id=1679526
 # Patch in the vendor documentation and used different location for documentation
@@ -329,6 +335,12 @@ fi
 
 %changelog
+* Wed Aug 26 2020 Lubos Uhliarik - 7:4.4-8.2
+- Resolves: #1872345 - CVE-2020-15811 squid:4/squid: HTTP Request Splitting
+  could result in cache poisoning
+- Resolves: #1872330 - CVE-2020-15810 squid:4/squid: HTTP Request Smuggling
+  could result in cache poisoning
+
 * Wed Apr 29 2020 Lubos Uhliarik - 7:4.4-8.1
 - Resolves: #1828368 - CVE-2019-12519 squid: improper check for new member
   in ESIExpression::Evaluate allows for stack buffer overflow
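
The CVE-2020-15810 hunk above enforces the RFC 7230 token grammar on header field-names (Squid does this with CharacterSet::TCHAR). The snippet below is only a rough, standalone sketch of that rule for reference; the helper names isTchar and validFieldName are invented for the example and are not Squid code.

// Standalone illustration of RFC 7230 field-name validation (sketch, not Squid code).
// token = 1*TCHAR ; TCHAR = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" /
//                           "-" / "." / "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
#include <cctype>
#include <cstring>
#include <iostream>
#include <string>

static bool isTchar(unsigned char c)
{
    static const char punct[] = "!#$%&'*+-.^_`|~";
    return std::isalnum(c) || (c && std::strchr(punct, c));
}

// Accept a field-name only when every octet is a TCHAR; this is what rejects
// smuggling attempts such as "Content Length" or names carrying CR/LF bytes.
static bool validFieldName(const std::string &name)
{
    if (name.empty())
        return false;
    for (const unsigned char c : name)
        if (!isTchar(c))
            return false;
    return true;
}

int main()
{
    std::cout << validFieldName("Content-Length") << '\n'; // 1: valid token
    std::cout << validFieldName("Content Length") << '\n'; // 0: space is not a TCHAR
    std::cout << validFieldName("X-Foo\rSmuggle") << '\n'; // 0: CR rejected
}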
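Similarly, the CVE-2020-15811 change centralizes the Transfer-Encoding policy in HttpHeader::parse(): any Transfer-Encoding suppresses Content-Length, a lone "chunked" coding is kept, the deprecated "identity" is dropped, and anything else marks the message as unsupported. A minimal sketch of that decision follows, outside Squid's classes; the enum and function name are invented for the example, and an empty string stands in for "header absent".

// Sketch of the Transfer-Encoding handling policy (illustration only).
#include <iostream>
#include <string>

enum class TeVerdict { None, Chunked, DropIdentity, Unsupported };

// With any Transfer-Encoding present the caller must ignore Content-Length;
// only a single "chunked" coding is accepted, "identity" is removed,
// and everything else (including multiple codings) is rejected upstream.
static TeVerdict classifyTransferEncoding(const std::string &rawTe)
{
    if (rawTe.empty())
        return TeVerdict::None;
    if (rawTe == "chunked")
        return TeVerdict::Chunked;
    if (rawTe == "identity")
        return TeVerdict::DropIdentity;
    return TeVerdict::Unsupported; // e.g. "gzip, chunked" or malformed values
}

int main()
{
    std::cout << static_cast<int>(classifyTransferEncoding("chunked")) << '\n';       // 1
    std::cout << static_cast<int>(classifyTransferEncoding("identity")) << '\n';      // 2
    std::cout << static_cast<int>(classifyTransferEncoding("gzip, chunked")) << '\n'; // 3
}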