From 05f6af2f4c85cc99323cfff6149c3d74af661b6d Mon Sep 17 00:00:00 2001
From: Amos Jeffries <yadij@users.noreply.github.com>
Date: Fri, 13 Oct 2023 08:44:16 +0000
Subject: [PATCH] RFC 9112: Improve HTTP chunked encoding compliance (#1498)

Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/squid/tree/debian/patches/CVE-2023-46846.patch?h=ubuntu/focal-security&id=9ccd217ca9428c9a6597e9310a99552026b245fa
Upstream commit https://github.com/squid-cache/squid/commit/05f6af2f4c85cc99323cfff6149c3d74af661b6d]
CVE: CVE-2023-46846
Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
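
For context, the vulnerability hinges on chunk-size parsing: a permissive parser that
tolerates a "0x"/"0X" prefix or a missing CRLF after the chunk-size line can disagree
with an upstream server about where a chunk ends, enabling request smuggling. The
standalone C++ sketch below (illustrative only; parseStrictChunkSizeLine and hexValue
are hypothetical names, not Squid code) shows the two stricter checks this patch
enforces: no hexadecimal prefix before the digits and a mandatory CRLF terminator.

    #include <cstdint>
    #include <string>

    // Hypothetical helper: value of a hexadecimal digit, or -1 if not a hex digit.
    static int hexValue(const char c)
    {
        if (c >= '0' && c <= '9') return c - '0';
        if (c >= 'a' && c <= 'f') return 10 + (c - 'a');
        if (c >= 'A' && c <= 'F') return 10 + (c - 'A');
        return -1;
    }

    // Hypothetical helper: accept only a strict "chunk-size CRLF" line
    // (chunk extensions omitted for brevity). Mirrors the rules the patch adds:
    // reject a "0x"/"0X" prefix and require the full CRLF line terminator.
    static bool parseStrictChunkSizeLine(const std::string &line, int64_t &size)
    {
        if (line.compare(0, 2, "0x") == 0 || line.compare(0, 2, "0X") == 0)
            return false; // banned hexadecimal prefix

        size_t i = 0;
        int64_t value = 0;
        while (i < line.size() && hexValue(line[i]) >= 0) {
            value = value * 16 + hexValue(line[i]); // overflow checks omitted in this sketch
            ++i;
        }
        if (i == 0)
            return false; // no chunk-size digits at all

        if (line.compare(i, std::string::npos, "\r\n") != 0)
            return false; // anything but a single terminating CRLF is rejected

        size = value;
        return true;
    }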
---
 src/http/one/Parser.cc          |  8 +-------
 src/http/one/Parser.h           |  4 +---
 src/http/one/TeChunkedParser.cc | 23 ++++++++++++++++++-----
 src/parser/Tokenizer.cc         | 12 ++++++++++++
 src/parser/Tokenizer.h          |  7 +++++++
 5 files changed, 39 insertions(+), 15 deletions(-)

--- a/src/http/one/Parser.cc
+++ b/src/http/one/Parser.cc
@@ -65,16 +65,10 @@ Http::One::Parser::DelimiterCharacters()
 void
 Http::One::Parser::skipLineTerminator(Tokenizer &tok) const
 {
-    if (tok.skip(Http1::CrLf()))
-        return;
-
     if (Config.onoff.relaxed_header_parser && tok.skipOne(CharacterSet::LF))
         return;
 
-    if (tok.atEnd() || (tok.remaining().length() == 1 && tok.remaining().at(0) == '\r'))
-        throw InsufficientInput();
-
-    throw TexcHere("garbage instead of CRLF line terminator");
+    tok.skipRequired("line-terminating CRLF", Http1::CrLf());
 }
 
 /// all characters except the LF line terminator
--- a/src/http/one/Parser.h
+++ b/src/http/one/Parser.h
@@ -120,9 +120,7 @@ protected:
      * detect and skip the CRLF or (if tolerant) LF line terminator
      * consume from the tokenizer.
      *
-     * \throws exception on bad or InsuffientInput.
-     * \retval true only if line terminator found.
-     * \retval false incomplete or missing line terminator, need more data.
+     * \throws exception on bad or InsufficientInput
      */
     void skipLineTerminator(Tokenizer &) const;
 
--- a/src/http/one/TeChunkedParser.cc
+++ b/src/http/one/TeChunkedParser.cc
@@ -91,6 +91,11 @@ Http::One::TeChunkedParser::parseChunkSi
 {
     Must(theChunkSize <= 0); // Should(), really
 
+    static const SBuf bannedHexPrefixLower("0x");
+    static const SBuf bannedHexPrefixUpper("0X");
+    if (tok.skip(bannedHexPrefixLower) || tok.skip(bannedHexPrefixUpper))
+        throw TextException("chunk starts with 0x", Here());
+
     int64_t size = -1;
     if (tok.int64(size, 16, false) && !tok.atEnd()) {
         if (size < 0)
@@ -121,7 +126,7 @@ Http::One::TeChunkedParser::parseChunkMe
     // bad or insufficient input, like in the code below. TODO: Expand up.
     try {
         parseChunkExtensions(tok); // a possibly empty chunk-ext list
-        skipLineTerminator(tok);
+        tok.skipRequired("CRLF after [chunk-ext]", Http1::CrLf());
         buf_ = tok.remaining();
         parsingStage_ = theChunkSize ? Http1::HTTP_PARSE_CHUNK : Http1::HTTP_PARSE_MIME;
         return true;
@@ -132,12 +137,14 @@ Http::One::TeChunkedParser::parseChunkMe
     // other exceptions bubble up to kill message parsing
 }
 
-/// Parses the chunk-ext list (RFC 7230 section 4.1.1 and its Errata #4667):
+/// Parses the chunk-ext list (RFC 9112 section 7.1.1:
 /// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
 void
-Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &tok)
+Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &callerTok)
 {
     do {
+        auto tok = callerTok;
+
         ParseBws(tok); // Bug 4492: IBM_HTTP_Server sends SP after chunk-size
 
         if (!tok.skip(';'))
@@ -145,6 +152,7 @@ Http::One::TeChunkedParser::parseChunkEx
 
         parseOneChunkExtension(tok);
         buf_ = tok.remaining(); // got one extension
+        callerTok = tok;
     } while (true);
 }
 
@@ -158,11 +166,14 @@ Http::One::ChunkExtensionValueParser::Ig
 /// Parses a single chunk-ext list element:
 /// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
 void
-Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &tok)
+Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &callerTok)
 {
+    auto tok = callerTok;
+
     ParseBws(tok); // Bug 4492: ICAP servers send SP before chunk-ext-name
 
     const auto extName = tok.prefix("chunk-ext-name", CharacterSet::TCHAR);
+    callerTok = tok; // in case we determine that this is a valueless chunk-ext
 
     ParseBws(tok);
 
@@ -176,6 +187,8 @@ Http::One::TeChunkedParser::parseOneChun
         customExtensionValueParser->parse(tok, extName);
     else
         ChunkExtensionValueParser::Ignore(tok, extName);
+
+    callerTok = tok;
 }
 
 bool
@@ -209,7 +222,7 @@ Http::One::TeChunkedParser::parseChunkEn
     Must(theLeftBodySize == 0); // Should(), really
 
     try {
-        skipLineTerminator(tok);
+        tok.skipRequired("chunk CRLF", Http1::CrLf());
         buf_ = tok.remaining(); // parse checkpoint
         theChunkSize = 0; // done with the current chunk
         parsingStage_ = Http1::HTTP_PARSE_CHUNK_SZ;
--- a/src/parser/Tokenizer.cc
+++ b/src/parser/Tokenizer.cc
@@ -147,6 +147,18 @@ Parser::Tokenizer::skipAll(const Charact
     return success(prefixLen);
 }
 
+void
+Parser::Tokenizer::skipRequired(const char *description, const SBuf &tokenToSkip)
+{
+    if (skip(tokenToSkip) || tokenToSkip.isEmpty())
+        return;
+
+    if (tokenToSkip.startsWith(buf_))
+        throw InsufficientInput();
+
+    throw TextException(ToSBuf("cannot skip ", description), Here());
+}
+
 bool
 Parser::Tokenizer::skipOne(const CharacterSet &chars)
 {
--- a/src/parser/Tokenizer.h
+++ b/src/parser/Tokenizer.h
@@ -115,6 +115,13 @@ public:
      */
     SBuf::size_type skipAll(const CharacterSet &discardables);
 
+    /** skips a given character sequence (string);
+     * does nothing if the sequence is empty
+     *
+     * \throws exception on mismatching prefix or InsufficientInput
+     */
+    void skipRequired(const char *description, const SBuf &tokenToSkip);
+
     /** Removes a single trailing character from the set.
      *
      * \return whether a character was removed