Skip to content

Commit b77df05

Browse files
committed
feat(http1): trailer limits are configurable
Trailer parsing now honors the h1_max_headers option. It also plumbs through a parameter in preparation for a future h1_max_header_size option (not yet exposed; see the in-code TODOs).
1 parent ae36700 commit b77df05

File tree

2 files changed

+87
-49
lines changed

2 files changed

+87
-49
lines changed

src/proto/h1/conn.rs

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -267,11 +267,20 @@ where
267267
self.try_keep_alive(cx);
268268
}
269269
} else if msg.expect_continue && msg.head.version.gt(&Version::HTTP_10) {
270-
self.state.reading =
271-
Reading::Continue(Decoder::new(msg.decode, self.state.h1_max_headers));
270+
let h1_max_header_size = None; // TODO: remove this when we land h1_max_header_size support
271+
self.state.reading = Reading::Continue(Decoder::new(
272+
msg.decode,
273+
self.state.h1_max_headers,
274+
h1_max_header_size,
275+
));
272276
wants = wants.add(Wants::EXPECT);
273277
} else {
274-
self.state.reading = Reading::Body(Decoder::new(msg.decode, self.state.h1_max_headers));
278+
let h1_max_header_size = None; // TODO: remove this when we land h1_max_header_size support
279+
self.state.reading = Reading::Body(Decoder::new(
280+
msg.decode,
281+
self.state.h1_max_headers,
282+
h1_max_header_size,
283+
));
275284
}
276285

277286
self.state.allow_trailer_fields = msg

src/proto/h1/decode.rs

Lines changed: 75 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ const CHUNKED_EXTENSIONS_LIMIT: u64 = 1024 * 16;
2222

2323
/// Maximum number of bytes allowed for all trailer fields.
2424
///
25-
/// TODO: replace this when we land h1_max_header_size support
26-
const TRAILER_LIMIT: u64 = DEFAULT_MAX_HEADERS as u64 * 64;
25+
/// TODO: remove this when we land h1_max_header_size support
26+
const TRAILER_LIMIT: usize = 1024 * 16;
2727

2828
/// Decoders to handle different Transfer-Encodings.
2929
///
@@ -46,6 +46,7 @@ enum Kind {
4646
trailers_buf: Option<BytesMut>,
4747
trailers_cnt: usize,
4848
h1_max_headers: Option<usize>,
49+
h1_max_header_size: Option<usize>,
4950
},
5051
/// A Reader used for responses that don't indicate a length or chunked.
5152
///
@@ -92,7 +93,10 @@ impl Decoder {
9293
}
9394
}
9495

95-
pub(crate) fn chunked(h1_max_headers: Option<usize>) -> Decoder {
96+
pub(crate) fn chunked(
97+
h1_max_headers: Option<usize>,
98+
h1_max_header_size: Option<usize>,
99+
) -> Decoder {
96100
Decoder {
97101
kind: Kind::Chunked {
98102
state: ChunkedState::new(),
@@ -101,6 +105,7 @@ impl Decoder {
101105
trailers_buf: None,
102106
trailers_cnt: 0,
103107
h1_max_headers,
108+
h1_max_header_size,
104109
},
105110
}
106111
}
@@ -111,9 +116,13 @@ impl Decoder {
111116
}
112117
}
113118

114-
pub(super) fn new(len: DecodedLength, h1_max_headers: Option<usize>) -> Self {
119+
pub(super) fn new(
120+
len: DecodedLength,
121+
h1_max_headers: Option<usize>,
122+
h1_max_header_size: Option<usize>,
123+
) -> Self {
115124
match len {
116-
DecodedLength::CHUNKED => Decoder::chunked(h1_max_headers),
125+
DecodedLength::CHUNKED => Decoder::chunked(h1_max_headers, h1_max_header_size),
117126
DecodedLength::CLOSE_DELIMITED => Decoder::eof(),
118127
length => Decoder::length(length.danger_len()),
119128
}
@@ -167,7 +176,10 @@ impl Decoder {
167176
ref mut trailers_buf,
168177
ref mut trailers_cnt,
169178
ref h1_max_headers,
179+
ref h1_max_header_size,
170180
} => {
181+
let h1_max_headers = h1_max_headers.unwrap_or(DEFAULT_MAX_HEADERS);
182+
let h1_max_header_size = h1_max_header_size.unwrap_or(TRAILER_LIMIT);
171183
loop {
172184
let mut buf = None;
173185
// advances the chunked state
@@ -180,6 +192,7 @@ impl Decoder {
180192
trailers_buf,
181193
trailers_cnt,
182194
h1_max_headers,
195+
h1_max_header_size
183196
))?;
184197
if *state == ChunkedState::End {
185198
trace!("end of chunked");
@@ -188,7 +201,7 @@ impl Decoder {
188201
trace!("found possible trailers");
189202

190203
// decoder enforces that trailers count will not exceed h1_max_headers
191-
if *trailers_cnt >= h1_max_headers.unwrap_or(DEFAULT_MAX_HEADERS) {
204+
if *trailers_cnt >= h1_max_headers {
192205
return Poll::Ready(Err(io::Error::new(
193206
io::ErrorKind::InvalidData,
194207
"chunk trailers count overflow",
@@ -267,11 +280,10 @@ macro_rules! or_overflow {
267280
}
268281

269282
macro_rules! put_u8 {
270-
($trailers_buf:expr, $byte:expr) => {
283+
($trailers_buf:expr, $byte:expr, $limit:expr) => {
271284
$trailers_buf.put_u8($byte);
272285

273-
// check if trailer_buf exceeds TRAILER_LIMIT
274-
if $trailers_buf.len() as u64 >= TRAILER_LIMIT {
286+
if $trailers_buf.len() >= $limit {
275287
return Poll::Ready(Err(io::Error::new(
276288
io::ErrorKind::InvalidData,
277289
"chunk trailers bytes over limit",
@@ -293,7 +305,8 @@ impl ChunkedState {
293305
buf: &mut Option<Bytes>,
294306
trailers_buf: &mut Option<BytesMut>,
295307
trailers_cnt: &mut usize,
296-
h1_max_headers: &Option<usize>,
308+
h1_max_headers: usize,
309+
h1_max_header_size: usize,
297310
) -> Poll<Result<ChunkedState, io::Error>> {
298311
use self::ChunkedState::*;
299312
match *self {
@@ -305,12 +318,17 @@ impl ChunkedState {
305318
Body => ChunkedState::read_body(cx, body, size, buf),
306319
BodyCr => ChunkedState::read_body_cr(cx, body),
307320
BodyLf => ChunkedState::read_body_lf(cx, body),
308-
Trailer => ChunkedState::read_trailer(cx, body, trailers_buf),
309-
TrailerLf => {
310-
ChunkedState::read_trailer_lf(cx, body, trailers_buf, trailers_cnt, h1_max_headers)
311-
}
312-
EndCr => ChunkedState::read_end_cr(cx, body, trailers_buf),
313-
EndLf => ChunkedState::read_end_lf(cx, body, trailers_buf),
321+
Trailer => ChunkedState::read_trailer(cx, body, trailers_buf, h1_max_header_size),
322+
TrailerLf => ChunkedState::read_trailer_lf(
323+
cx,
324+
body,
325+
trailers_buf,
326+
trailers_cnt,
327+
h1_max_headers,
328+
h1_max_header_size,
329+
),
330+
EndCr => ChunkedState::read_end_cr(cx, body, trailers_buf, h1_max_header_size),
331+
EndLf => ChunkedState::read_end_lf(cx, body, trailers_buf, h1_max_header_size),
314332
End => Poll::Ready(Ok(ChunkedState::End)),
315333
}
316334
}
@@ -512,11 +530,16 @@ impl ChunkedState {
512530
cx: &mut Context<'_>,
513531
rdr: &mut R,
514532
trailers_buf: &mut Option<BytesMut>,
533+
h1_max_header_size: usize,
515534
) -> Poll<Result<ChunkedState, io::Error>> {
516535
trace!("read_trailer");
517536
let byte = byte!(rdr, cx);
518537

519-
put_u8!(trailers_buf.as_mut().expect("trailers_buf is None"), byte);
538+
put_u8!(
539+
trailers_buf.as_mut().expect("trailers_buf is None"),
540+
byte,
541+
h1_max_header_size
542+
);
520543

521544
match byte {
522545
b'\r' => Poll::Ready(Ok(ChunkedState::TrailerLf)),
@@ -529,20 +552,25 @@ impl ChunkedState {
529552
rdr: &mut R,
530553
trailers_buf: &mut Option<BytesMut>,
531554
trailers_cnt: &mut usize,
532-
h1_max_headers: &Option<usize>,
555+
h1_max_headers: usize,
556+
h1_max_header_size: usize,
533557
) -> Poll<Result<ChunkedState, io::Error>> {
534558
let byte = byte!(rdr, cx);
535559
match byte {
536560
b'\n' => {
537-
if *trailers_cnt >= h1_max_headers.unwrap_or(DEFAULT_MAX_HEADERS) {
561+
if *trailers_cnt >= h1_max_headers {
538562
return Poll::Ready(Err(io::Error::new(
539563
io::ErrorKind::InvalidData,
540564
"chunk trailers count overflow",
541565
)));
542566
}
543567
*trailers_cnt += 1;
544568

545-
put_u8!(trailers_buf.as_mut().expect("trailers_buf is None"), byte);
569+
put_u8!(
570+
trailers_buf.as_mut().expect("trailers_buf is None"),
571+
byte,
572+
h1_max_header_size
573+
);
546574

547575
Poll::Ready(Ok(ChunkedState::EndCr))
548576
}
@@ -557,12 +585,13 @@ impl ChunkedState {
557585
cx: &mut Context<'_>,
558586
rdr: &mut R,
559587
trailers_buf: &mut Option<BytesMut>,
588+
h1_max_header_size: usize,
560589
) -> Poll<Result<ChunkedState, io::Error>> {
561590
let byte = byte!(rdr, cx);
562591
match byte {
563592
b'\r' => {
564593
if let Some(trailers_buf) = trailers_buf {
565-
put_u8!(trailers_buf, byte);
594+
put_u8!(trailers_buf, byte, h1_max_header_size);
566595
}
567596
Poll::Ready(Ok(ChunkedState::EndLf))
568597
}
@@ -575,7 +604,7 @@ impl ChunkedState {
575604
*trailers_buf = Some(buf);
576605
}
577606
Some(ref mut trailers_buf) => {
578-
put_u8!(trailers_buf, byte);
607+
put_u8!(trailers_buf, byte, h1_max_header_size);
579608
}
580609
}
581610

@@ -587,12 +616,13 @@ impl ChunkedState {
587616
cx: &mut Context<'_>,
588617
rdr: &mut R,
589618
trailers_buf: &mut Option<BytesMut>,
619+
h1_max_header_size: usize,
590620
) -> Poll<Result<ChunkedState, io::Error>> {
591621
let byte = byte!(rdr, cx);
592622
match byte {
593623
b'\n' => {
594624
if let Some(trailers_buf) = trailers_buf {
595-
put_u8!(trailers_buf, byte);
625+
put_u8!(trailers_buf, byte, h1_max_header_size);
596626
}
597627
Poll::Ready(Ok(ChunkedState::End))
598628
}
@@ -726,7 +756,8 @@ mod tests {
726756
&mut None,
727757
&mut None,
728758
&mut trailers_cnt,
729-
&None,
759+
DEFAULT_MAX_HEADERS,
760+
TRAILER_LIMIT,
730761
)
731762
})
732763
.await;
@@ -755,7 +786,8 @@ mod tests {
755786
&mut None,
756787
&mut None,
757788
&mut trailers_cnt,
758-
&None,
789+
DEFAULT_MAX_HEADERS,
790+
TRAILER_LIMIT,
759791
)
760792
})
761793
.await;
@@ -841,7 +873,7 @@ mod tests {
841873
9\r\n\
842874
foo bar\
843875
"[..];
844-
let mut decoder = Decoder::chunked(None);
876+
let mut decoder = Decoder::chunked(None, None);
845877
assert_eq!(
846878
decoder
847879
.decode_fut(&mut bytes)
@@ -860,7 +892,7 @@ mod tests {
860892
#[tokio::test]
861893
async fn test_read_chunked_single_read() {
862894
let mut mock_buf = &b"10\r\n1234567890abcdef\r\n0\r\n"[..];
863-
let buf = Decoder::chunked(None)
895+
let buf = Decoder::chunked(None, None)
864896
.decode_fut(&mut mock_buf)
865897
.await
866898
.expect("decode")
@@ -885,7 +917,7 @@ mod tests {
885917
scratch.extend(b"0\r\n\r\n");
886918
let mut mock_buf = Bytes::from(scratch);
887919

888-
let mut decoder = Decoder::chunked(None);
920+
let mut decoder = Decoder::chunked(None, None);
889921
let buf1 = decoder
890922
.decode_fut(&mut mock_buf)
891923
.await
@@ -906,7 +938,7 @@ mod tests {
906938
#[tokio::test]
907939
async fn test_read_chunked_trailer_with_missing_lf() {
908940
let mut mock_buf = &b"10\r\n1234567890abcdef\r\n0\r\nbad\r\r\n"[..];
909-
let mut decoder = Decoder::chunked(None);
941+
let mut decoder = Decoder::chunked(None, None);
910942
decoder.decode_fut(&mut mock_buf).await.expect("decode");
911943
let e = decoder.decode_fut(&mut mock_buf).await.unwrap_err();
912944
assert_eq!(e.kind(), io::ErrorKind::InvalidInput);
@@ -916,7 +948,7 @@ mod tests {
916948
#[tokio::test]
917949
async fn test_read_chunked_after_eof() {
918950
let mut mock_buf = &b"10\r\n1234567890abcdef\r\n0\r\n\r\n"[..];
919-
let mut decoder = Decoder::chunked(None);
951+
let mut decoder = Decoder::chunked(None, None);
920952

921953
// normal read
922954
let buf = decoder
@@ -1006,7 +1038,7 @@ mod tests {
10061038
async fn test_read_chunked_async() {
10071039
let content = "3\r\nfoo\r\n3\r\nbar\r\n0\r\n\r\n";
10081040
let expected = "foobar";
1009-
all_async_cases(content, expected, Decoder::chunked(None)).await;
1041+
all_async_cases(content, expected, Decoder::chunked(None, None)).await;
10101042
}
10111043

10121044
#[cfg(not(miri))]
@@ -1093,7 +1125,7 @@ mod tests {
10931125
}
10941126

10951127
#[tokio::test]
1096-
async fn test_trailer_h1_max_header_size_enforced() {
1128+
async fn test_trailer_max_headers_enforced() {
10971129
let h1_max_headers = 10;
10981130
let mut scratch = vec![];
10991131
scratch.extend(b"10\r\n1234567890abcdef\r\n0\r\n");
@@ -1103,7 +1135,7 @@ mod tests {
11031135
scratch.extend(b"\r\n");
11041136
let mut mock_buf = Bytes::from(scratch);
11051137

1106-
let mut decoder = Decoder::chunked(Some(h1_max_headers));
1138+
let mut decoder = Decoder::chunked(Some(h1_max_headers), None);
11071139

11081140
// ready chunked body
11091141
let buf = decoder
@@ -1123,20 +1155,15 @@ mod tests {
11231155
}
11241156

11251157
#[tokio::test]
1126-
async fn test_trailer_limit_huge_trailer() {
1158+
async fn test_trailer_max_header_size_huge_trailer() {
1159+
let max_header_size = 1024;
11271160
let mut scratch = vec![];
11281161
scratch.extend(b"10\r\n1234567890abcdef\r\n0\r\n");
1129-
scratch.extend(
1130-
format!(
1131-
"huge_trailer: {}\r\n",
1132-
"x".repeat(TRAILER_LIMIT.try_into().unwrap())
1133-
)
1134-
.as_bytes(),
1135-
);
1162+
scratch.extend(format!("huge_trailer: {}\r\n", "x".repeat(max_header_size)).as_bytes());
11361163
scratch.extend(b"\r\n");
11371164
let mut mock_buf = Bytes::from(scratch);
11381165

1139-
let mut decoder = Decoder::chunked(None);
1166+
let mut decoder = Decoder::chunked(None, Some(max_header_size));
11401167

11411168
// ready chunked body
11421169
let buf = decoder
@@ -1156,18 +1183,20 @@ mod tests {
11561183
}
11571184

11581185
#[tokio::test]
1159-
async fn test_trailer_limit_many_small_trailers() {
1186+
async fn test_trailer_max_header_size_many_small_trailers() {
1187+
let max_headers = 10;
1188+
let header_size = 64;
11601189
let mut scratch = vec![];
11611190
scratch.extend(b"10\r\n1234567890abcdef\r\n0\r\n");
11621191

1163-
for i in 0..TRAILERS_FIELD_LIMIT {
1164-
scratch.extend(format!("trailer{}: {}\r\n", i, "x".repeat(64)).as_bytes());
1192+
for i in 0..max_headers {
1193+
scratch.extend(format!("trailer{}: {}\r\n", i, "x".repeat(header_size)).as_bytes());
11651194
}
11661195

11671196
scratch.extend(b"\r\n");
11681197
let mut mock_buf = Bytes::from(scratch);
11691198

1170-
let mut decoder = Decoder::chunked(None);
1199+
let mut decoder = Decoder::chunked(None, Some(max_headers * header_size));
11711200

11721201
// ready chunked body
11731202
let buf = decoder

0 commit comments

Comments
 (0)