@@ -22,8 +22,8 @@ const CHUNKED_EXTENSIONS_LIMIT: u64 = 1024 * 16;
 
 /// Maximum number of bytes allowed for all trailer fields.
 ///
-/// TODO: replace this when we land h1_max_header_size support
-const TRAILER_LIMIT: u64 = DEFAULT_MAX_HEADERS as u64 * 64;
+/// TODO: remove this when we land h1_max_header_size support
+const TRAILER_LIMIT: usize = 1024 * 16;
 
 /// Decoders to handle different Transfer-Encodings.
 ///
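Note (not part of the diff): assuming hyper's `DEFAULT_MAX_HEADERS` is 100, the old derived cap worked out to 6,400 bytes, while the new flat default is 16 KiB, the same value as `CHUNKED_EXTENSIONS_LIMIT` above. A rough comparison:

```rust
// Illustrative only — not part of the patch. Assumes DEFAULT_MAX_HEADERS == 100.
const OLD_TRAILER_LIMIT: u64 = 100 * 64; // 6_400 bytes
const NEW_TRAILER_LIMIT: usize = 1024 * 16; // 16_384 bytes, matching CHUNKED_EXTENSIONS_LIMIT
```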
@@ -46,6 +46,7 @@ enum Kind {
         trailers_buf: Option<BytesMut>,
         trailers_cnt: usize,
         h1_max_headers: Option<usize>,
+        h1_max_header_size: Option<usize>,
     },
     /// A Reader used for responses that don't indicate a length or chunked.
     ///
@@ -92,7 +93,10 @@ impl Decoder {
         }
     }
 
-    pub(crate) fn chunked(h1_max_headers: Option<usize>) -> Decoder {
+    pub(crate) fn chunked(
+        h1_max_headers: Option<usize>,
+        h1_max_header_size: Option<usize>,
+    ) -> Decoder {
         Decoder {
             kind: Kind::Chunked {
                 state: ChunkedState::new(),
@@ -101,6 +105,7 @@ impl Decoder {
                 trailers_buf: None,
                 trailers_cnt: 0,
                 h1_max_headers,
+                h1_max_header_size,
             },
         }
     }
@@ -111,9 +116,13 @@ impl Decoder {
         }
     }
 
-    pub(super) fn new(len: DecodedLength, h1_max_headers: Option<usize>) -> Self {
+    pub(super) fn new(
+        len: DecodedLength,
+        h1_max_headers: Option<usize>,
+        h1_max_header_size: Option<usize>,
+    ) -> Self {
         match len {
-            DecodedLength::CHUNKED => Decoder::chunked(h1_max_headers),
+            DecodedLength::CHUNKED => Decoder::chunked(h1_max_headers, h1_max_header_size),
             DecodedLength::CLOSE_DELIMITED => Decoder::eof(),
             length => Decoder::length(length.danger_len()),
         }
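Note (not part of the diff): a minimal sketch of what call sites look like with the widened signatures; the values are hypothetical, and `None` keeps the built-in defaults:

```rust
// Hypothetical crate-internal callers, for illustration only.
let lenient = Decoder::chunked(None, None); // DEFAULT_MAX_HEADERS / TRAILER_LIMIT apply
let strict = Decoder::chunked(Some(50), Some(8 * 1024)); // 50 trailer fields, 8 KiB of trailer bytes
let from_len = Decoder::new(DecodedLength::CHUNKED, None, Some(8 * 1024));
```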
@@ -167,7 +176,10 @@ impl Decoder {
                 ref mut trailers_buf,
                 ref mut trailers_cnt,
                 ref h1_max_headers,
+                ref h1_max_header_size,
             } => {
+                let h1_max_headers = h1_max_headers.unwrap_or(DEFAULT_MAX_HEADERS);
+                let h1_max_header_size = h1_max_header_size.unwrap_or(TRAILER_LIMIT);
                 loop {
                     let mut buf = None;
                     // advances the chunked state
@@ -180,6 +192,7 @@ impl Decoder {
                         trailers_buf,
                         trailers_cnt,
                         h1_max_headers,
+                        h1_max_header_size
                     ))?;
                     if *state == ChunkedState::End {
                         trace!("end of chunked");
@@ -188,7 +201,7 @@ impl Decoder {
                         trace!("found possible trailers");
 
                         // decoder enforces that trailers count will not exceed h1_max_headers
-                        if *trailers_cnt >= h1_max_headers.unwrap_or(DEFAULT_MAX_HEADERS) {
+                        if *trailers_cnt >= h1_max_headers {
                             return Poll::Ready(Err(io::Error::new(
                                 io::ErrorKind::InvalidData,
                                 "chunk trailers count overflow",
@@ -267,11 +280,10 @@ macro_rules! or_overflow {
 }
 
 macro_rules! put_u8 {
-    ($trailers_buf:expr, $byte:expr) => {
+    ($trailers_buf:expr, $byte:expr, $limit:expr) => {
         $trailers_buf.put_u8($byte);
 
-        // check if trailer_buf exceeds TRAILER_LIMIT
-        if $trailers_buf.len() as u64 >= TRAILER_LIMIT {
+        if $trailers_buf.len() >= $limit {
             return Poll::Ready(Err(io::Error::new(
                 io::ErrorKind::InvalidData,
                 "chunk trailers bytes over limit",
@@ -293,7 +305,8 @@ impl ChunkedState {
         buf: &mut Option<Bytes>,
         trailers_buf: &mut Option<BytesMut>,
         trailers_cnt: &mut usize,
-        h1_max_headers: &Option<usize>,
+        h1_max_headers: usize,
+        h1_max_header_size: usize,
     ) -> Poll<Result<ChunkedState, io::Error>> {
         use self::ChunkedState::*;
         match *self {
@@ -305,12 +318,17 @@ impl ChunkedState {
             Body => ChunkedState::read_body(cx, body, size, buf),
             BodyCr => ChunkedState::read_body_cr(cx, body),
             BodyLf => ChunkedState::read_body_lf(cx, body),
-            Trailer => ChunkedState::read_trailer(cx, body, trailers_buf),
-            TrailerLf => {
-                ChunkedState::read_trailer_lf(cx, body, trailers_buf, trailers_cnt, h1_max_headers)
-            }
-            EndCr => ChunkedState::read_end_cr(cx, body, trailers_buf),
-            EndLf => ChunkedState::read_end_lf(cx, body, trailers_buf),
+            Trailer => ChunkedState::read_trailer(cx, body, trailers_buf, h1_max_header_size),
+            TrailerLf => ChunkedState::read_trailer_lf(
+                cx,
+                body,
+                trailers_buf,
+                trailers_cnt,
+                h1_max_headers,
+                h1_max_header_size,
+            ),
+            EndCr => ChunkedState::read_end_cr(cx, body, trailers_buf, h1_max_header_size),
+            EndLf => ChunkedState::read_end_lf(cx, body, trailers_buf, h1_max_header_size),
             End => Poll::Ready(Ok(ChunkedState::End)),
         }
     }
@@ -512,11 +530,16 @@ impl ChunkedState {
         cx: &mut Context<'_>,
         rdr: &mut R,
         trailers_buf: &mut Option<BytesMut>,
+        h1_max_header_size: usize,
     ) -> Poll<Result<ChunkedState, io::Error>> {
         trace!("read_trailer");
         let byte = byte!(rdr, cx);
 
-        put_u8!(trailers_buf.as_mut().expect("trailers_buf is None"), byte);
+        put_u8!(
+            trailers_buf.as_mut().expect("trailers_buf is None"),
+            byte,
+            h1_max_header_size
+        );
 
         match byte {
             b'\r' => Poll::Ready(Ok(ChunkedState::TrailerLf)),
@@ -529,20 +552,25 @@ impl ChunkedState {
         rdr: &mut R,
         trailers_buf: &mut Option<BytesMut>,
         trailers_cnt: &mut usize,
-        h1_max_headers: &Option<usize>,
+        h1_max_headers: usize,
+        h1_max_header_size: usize,
     ) -> Poll<Result<ChunkedState, io::Error>> {
         let byte = byte!(rdr, cx);
         match byte {
             b'\n' => {
-                if *trailers_cnt >= h1_max_headers.unwrap_or(DEFAULT_MAX_HEADERS) {
+                if *trailers_cnt >= h1_max_headers {
                     return Poll::Ready(Err(io::Error::new(
                         io::ErrorKind::InvalidData,
                         "chunk trailers count overflow",
                     )));
                 }
                 *trailers_cnt += 1;
 
-                put_u8!(trailers_buf.as_mut().expect("trailers_buf is None"), byte);
+                put_u8!(
+                    trailers_buf.as_mut().expect("trailers_buf is None"),
+                    byte,
+                    h1_max_header_size
+                );
 
                 Poll::Ready(Ok(ChunkedState::EndCr))
             }
@@ -557,12 +585,13 @@ impl ChunkedState {
         cx: &mut Context<'_>,
         rdr: &mut R,
         trailers_buf: &mut Option<BytesMut>,
+        h1_max_header_size: usize,
     ) -> Poll<Result<ChunkedState, io::Error>> {
         let byte = byte!(rdr, cx);
         match byte {
             b'\r' => {
                 if let Some(trailers_buf) = trailers_buf {
-                    put_u8!(trailers_buf, byte);
+                    put_u8!(trailers_buf, byte, h1_max_header_size);
                 }
                 Poll::Ready(Ok(ChunkedState::EndLf))
             }
@@ -575,7 +604,7 @@ impl ChunkedState {
                 *trailers_buf = Some(buf);
             }
             Some(ref mut trailers_buf) => {
-                put_u8!(trailers_buf, byte);
+                put_u8!(trailers_buf, byte, h1_max_header_size);
             }
         }
 
@@ -587,12 +616,13 @@ impl ChunkedState {
         cx: &mut Context<'_>,
         rdr: &mut R,
         trailers_buf: &mut Option<BytesMut>,
+        h1_max_header_size: usize,
     ) -> Poll<Result<ChunkedState, io::Error>> {
         let byte = byte!(rdr, cx);
         match byte {
             b'\n' => {
                 if let Some(trailers_buf) = trailers_buf {
-                    put_u8!(trailers_buf, byte);
+                    put_u8!(trailers_buf, byte, h1_max_header_size);
                 }
                 Poll::Ready(Ok(ChunkedState::End))
             }
@@ -726,7 +756,8 @@ mod tests {
                 &mut None,
                 &mut None,
                 &mut trailers_cnt,
-                &None,
+                DEFAULT_MAX_HEADERS,
+                TRAILER_LIMIT,
             )
         })
         .await;
@@ -755,7 +786,8 @@ mod tests {
                 &mut None,
                 &mut None,
                 &mut trailers_cnt,
-                &None,
+                DEFAULT_MAX_HEADERS,
+                TRAILER_LIMIT,
             )
         })
         .await;
@@ -841,7 +873,7 @@ mod tests {
             9\r\n\
             foo bar\
             "[..];
-        let mut decoder = Decoder::chunked(None);
+        let mut decoder = Decoder::chunked(None, None);
         assert_eq!(
             decoder
                 .decode_fut(&mut bytes)
@@ -860,7 +892,7 @@ mod tests {
     #[tokio::test]
     async fn test_read_chunked_single_read() {
         let mut mock_buf = &b"10\r\n1234567890abcdef\r\n0\r\n"[..];
-        let buf = Decoder::chunked(None)
+        let buf = Decoder::chunked(None, None)
             .decode_fut(&mut mock_buf)
             .await
             .expect("decode")
@@ -885,7 +917,7 @@ mod tests {
         scratch.extend(b"0\r\n\r\n");
         let mut mock_buf = Bytes::from(scratch);
 
-        let mut decoder = Decoder::chunked(None);
+        let mut decoder = Decoder::chunked(None, None);
         let buf1 = decoder
             .decode_fut(&mut mock_buf)
             .await
@@ -906,7 +938,7 @@ mod tests {
     #[tokio::test]
     async fn test_read_chunked_trailer_with_missing_lf() {
         let mut mock_buf = &b"10\r\n1234567890abcdef\r\n0\r\nbad\r\r\n"[..];
-        let mut decoder = Decoder::chunked(None);
+        let mut decoder = Decoder::chunked(None, None);
         decoder.decode_fut(&mut mock_buf).await.expect("decode");
         let e = decoder.decode_fut(&mut mock_buf).await.unwrap_err();
         assert_eq!(e.kind(), io::ErrorKind::InvalidInput);
@@ -916,7 +948,7 @@ mod tests {
     #[tokio::test]
     async fn test_read_chunked_after_eof() {
         let mut mock_buf = &b"10\r\n1234567890abcdef\r\n0\r\n\r\n"[..];
-        let mut decoder = Decoder::chunked(None);
+        let mut decoder = Decoder::chunked(None, None);
 
         // normal read
         let buf = decoder
@@ -1006,7 +1038,7 @@ mod tests {
     async fn test_read_chunked_async() {
         let content = "3\r\nfoo\r\n3\r\nbar\r\n0\r\n\r\n";
         let expected = "foobar";
-        all_async_cases(content, expected, Decoder::chunked(None)).await;
+        all_async_cases(content, expected, Decoder::chunked(None, None)).await;
     }
 
     #[cfg(not(miri))]
@@ -1093,7 +1125,7 @@ mod tests {
     }
 
     #[tokio::test]
-    async fn test_trailer_h1_max_header_size_enforced() {
+    async fn test_trailer_max_headers_enforced() {
         let h1_max_headers = 10;
         let mut scratch = vec![];
         scratch.extend(b"10\r\n1234567890abcdef\r\n0\r\n");
@@ -1103,7 +1135,7 @@ mod tests {
         scratch.extend(b"\r\n");
         let mut mock_buf = Bytes::from(scratch);
 
-        let mut decoder = Decoder::chunked(Some(h1_max_headers));
+        let mut decoder = Decoder::chunked(Some(h1_max_headers), None);
 
         // ready chunked body
         let buf = decoder
@@ -1123,20 +1155,15 @@ mod tests {
     }
 
     #[tokio::test]
-    async fn test_trailer_limit_huge_trailer() {
+    async fn test_trailer_max_header_size_huge_trailer() {
+        let max_header_size = 1024;
         let mut scratch = vec![];
         scratch.extend(b"10\r\n1234567890abcdef\r\n0\r\n");
-        scratch.extend(
-            format!(
-                "huge_trailer: {}\r\n",
-                "x".repeat(TRAILER_LIMIT.try_into().unwrap())
-            )
-            .as_bytes(),
-        );
+        scratch.extend(format!("huge_trailer: {}\r\n", "x".repeat(max_header_size)).as_bytes());
         scratch.extend(b"\r\n");
         let mut mock_buf = Bytes::from(scratch);
 
-        let mut decoder = Decoder::chunked(None);
+        let mut decoder = Decoder::chunked(None, Some(max_header_size));
 
         // ready chunked body
         let buf = decoder
@@ -1156,18 +1183,20 @@ mod tests {
     }
 
     #[tokio::test]
-    async fn test_trailer_limit_many_small_trailers() {
+    async fn test_trailer_max_header_size_many_small_trailers() {
+        let max_headers = 10;
+        let header_size = 64;
         let mut scratch = vec![];
         scratch.extend(b"10\r\n1234567890abcdef\r\n0\r\n");
 
-        for i in 0..TRAILERS_FIELD_LIMIT {
-            scratch.extend(format!("trailer{}: {}\r\n", i, "x".repeat(64)).as_bytes());
+        for i in 0..max_headers {
+            scratch.extend(format!("trailer{}: {}\r\n", i, "x".repeat(header_size)).as_bytes());
         }
 
        scratch.extend(b"\r\n");
        let mut mock_buf = Bytes::from(scratch);
 
-        let mut decoder = Decoder::chunked(None);
+        let mut decoder = Decoder::chunked(None, Some(max_headers * header_size));
 
         // ready chunked body
         let buf = decoder
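Note (not part of the diff): the byte budget in this last test appears to be deliberately tight. Each trailer line built above is `"trailerN: "` plus 64 `x`s plus CRLF, roughly 76 bytes, so ten of them total about 760 bytes against the `max_headers * header_size` (640-byte) cap handed to the decoder; the visible hunk is cut off before the assertion, but that sizing is what would exercise the new limit. A back-of-the-envelope check:

```rust
// Rough sizing of the trailer block built in the test above — illustrative only.
let max_headers = 10;
let header_size = 64;
let per_line = "trailer0: ".len() + header_size + "\r\n".len(); // 76 bytes
let total = per_line * max_headers; // 760 bytes
assert!(total > max_headers * header_size); // 760 > 640
```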