Skip to content

Commit

Permalink
Merge commit from fork
Browse files Browse the repository at this point in the history
apply max_form_memory_size another level up in the parser
  • Loading branch information
davidism authored Oct 25, 2024
2 parents 8d6a12e + 8760275 commit 50cfeeb
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 0 deletions.
3 changes: 3 additions & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@ Version 3.0.6

Unreleased

- Fix how ``max_form_memory_size`` is applied when parsing large non-file
fields. :ghsa:`q34m-jh98-gwm2`


Version 3.0.5
-------------
Expand Down
11 changes: 11 additions & 0 deletions src/werkzeug/formparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -352,6 +352,7 @@ def parse(
self, stream: t.IO[bytes], boundary: bytes, content_length: int | None
) -> tuple[MultiDict[str, str], MultiDict[str, FileStorage]]:
current_part: Field | File
field_size: int | None = None
container: t.IO[bytes] | list[bytes]
_write: t.Callable[[bytes], t.Any]

Expand All @@ -370,13 +371,23 @@ def parse(
while not isinstance(event, (Epilogue, NeedData)):
if isinstance(event, Field):
current_part = event
field_size = 0
container = []
_write = container.append
elif isinstance(event, File):
current_part = event
field_size = None
container = self.start_file_streaming(event, content_length)
_write = container.write
elif isinstance(event, Data):
if self.max_form_memory_size is not None and field_size is not None:
# Ensure that accumulated data events do not exceed limit.
# Also checked within single event in MultipartDecoder.
field_size += len(event.data)

if field_size > self.max_form_memory_size:
raise RequestEntityTooLarge()

_write(event.data)
if not event.more_data:
if isinstance(current_part, Field):
Expand Down
2 changes: 2 additions & 0 deletions src/werkzeug/sansio/multipart.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,8 @@ def receive_data(self, data: bytes | None) -> None:
self.max_form_memory_size is not None
and len(self.buffer) + len(data) > self.max_form_memory_size
):
# Ensure that data within single event does not exceed limit.
# Also checked across accumulated events in MultiPartParser.
raise RequestEntityTooLarge()
else:
self.buffer.extend(data)
Expand Down
12 changes: 12 additions & 0 deletions tests/test_formparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -456,3 +456,15 @@ def test_file_rfc2231_filename_continuations(self):
) as request:
assert request.files["rfc2231"].filename == "a b c d e f.txt"
assert request.files["rfc2231"].read() == b"file contents"


def test_multipart_max_form_memory_size() -> None:
    """max_form_memory_size is tracked across multiple data events."""
    # 15 bytes of field content: larger than the 10-byte limit below, but
    # delivered in chunks that are each individually under it.
    payload = (
        b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n"
        + b"a" * 15
        + b"\r\n--bound--"
    )
    # A buffer smaller than the limit forces the decoder to emit several
    # Data events for one field, so only cross-event accumulation in
    # MultiPartParser.parse can detect that the field exceeds the max.
    parser = formparser.MultiPartParser(max_form_memory_size=10, buffer_size=5)

    with pytest.raises(RequestEntityTooLarge):
        parser.parse(io.BytesIO(payload), b"bound", None)

0 comments on commit 50cfeeb

Please sign in to comment.