diff --git a/CHANGES.rst b/CHANGES.rst
index c8ac40f..282b360 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,9 @@
+0.19.7
+------
+
+* Security Fix how ``max_form_memory_size`` is applied when parsing large
+  non-file fields. https://github.com/advisories/GHSA-q34m-jh98-gwm2
+
 0.19.6 2024-05-19
 -----------------
 
diff --git a/pyproject.toml b/pyproject.toml
index 1e22852..9d71aca 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "Quart"
-version = "0.19.6"
+version = "0.19.7.dev"
 description = "A Python ASGI web microframework with the same API as Flask"
 authors = ["pgjones "]
 classifiers = [
diff --git a/src/quart/formparser.py b/src/quart/formparser.py
index eab878a..b8b0e52 100644
--- a/src/quart/formparser.py
+++ b/src/quart/formparser.py
@@ -15,6 +15,7 @@
 from urllib.parse import parse_qsl
 
 from werkzeug.datastructures import Headers, MultiDict
+from werkzeug.exceptions import RequestEntityTooLarge
 from werkzeug.formparser import default_stream_factory
 from werkzeug.http import parse_options_header
 from werkzeug.sansio.multipart import Data, Epilogue, Field, File, MultipartDecoder, NeedData
@@ -173,19 +174,28 @@ async def parse(
         files = []
 
         current_part: Field | File
+        field_size: int | None = None
         async for data in body:
             parser.receive_data(data)
             event = parser.next_event()
             while not isinstance(event, (Epilogue, NeedData)):
                 if isinstance(event, Field):
                     current_part = event
+                    field_size = 0
                     container = []
                     _write = container.append
                 elif isinstance(event, File):
                     current_part = event
+                    field_size = None
                     container = self.start_file_streaming(event, content_length)
                     _write = container.write
                 elif isinstance(event, Data):
+                    if field_size is not None:
+                        field_size += len(event.data)
+
+                        if field_size > self.max_form_memory_size:
+                            raise RequestEntityTooLarge()
+
                     _write(event.data)
                     if not event.more_data:
                         if isinstance(current_part, Field):
diff --git a/tests/test_formparser.py b/tests/test_formparser.py
new file mode 100644
index 0000000..c5e85f2
--- /dev/null
+++ b/tests/test_formparser.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import pytest
+from werkzeug.exceptions import RequestEntityTooLarge
+
+from quart.formparser import MultiPartParser
+from quart.wrappers.request import Body
+
+
+async def test_multipart_max_form_memory_size() -> None:
+    """max_form_memory_size is tracked across multiple data events."""
+    data = b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n"
+    data += b"a" * 15 + b"\r\n--bound--"
+    body = Body(None, None)
+    body.set_result(data)
+    # The buffer size is less than the max size, so multiple data events will be
+    # returned. The field size is greater than the max.
+    parser = MultiPartParser(max_form_memory_size=10, buffer_size=5)
+
+    with pytest.raises(RequestEntityTooLarge):
+        await parser.parse(body, b"bound", 0)
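
For readers who want the gist of the patch without tracing the hunk context: the parser now keeps a running byte count per non-file field and compares that running total, rather than any single chunk, against ``max_form_memory_size``. Below is a minimal standalone sketch of that accumulation pattern; ``collect_field`` and ``FieldTooLarge`` are illustrative names for this sketch only, not part of Quart's or Werkzeug's API.

```python
class FieldTooLarge(Exception):
    """Raised when a single form field exceeds the in-memory limit."""


def collect_field(chunks: list[bytes], max_form_memory_size: int) -> bytes:
    """Accumulate a field from chunked data events, capping the *total* size."""
    field_size = 0
    container: list[bytes] = []
    for chunk in chunks:
        # Track the cumulative size across all chunks of this field, so a field
        # split over many small data events cannot slip past the limit.
        field_size += len(chunk)
        if field_size > max_form_memory_size:
            raise FieldTooLarge()
        container.append(chunk)
    return b"".join(container)


if __name__ == "__main__":
    # 15 bytes arriving in 5-byte chunks exceeds a 10-byte limit even though no
    # single chunk does -- the same scenario the new test exercises.
    chunks = [b"aaaaa", b"aaaaa", b"aaaaa"]
    try:
        collect_field(chunks, max_form_memory_size=10)
    except FieldTooLarge:
        print("rejected: field larger than max_form_memory_size")
```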