Don't apply max_form_parts to non-multipart forms
This matches Flask's (Werkzeug's) behaviour. URL-encoded forms are quick
to parse, so the number of parts does not need restricting.
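For context, a minimal sketch of the standard-library behaviour involved (the field names and the limit of 1 are illustrative): urllib.parse.parse_qsl raises ValueError once max_num_fields is exceeded, which the old code translated into RequestEntityTooLarge, whereas parsing without the limit is a single linear pass over the body.

from urllib.parse import parse_qsl

body = "param1=data1&param2=data2&param3=data3"

# Old behaviour: passing max_num_fields makes parse_qsl raise ValueError
# when the body contains more fields than allowed; Quart re-raised this
# as RequestEntityTooLarge.
try:
    parse_qsl(body, keep_blank_values=True, max_num_fields=1)
except ValueError:
    print("too many fields")

# New behaviour: no field limit for url-encoded bodies, matching Werkzeug.
print(parse_qsl(body, keep_blank_values=True))
# [('param1', 'data1'), ('param2', 'data2'), ('param3', 'data3')]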
pgjones committed Dec 24, 2024
1 parent 2d36598 commit ba0e497
Showing 2 changed files with 4 additions and 18 deletions.
12 changes: 4 additions & 8 deletions src/quart/formparser.py
@@ -113,14 +113,10 @@ async def _parse_urlencoded(
         content_length: int | None,
         options: dict[str, str],
     ) -> tuple[MultiDict, MultiDict]:
-        try:
-            form = parse_qsl(
-                (await body).decode(),
-                keep_blank_values=True,
-                max_num_fields=self.max_form_parts,
-            )
-        except ValueError:
-            raise RequestEntityTooLarge() from None
+        form = parse_qsl(
+            (await body).decode(),
+            keep_blank_values=True,
+        )
         return self.cls(form), self.cls()
 
     parse_functions: dict[str, ParserFunc] = {
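A sketch, adapted from the test removed below, of how the url-encoded path behaves after this change; it assumes only the FormDataParser and Body APIs already used in that test.

import asyncio

from quart.formparser import FormDataParser
from quart.wrappers.request import Body


async def demo() -> None:
    # Even with max_form_parts=1, a url-encoded body with three fields now
    # parses without raising RequestEntityTooLarge; the limit only applies
    # to multipart forms.
    parser = FormDataParser(max_form_parts=1)
    body = Body(None, None)
    body.set_result(b"param1=data1&param2=data2&param3=data3")
    await parser.parse(body, "application/x-url-encoded", None)


asyncio.run(demo())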
10 changes: 0 additions & 10 deletions tests/test_formparser.py
@@ -3,7 +3,6 @@
 import pytest
 from werkzeug.exceptions import RequestEntityTooLarge
 
-from quart.formparser import FormDataParser
 from quart.formparser import MultiPartParser
 from quart.wrappers.request import Body
 
@@ -20,12 +19,3 @@ async def test_multipart_max_form_memory_size() -> None:
 
     with pytest.raises(RequestEntityTooLarge):
         await parser.parse(body, b"bound", 0)
-
-
-async def test_formparser_max_num_parts() -> None:
-    parser = FormDataParser(max_form_parts=1)
-    body = Body(None, None)
-    body.set_result(b"param1=data1&param2=data2&param3=data3")
-
-    with pytest.raises(RequestEntityTooLarge):
-        await parser.parse(body, "application/x-url-encoded", None)
