Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-starlette for 
openSUSE:Factory checked in at 2023-02-25 19:54:54
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-starlette (Old)
 and      /work/SRC/openSUSE:Factory/.python-starlette.new.31432 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-starlette"

Sat Feb 25 19:54:54 2023 rev:17 rq:1067461 version:0.25.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-starlette/python-starlette.changes        
2023-02-13 16:41:30.567579938 +0100
+++ 
/work/SRC/openSUSE:Factory/.python-starlette.new.31432/python-starlette.changes 
    2023-02-25 19:55:05.843261288 +0100
@@ -1,0 +2,10 @@
+Thu Feb 16 16:22:35 UTC 2023 - David Anes <david.a...@suse.com>
+
+- Update to 0.25.0:
+  * Fixed
+    - Limit the number of fields and files when parsing 
+    multipart/form-data on the MultipartParser.
+
+- Fixed upstream: re-enable tests for i586 and armv7l.
+
+-------------------------------------------------------------------

Old:
----
  starlette-0.24.0.tar.gz

New:
----
  starlette-0.25.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-starlette.spec ++++++
--- /var/tmp/diff_new_pack.Kz9ufr/_old  2023-02-25 19:55:06.935268017 +0100
+++ /var/tmp/diff_new_pack.Kz9ufr/_new  2023-02-25 19:55:06.943268066 +0100
@@ -27,7 +27,7 @@
 
 %define skip_python2 1
 Name:           python-starlette%{psuffix}
-Version:        0.24.0
+Version:        0.25.0
 Release:        0
 Summary:        Lightweight ASGI framework/toolkit
 License:        BSD-3-Clause

++++++ starlette-0.24.0.tar.gz -> starlette-0.25.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/starlette-0.24.0/docs/release-notes.md 
new/starlette-0.25.0/docs/release-notes.md
--- old/starlette-0.24.0/docs/release-notes.md  2023-02-06 17:01:14.000000000 
+0100
+++ new/starlette-0.25.0/docs/release-notes.md  2023-02-14 10:06:03.000000000 
+0100
@@ -1,3 +1,10 @@
+## 0.25.0
+
+February 14, 2023
+
+### Fix
+* Limit the number of fields and files when parsing `multipart/form-data` on 
the `MultipartParser` 
[8c74c2c](https://github.com/encode/starlette/commit/8c74c2c8dba7030154f8af18e016136bea1938fa)
 and [#2036](https://github.com/encode/starlette/pull/2036).
+
 ## 0.24.0
 
 February 6, 2023
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/starlette-0.24.0/docs/requests.md 
new/starlette-0.25.0/docs/requests.md
--- old/starlette-0.24.0/docs/requests.md       2023-02-06 17:01:14.000000000 
+0100
+++ new/starlette-0.25.0/docs/requests.md       2023-02-14 10:06:03.000000000 
+0100
@@ -114,13 +114,25 @@
 
 Request files are normally sent as multipart form data (`multipart/form-data`).
 
+Signature: `request.form(max_files=1000, max_fields=1000)`
+
+You can configure the number of maximum fields or files with the parameters 
`max_files` and `max_fields`:
+
+```python
+async with request.form(max_files=1000, max_fields=1000):
+    ...
+```
+
+!!! info
+    These limits are for security reasons, allowing an unlimited number of 
fields or files could lead to a denial of service attack by consuming a lot of 
CPU and memory parsing too many empty fields.
+
 When you call `async with request.form() as form` you receive a 
`starlette.datastructures.FormData` which is an immutable
 multidict, containing both file uploads and text input. File upload items are 
represented as instances of `starlette.datastructures.UploadFile`.
 
 `UploadFile` has the following attributes:
 
-* `filename`: A `str` with the original file name that was uploaded (e.g. 
`myimage.jpg`).
-* `content_type`: A `str` with the content type (MIME type / media type) (e.g. 
`image/jpeg`).
+* `filename`: A `str` with the original file name that was uploaded or `None` 
+if it's not available (e.g. `myimage.jpg`).
+* `content_type`: A `str` with the content type (MIME type / media type) or 
+`None` if it's not available (e.g. `image/jpeg`).
 * `file`: A <a 
href="https://docs.python.org/3/library/tempfile.html#tempfile.SpooledTemporaryFile";
 target="_blank">`SpooledTemporaryFile`</a> (a <a 
href="https://docs.python.org/3/glossary.html#term-file-like-object"; 
target="_blank">file-like</a> object). This is the actual Python file that you 
can pass directly to other functions or libraries that expect a "file-like" 
object.
 * `headers`: A `Headers` object. Often this will only be the `Content-Type` 
header, but if additional headers were included in the multipart field they 
will be included here. Note that these headers have no relationship with the 
headers in `Request.headers`.
 * `size`: An `int` with uploaded file's size in bytes. This value is 
calculated from request's contents, making it better choice to find uploaded 
file's size than `Content-Length` header. `None` if not set.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/starlette-0.24.0/starlette/__init__.py 
new/starlette-0.25.0/starlette/__init__.py
--- old/starlette-0.24.0/starlette/__init__.py  2023-02-06 17:01:14.000000000 
+0100
+++ new/starlette-0.25.0/starlette/__init__.py  2023-02-14 10:06:03.000000000 
+0100
@@ -1 +1 @@
-__version__ = "0.24.0"
+__version__ = "0.25.0"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/starlette-0.24.0/starlette/formparsers.py 
new/starlette-0.25.0/starlette/formparsers.py
--- old/starlette-0.24.0/starlette/formparsers.py       2023-02-06 
17:01:14.000000000 +0100
+++ new/starlette-0.25.0/starlette/formparsers.py       2023-02-14 
10:06:03.000000000 +0100
@@ -1,4 +1,5 @@
 import typing
+from dataclasses import dataclass, field
 from enum import Enum
 from tempfile import SpooledTemporaryFile
 from urllib.parse import unquote_plus
@@ -21,15 +22,13 @@
     END = 5
 
 
-class MultiPartMessage(Enum):
-    PART_BEGIN = 1
-    PART_DATA = 2
-    PART_END = 3
-    HEADER_FIELD = 4
-    HEADER_VALUE = 5
-    HEADER_END = 6
-    HEADERS_FINISHED = 7
-    END = 8
+@dataclass
+class MultipartPart:
+    content_disposition: typing.Optional[bytes] = None
+    field_name: str = ""
+    data: bytes = b""
+    file: typing.Optional[UploadFile] = None
+    item_headers: typing.List[typing.Tuple[bytes, bytes]] = 
field(default_factory=list)
 
 
 def _user_safe_decode(src: bytes, codec: str) -> str:
@@ -120,46 +119,109 @@
     max_file_size = 1024 * 1024
 
     def __init__(
-        self, headers: Headers, stream: typing.AsyncGenerator[bytes, None]
+        self,
+        headers: Headers,
+        stream: typing.AsyncGenerator[bytes, None],
+        *,
+        max_files: typing.Union[int, float] = 1000,
+        max_fields: typing.Union[int, float] = 1000,
     ) -> None:
         assert (
             multipart is not None
         ), "The `python-multipart` library must be installed to use form 
parsing."
         self.headers = headers
         self.stream = stream
-        self.messages: typing.List[typing.Tuple[MultiPartMessage, bytes]] = []
+        self.max_files = max_files
+        self.max_fields = max_fields
+        self.items: typing.List[typing.Tuple[str, typing.Union[str, 
UploadFile]]] = []
+        self._current_files = 0
+        self._current_fields = 0
+        self._current_partial_header_name: bytes = b""
+        self._current_partial_header_value: bytes = b""
+        self._current_part = MultipartPart()
+        self._charset = ""
+        self._file_parts_to_write: typing.List[typing.Tuple[MultipartPart, 
bytes]] = []
+        self._file_parts_to_finish: typing.List[MultipartPart] = []
+        self._files_to_close_on_error: typing.List[SpooledTemporaryFile] = []
 
     def on_part_begin(self) -> None:
-        message = (MultiPartMessage.PART_BEGIN, b"")
-        self.messages.append(message)
+        self._current_part = MultipartPart()
 
     def on_part_data(self, data: bytes, start: int, end: int) -> None:
-        message = (MultiPartMessage.PART_DATA, data[start:end])
-        self.messages.append(message)
+        message_bytes = data[start:end]
+        if self._current_part.file is None:
+            self._current_part.data += message_bytes
+        else:
+            self._file_parts_to_write.append((self._current_part, 
message_bytes))
 
     def on_part_end(self) -> None:
-        message = (MultiPartMessage.PART_END, b"")
-        self.messages.append(message)
+        if self._current_part.file is None:
+            self.items.append(
+                (
+                    self._current_part.field_name,
+                    _user_safe_decode(self._current_part.data, self._charset),
+                )
+            )
+        else:
+            self._file_parts_to_finish.append(self._current_part)
+            # The file can be added to the items right now even though it's not
+            # finished yet, because it will be finished in the `parse()` 
method, before
+            # self.items is used in the return value.
+            self.items.append((self._current_part.field_name, 
self._current_part.file))
 
     def on_header_field(self, data: bytes, start: int, end: int) -> None:
-        message = (MultiPartMessage.HEADER_FIELD, data[start:end])
-        self.messages.append(message)
+        self._current_partial_header_name += data[start:end]
 
     def on_header_value(self, data: bytes, start: int, end: int) -> None:
-        message = (MultiPartMessage.HEADER_VALUE, data[start:end])
-        self.messages.append(message)
+        self._current_partial_header_value += data[start:end]
 
     def on_header_end(self) -> None:
-        message = (MultiPartMessage.HEADER_END, b"")
-        self.messages.append(message)
+        field = self._current_partial_header_name.lower()
+        if field == b"content-disposition":
+            self._current_part.content_disposition = 
self._current_partial_header_value
+        self._current_part.item_headers.append(
+            (field, self._current_partial_header_value)
+        )
+        self._current_partial_header_name = b""
+        self._current_partial_header_value = b""
 
     def on_headers_finished(self) -> None:
-        message = (MultiPartMessage.HEADERS_FINISHED, b"")
-        self.messages.append(message)
+        disposition, options = parse_options_header(
+            self._current_part.content_disposition
+        )
+        try:
+            self._current_part.field_name = _user_safe_decode(
+                options[b"name"], self._charset
+            )
+        except KeyError:
+            raise MultiPartException(
+                'The Content-Disposition header field "name" must be ' 
"provided."
+            )
+        if b"filename" in options:
+            self._current_files += 1
+            if self._current_files > self.max_files:
+                raise MultiPartException(
+                    f"Too many files. Maximum number of files is 
{self.max_files}."
+                )
+            filename = _user_safe_decode(options[b"filename"], self._charset)
+            tempfile = SpooledTemporaryFile(max_size=self.max_file_size)
+            self._files_to_close_on_error.append(tempfile)
+            self._current_part.file = UploadFile(
+                file=tempfile,  # type: ignore[arg-type]
+                size=0,
+                filename=filename,
+                headers=Headers(raw=self._current_part.item_headers),
+            )
+        else:
+            self._current_fields += 1
+            if self._current_fields > self.max_fields:
+                raise MultiPartException(
+                    f"Too many fields. Maximum number of fields is 
{self.max_fields}."
+                )
+            self._current_part.file = None
 
     def on_end(self) -> None:
-        message = (MultiPartMessage.END, b"")
-        self.messages.append(message)
+        pass
 
     async def parse(self) -> FormData:
         # Parse the Content-Type header to get the multipart boundary.
@@ -167,6 +229,7 @@
         charset = params.get(b"charset", "utf-8")
         if type(charset) == bytes:
             charset = charset.decode("latin-1")
+        self._charset = charset
         try:
             boundary = params[b"boundary"]
         except KeyError:
@@ -186,68 +249,28 @@
 
         # Create the parser.
         parser = multipart.MultipartParser(boundary, callbacks)
-        header_field = b""
-        header_value = b""
-        content_disposition = None
-        field_name = ""
-        data = b""
-        file: typing.Optional[UploadFile] = None
-
-        items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = 
[]
-        item_headers: typing.List[typing.Tuple[bytes, bytes]] = []
-
-        # Feed the parser with data from the request.
-        async for chunk in self.stream:
-            parser.write(chunk)
-            messages = list(self.messages)
-            self.messages.clear()
-            for message_type, message_bytes in messages:
-                if message_type == MultiPartMessage.PART_BEGIN:
-                    content_disposition = None
-                    data = b""
-                    item_headers = []
-                elif message_type == MultiPartMessage.HEADER_FIELD:
-                    header_field += message_bytes
-                elif message_type == MultiPartMessage.HEADER_VALUE:
-                    header_value += message_bytes
-                elif message_type == MultiPartMessage.HEADER_END:
-                    field = header_field.lower()
-                    if field == b"content-disposition":
-                        content_disposition = header_value
-                    item_headers.append((field, header_value))
-                    header_field = b""
-                    header_value = b""
-                elif message_type == MultiPartMessage.HEADERS_FINISHED:
-                    disposition, options = 
parse_options_header(content_disposition)
-                    try:
-                        field_name = _user_safe_decode(options[b"name"], 
charset)
-                    except KeyError:
-                        raise MultiPartException(
-                            'The Content-Disposition header field "name" must 
be '
-                            "provided."
-                        )
-                    if b"filename" in options:
-                        filename = _user_safe_decode(options[b"filename"], 
charset)
-                        tempfile = 
SpooledTemporaryFile(max_size=self.max_file_size)
-                        file = UploadFile(
-                            file=tempfile,  # type: ignore[arg-type]
-                            size=0,
-                            filename=filename,
-                            headers=Headers(raw=item_headers),
-                        )
-                    else:
-                        file = None
-                elif message_type == MultiPartMessage.PART_DATA:
-                    if file is None:
-                        data += message_bytes
-                    else:
-                        await file.write(message_bytes)
-                elif message_type == MultiPartMessage.PART_END:
-                    if file is None:
-                        items.append((field_name, _user_safe_decode(data, 
charset)))
-                    else:
-                        await file.seek(0)
-                        items.append((field_name, file))
+        try:
+            # Feed the parser with data from the request.
+            async for chunk in self.stream:
+                parser.write(chunk)
+                # Write file data, it needs to use await with the UploadFile 
methods
+                # that call the corresponding file methods *in a threadpool*,
+                # otherwise, if they were called directly in the callback 
methods above
+                # (regular, non-async functions), that would block the event 
loop in
+                # the main thread.
+                for part, data in self._file_parts_to_write:
+                    assert part.file  # for type checkers
+                    await part.file.write(data)
+                for part in self._file_parts_to_finish:
+                    assert part.file  # for type checkers
+                    await part.file.seek(0)
+                self._file_parts_to_write.clear()
+                self._file_parts_to_finish.clear()
+        except MultiPartException as exc:
+            # Close all the files if there was an error.
+            for file in self._files_to_close_on_error:
+                file.close()
+            raise exc
 
         parser.finalize()
-        return FormData(items)
+        return FormData(self.items)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/starlette-0.24.0/starlette/requests.py 
new/starlette-0.25.0/starlette/requests.py
--- old/starlette-0.24.0/starlette/requests.py  2023-02-06 17:01:14.000000000 
+0100
+++ new/starlette-0.25.0/starlette/requests.py  2023-02-14 10:06:03.000000000 
+0100
@@ -244,7 +244,12 @@
             self._json = json.loads(body)
         return self._json
 
-    async def _get_form(self) -> FormData:
+    async def _get_form(
+        self,
+        *,
+        max_files: typing.Union[int, float] = 1000,
+        max_fields: typing.Union[int, float] = 1000,
+    ) -> FormData:
         if self._form is None:
             assert (
                 parse_options_header is not None
@@ -254,7 +259,12 @@
             content_type, _ = parse_options_header(content_type_header)
             if content_type == b"multipart/form-data":
                 try:
-                    multipart_parser = MultiPartParser(self.headers, 
self.stream())
+                    multipart_parser = MultiPartParser(
+                        self.headers,
+                        self.stream(),
+                        max_files=max_files,
+                        max_fields=max_fields,
+                    )
                     self._form = await multipart_parser.parse()
                 except MultiPartException as exc:
                     if "app" in self.scope:
@@ -267,8 +277,15 @@
                 self._form = FormData()
         return self._form
 
-    def form(self) -> AwaitableOrContextManager[FormData]:
-        return AwaitableOrContextManagerWrapper(self._get_form())
+    def form(
+        self,
+        *,
+        max_files: typing.Union[int, float] = 1000,
+        max_fields: typing.Union[int, float] = 1000,
+    ) -> AwaitableOrContextManager[FormData]:
+        return AwaitableOrContextManagerWrapper(
+            self._get_form(max_files=max_files, max_fields=max_fields)
+        )
 
     async def close(self) -> None:
         if self._form is not None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/starlette-0.24.0/tests/test_formparsers.py 
new/starlette-0.25.0/tests/test_formparsers.py
--- old/starlette-0.24.0/tests/test_formparsers.py      2023-02-06 
17:01:14.000000000 +0100
+++ new/starlette-0.25.0/tests/test_formparsers.py      2023-02-14 
10:06:03.000000000 +0100
@@ -98,6 +98,29 @@
     await response(scope, receive, send)
 
 
+def make_app_max_parts(max_files: int = 1000, max_fields: int = 1000):
+    async def app(scope, receive, send):
+        request = Request(scope, receive)
+        data = await request.form(max_files=max_files, max_fields=max_fields)
+        output: typing.Dict[str, typing.Any] = {}
+        for key, value in data.items():
+            if isinstance(value, UploadFile):
+                content = await value.read()
+                output[key] = {
+                    "filename": value.filename,
+                    "size": value.size,
+                    "content": content.decode(),
+                    "content_type": value.content_type,
+                }
+            else:
+                output[key] = value
+        await request.close()
+        response = JSONResponse(output)
+        await response(scope, receive, send)
+
+    return app
+
+
 def test_multipart_request_data(tmpdir, test_client_factory):
     client = test_client_factory(app)
     response = client.post("/", data={"some": "data"}, files=FORCE_MULTIPART)
@@ -460,3 +483,202 @@
         assert (
             res.text == 'The Content-Disposition header field "name" must be 
provided.'
         )
+
+
+@pytest.mark.parametrize(
+    "app,expectation",
+    [
+        (app, pytest.raises(MultiPartException)),
+        (Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
+    ],
+)
+def test_too_many_fields_raise(app, expectation, test_client_factory):
+    client = test_client_factory(app)
+    fields = []
+    for i in range(1001):
+        fields.append(
+            "--B\r\n" f'Content-Disposition: form-data; name="N{i}";\r\n\r\n' 
"\r\n"
+        )
+    data = "".join(fields).encode("utf-8")
+    with expectation:
+        res = client.post(
+            "/",
+            data=data,
+            headers={"Content-Type": ("multipart/form-data; boundary=B")},
+        )
+        assert res.status_code == 400
+        assert res.text == "Too many fields. Maximum number of fields is 1000."
+
+
+@pytest.mark.parametrize(
+    "app,expectation",
+    [
+        (app, pytest.raises(MultiPartException)),
+        (Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
+    ],
+)
+def test_too_many_files_raise(app, expectation, test_client_factory):
+    client = test_client_factory(app)
+    fields = []
+    for i in range(1001):
+        fields.append(
+            "--B\r\n"
+            f'Content-Disposition: form-data; name="N{i}"; 
filename="F{i}";\r\n\r\n'
+            "\r\n"
+        )
+    data = "".join(fields).encode("utf-8")
+    with expectation:
+        res = client.post(
+            "/",
+            data=data,
+            headers={"Content-Type": ("multipart/form-data; boundary=B")},
+        )
+        assert res.status_code == 400
+        assert res.text == "Too many files. Maximum number of files is 1000."
+
+
+@pytest.mark.parametrize(
+    "app,expectation",
+    [
+        (app, pytest.raises(MultiPartException)),
+        (Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
+    ],
+)
+def test_too_many_files_single_field_raise(app, expectation, 
test_client_factory):
+    client = test_client_factory(app)
+    fields = []
+    for i in range(1001):
+        # This uses the same field name "N" for all files, equivalent to a
+        # multifile upload form field
+        fields.append(
+            "--B\r\n"
+            f'Content-Disposition: form-data; name="N"; 
filename="F{i}";\r\n\r\n'
+            "\r\n"
+        )
+    data = "".join(fields).encode("utf-8")
+    with expectation:
+        res = client.post(
+            "/",
+            data=data,
+            headers={"Content-Type": ("multipart/form-data; boundary=B")},
+        )
+        assert res.status_code == 400
+        assert res.text == "Too many files. Maximum number of files is 1000."
+
+
+@pytest.mark.parametrize(
+    "app,expectation",
+    [
+        (app, pytest.raises(MultiPartException)),
+        (Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
+    ],
+)
+def test_too_many_files_and_fields_raise(app, expectation, 
test_client_factory):
+    client = test_client_factory(app)
+    fields = []
+    for i in range(1001):
+        fields.append(
+            "--B\r\n"
+            f'Content-Disposition: form-data; name="F{i}"; 
filename="F{i}";\r\n\r\n'
+            "\r\n"
+        )
+        fields.append(
+            "--B\r\n" f'Content-Disposition: form-data; name="N{i}";\r\n\r\n' 
"\r\n"
+        )
+    data = "".join(fields).encode("utf-8")
+    with expectation:
+        res = client.post(
+            "/",
+            data=data,
+            headers={"Content-Type": ("multipart/form-data; boundary=B")},
+        )
+        assert res.status_code == 400
+        assert res.text == "Too many files. Maximum number of files is 1000."
+
+
+@pytest.mark.parametrize(
+    "app,expectation",
+    [
+        (make_app_max_parts(max_fields=1), pytest.raises(MultiPartException)),
+        (
+            Starlette(routes=[Mount("/", 
app=make_app_max_parts(max_fields=1))]),
+            does_not_raise(),
+        ),
+    ],
+)
+def test_max_fields_is_customizable_low_raises(app, expectation, 
test_client_factory):
+    client = test_client_factory(app)
+    fields = []
+    for i in range(2):
+        fields.append(
+            "--B\r\n" f'Content-Disposition: form-data; name="N{i}";\r\n\r\n' 
"\r\n"
+        )
+    data = "".join(fields).encode("utf-8")
+    with expectation:
+        res = client.post(
+            "/",
+            data=data,
+            headers={"Content-Type": ("multipart/form-data; boundary=B")},
+        )
+        assert res.status_code == 400
+        assert res.text == "Too many fields. Maximum number of fields is 1."
+
+
+@pytest.mark.parametrize(
+    "app,expectation",
+    [
+        (make_app_max_parts(max_files=1), pytest.raises(MultiPartException)),
+        (
+            Starlette(routes=[Mount("/", 
app=make_app_max_parts(max_files=1))]),
+            does_not_raise(),
+        ),
+    ],
+)
+def test_max_files_is_customizable_low_raises(app, expectation, 
test_client_factory):
+    client = test_client_factory(app)
+    fields = []
+    for i in range(2):
+        fields.append(
+            "--B\r\n"
+            f'Content-Disposition: form-data; name="F{i}"; 
filename="F{i}";\r\n\r\n'
+            "\r\n"
+        )
+    data = "".join(fields).encode("utf-8")
+    with expectation:
+        res = client.post(
+            "/",
+            data=data,
+            headers={"Content-Type": ("multipart/form-data; boundary=B")},
+        )
+        assert res.status_code == 400
+        assert res.text == "Too many files. Maximum number of files is 1."
+
+
+def test_max_fields_is_customizable_high(test_client_factory):
+    client = test_client_factory(make_app_max_parts(max_fields=2000, 
max_files=2000))
+    fields = []
+    for i in range(2000):
+        fields.append(
+            "--B\r\n" f'Content-Disposition: form-data; name="N{i}";\r\n\r\n' 
"\r\n"
+        )
+        fields.append(
+            "--B\r\n"
+            f'Content-Disposition: form-data; name="F{i}"; 
filename="F{i}";\r\n\r\n'
+            "\r\n"
+        )
+    data = "".join(fields).encode("utf-8")
+    data += b"--B--\r\n"
+    res = client.post(
+        "/",
+        data=data,
+        headers={"Content-Type": ("multipart/form-data; boundary=B")},
+    )
+    assert res.status_code == 200
+    res_data = res.json()
+    assert res_data["N1999"] == ""
+    assert res_data["F1999"] == {
+        "filename": "F1999",
+        "size": 0,
+        "content": "",
+        "content_type": None,
+    }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/starlette-0.24.0/tests/test_responses.py 
new/starlette-0.25.0/tests/test_responses.py
--- old/starlette-0.24.0/tests/test_responses.py        2023-02-06 
17:01:14.000000000 +0100
+++ new/starlette-0.25.0/tests/test_responses.py        2023-02-14 
10:06:03.000000000 +0100
@@ -293,7 +293,7 @@
 
 def test_set_cookie(test_client_factory, monkeypatch):
     # Mock time used as a reference for `Expires` by stdlib `SimpleCookie`.
-    mocked_now = dt.datetime(2100, 1, 22, 12, 0, 0, tzinfo=dt.timezone.utc)
+    mocked_now = dt.datetime(2037, 1, 22, 12, 0, 0, tzinfo=dt.timezone.utc)
     monkeypatch.setattr(time, "time", lambda: mocked_now.timestamp())
 
     async def app(scope, receive, send):
@@ -316,7 +316,7 @@
     assert response.text == "Hello, world!"
     assert (
         response.headers["set-cookie"]
-        == "mycookie=myvalue; Domain=localhost; expires=Fri, 22 Jan 2100 
12:00:10 GMT; "
+        == "mycookie=myvalue; Domain=localhost; expires=Thu, 22 Jan 2037 
12:00:10 GMT; "
         "HttpOnly; Max-Age=10; Path=/; SameSite=none; Secure"
     )
 
@@ -325,15 +325,15 @@
     "expires",
     [
         pytest.param(
-            dt.datetime(2100, 1, 22, 12, 0, 10, tzinfo=dt.timezone.utc), 
id="datetime"
+            dt.datetime(2037, 1, 22, 12, 0, 10, tzinfo=dt.timezone.utc), 
id="datetime"
         ),
-        pytest.param("Fri, 22 Jan 2100 12:00:10 GMT", id="str"),
+        pytest.param("Thu, 22 Jan 2037 12:00:10 GMT", id="str"),
         pytest.param(10, id="int"),
     ],
 )
 def test_expires_on_set_cookie(test_client_factory, monkeypatch, expires):
     # Mock time used as a reference for `Expires` by stdlib `SimpleCookie`.
-    mocked_now = dt.datetime(2100, 1, 22, 12, 0, 0, tzinfo=dt.timezone.utc)
+    mocked_now = dt.datetime(2037, 1, 22, 12, 0, 0, tzinfo=dt.timezone.utc)
     monkeypatch.setattr(time, "time", lambda: mocked_now.timestamp())
 
     async def app(scope, receive, send):
@@ -344,7 +344,7 @@
     client = test_client_factory(app)
     response = client.get("/")
     cookie: SimpleCookie = SimpleCookie(response.headers.get("set-cookie"))
-    assert cookie["mycookie"]["expires"] == "Fri, 22 Jan 2100 12:00:10 GMT"
+    assert cookie["mycookie"]["expires"] == "Thu, 22 Jan 2037 12:00:10 GMT"
 
 
 def test_delete_cookie(test_client_factory):

Reply via email to