add upload file size limit

Nancy Wang 2025-12-04 14:45:15 +00:00
parent 66bc00ca83
commit ce484fd61d
6 changed files with 60 additions and 5 deletions

View File

@@ -20,8 +20,9 @@ from fastapi.exception_handlers import (
    http_exception_handler,
    request_validation_exception_handler,
    websocket_request_validation_exception_handler,
    request_entity_too_large_exception_handler,
)
from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError
from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError, RequestEntityTooLarge
from fastapi.logger import logger
from fastapi.middleware.asyncexitstack import AsyncExitStackMiddleware
from fastapi.openapi.docs import (
@@ -992,6 +993,11 @@ class FastAPI(Starlette):
            # Starlette still has incorrect type specification for the handlers
            websocket_request_validation_exception_handler,  # type: ignore
        )
        self.exception_handlers.setdefault(
            RequestEntityTooLarge,
            request_entity_too_large_exception_handler,
        )

        self.user_middleware: List[Middleware] = (
            [] if middleware is None else list(middleware)
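
Because the handler above is registered with setdefault, a handler supplied by the application wins. A minimal sketch of overriding the default 413 response at construction time (the handler name and plain-text body are illustrative, not part of this commit):

from fastapi import FastAPI, Request
from fastapi.exceptions import RequestEntityTooLarge
from fastapi.responses import PlainTextResponse


async def custom_413_handler(
    request: Request, exc: RequestEntityTooLarge
) -> PlainTextResponse:
    # Return a plain-text 413 instead of the default JSON body.
    return PlainTextResponse(str(exc), status_code=413)


app = FastAPI(exception_handlers={RequestEntityTooLarge: custom_413_handler})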

View File

@@ -881,6 +881,8 @@ def _should_embed_body_fields(fields: List[ModelField]) -> bool:
    return False


from fastapi.exceptions import RequestEntityTooLarge

async def _extract_form_body(
    body_fields: List[ModelField],
    received_body: FormData,
@@ -892,10 +894,28 @@ async def _extract_form_body(
        field_info = field.field_info
        if (
            isinstance(field_info, (params.File, temp_pydantic_v1_params.File))
            and is_bytes_field(field)
            and isinstance(value, UploadFile)
        ):
            value = await value.read()
            # If a file size limit is defined through max_size, read the upload in
            # chunks and abort as soon as the limit is exceeded.
            max_size = getattr(field_info, "max_size", None)
            if max_size is not None:
                CHUNK = 8192
                total = 0
                content = bytearray()
                while True:
                    chunk = await value.read(CHUNK)
                    if not chunk:
                        break
                    total += len(chunk)
                    if total > max_size:
                        raise RequestEntityTooLarge(
                            f"Uploaded file '{field.alias}' exceeded max size={max_size} bytes"
                        )
                    content.extend(chunk)
                value = bytes(content)
            else:
                value = await value.read()
        elif (
            is_bytes_sequence_field(field)
            and isinstance(field_info, (params.File, temp_pydantic_v1_params.File))
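
The chunked read above only triggers for parameters declared as bytes with File(...) and received as an UploadFile, so the limit is enforced before the endpoint body runs. A minimal end-to-end sketch, assuming these changes are installed (the route path, file name, and 1 KiB limit are illustrative):

from fastapi import FastAPI, File
from fastapi.testclient import TestClient

app = FastAPI()


@app.post("/avatar")
async def upload_avatar(data: bytes = File(max_size=1024)) -> dict:
    # Within the limit, the whole payload arrives here as bytes.
    return {"received": len(data)}


client = TestClient(app)
response = client.post("/avatar", files={"data": ("avatar.png", b"x" * 2048)})
assert response.status_code == 413  # answered by request_entity_too_large_exception_handler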

View File

@@ -1,5 +1,5 @@
from fastapi.encoders import jsonable_encoder
from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError
from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError, RequestEntityTooLarge
from fastapi.utils import is_body_allowed_for_status_code
from fastapi.websockets import WebSocket
from starlette.exceptions import HTTPException
@@ -8,6 +8,12 @@ from starlette.responses import JSONResponse, Response
from starlette.status import WS_1008_POLICY_VIOLATION


async def request_entity_too_large_exception_handler(
    request: Request, exc: Exception
) -> JSONResponse:
    return JSONResponse(
        status_code=413,
        content={"detail": str(exc) or "Uploaded file too large"},
    )


async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
    headers = getattr(exc, "headers", None)
    if not is_body_allowed_for_status_code(exc.status_code):

View File

@@ -182,3 +182,8 @@ class ResponseValidationError(ValidationException):
        for err in self._errors:
            message += f" {err}\n"
        return message


class RequestEntityTooLarge(Exception):
    """Raised when uploaded content exceeds the configured max_size."""
    pass
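
RequestEntityTooLarge is a plain Exception rather than an HTTPException, so the 413 status comes entirely from the handler registered in the FastAPI constructor. That also means application code can raise it and get the same response; a hedged sketch (the dependency, route, and 10 MiB cap are illustrative, not part of this commit):

from fastapi import Depends, FastAPI, Request
from fastapi.exceptions import RequestEntityTooLarge

app = FastAPI()
MAX_BODY = 10 * 1024 * 1024  # example cap: 10 MiB


async def body_size_guard(request: Request) -> None:
    # Reject early based on the declared Content-Length, before reading the body.
    declared = request.headers.get("content-length")
    if declared is not None and declared.isdigit() and int(declared) > MAX_BODY:
        raise RequestEntityTooLarge(f"Request body exceeded max size={MAX_BODY} bytes")


@app.post("/bulk", dependencies=[Depends(body_size_guard)])
async def bulk_upload(request: Request) -> dict:
    body = await request.body()
    return {"bytes_received": len(body)}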

View File

@@ -725,6 +725,7 @@ class File(Form): # type: ignore[misc]
        deprecated: Union[deprecated, str, bool, None] = None,
        include_in_schema: bool = True,
        json_schema_extra: Union[Dict[str, Any], None] = None,
        max_size: Optional[int] = None,
        **extra: Any,
    ):
        super().__init__(
@@ -760,6 +761,7 @@ class File(Form): # type: ignore[misc]
            json_schema_extra=json_schema_extra,
            **extra,
        )
        self.max_size = max_size


@dataclass(frozen=True)

main.py
View File

@@ -1,6 +1,6 @@
from typing import Union

from fastapi import FastAPI
from fastapi import FastAPI, UploadFile, File
from pydantic import BaseModel

app = FastAPI()
@@ -25,3 +25,19 @@ def read_item(item_id: int, q: Union[str, None] = None):
@app.put("/items/{item_id}")
def update_item(item_id: int, item: Item):
    return {"item_name": item.name, "item_id": item_id}


@app.post("/upload")
async def upload_file(file: UploadFile = File(max_size=500)):
    total_bytes = 0
    # Read in chunks instead of loading the whole file into memory at once
    while True:
        chunk = await file.read(1024 * 1024)  # 1 MB chunks
        if not chunk:
            break
        total_bytes += len(chunk)
    return {"filename": file.filename, "size": total_bytes}