mirror of https://github.com/tiangolo/fastapi.git
Merge 866f28361b into 71a17b5932
This commit is contained in: commit 7517fffed7

Binary file not shown (image added, 230 KiB).
@@ -186,3 +186,17 @@ In this case, there are 3 query parameters:

 You could also use `Enum`s the same way as with [Path Parameters](path-params.md#predefined-values){.internal-link target=_blank}.

 ///

+## Free Form Query Parameters { #free-form-query-parameters }
+
+Sometimes you want to receive some query parameters, but you don't know in advance what they are called. **FastAPI** provides support for this use case as well.
+
+=== "Python 3.10+"
+
+    ```Python hl_lines="8"
+    {!> ../../../docs_src/query_params/tutorial007_py310.py!}
+    ```
+
+And when you open your browser at <a href="http://127.0.0.1:8000/docs" class="external-link" target="_blank">http://127.0.0.1:8000/docs</a>, you will see that OpenAPI supports this format of query parameter:
+
+<img src="/img/tutorial/path-params/image01.png">
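For a concrete feel for what the example returns, here is a minimal usage sketch. It assumes the `tutorial007_py310` app added later in this diff, and it reuses the request and expected payload from the test added at the end of this commit:

```Python
from fastapi.testclient import TestClient

from docs_src.query_params.tutorial007_py310 import app

client = TestClient(app)

# "foo" is not declared anywhere in the path operation; it is captured by the
# free-form mapping query parameters. Repeated keys keep only the last value
# for Dict[str, str] / Dict[str, int], and all values for Dict[str, List[int]].
response = client.get("/query/mixed-type-params?query=1&query=2&foo=bar&foo=baz")
assert response.status_code == 200
assert response.json() == {
    "query": 2,
    "mapping_query_str": {"foo": "baz"},
    "mapping_query_int": {},  # "bar"/"baz" are not integers, so they are omitted
    "sequence_mapping_int": {"foo": []},
}
```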
@@ -0,0 +1,20 @@
+from typing import Annotated, Dict, List
+
+from fastapi import FastAPI, Query
+
+app = FastAPI()
+
+
+@app.get("/query/mixed-type-params")
+def get_mixed_mapping_mixed_type_query_params(
+    query: Annotated[int, Query()] = None,
+    mapping_query_str: Annotated[Dict[str, str], Query()] = None,
+    mapping_query_int: Annotated[Dict[str, int], Query()] = None,
+    sequence_mapping_int: Annotated[Dict[str, List[int]], Query()] = None,
+):
+    return {
+        "query": query,
+        "mapping_query_str": mapping_query_str,
+        "mapping_query_int": mapping_query_int,
+        "sequence_mapping_int": sequence_mapping_int,
+    }
@@ -24,8 +24,11 @@ from .main import get_schema_from_model_field as get_schema_from_model_field
 from .main import is_bytes_field as is_bytes_field
 from .main import is_bytes_sequence_field as is_bytes_sequence_field
 from .main import is_scalar_field as is_scalar_field
+from .main import is_scalar_mapping_field as is_scalar_mapping_field
 from .main import is_scalar_sequence_field as is_scalar_sequence_field
+from .main import is_scalar_sequence_mapping_field as is_scalar_sequence_mapping_field
 from .main import is_sequence_field as is_sequence_field
+from .main import omit_by_default as omit_by_default
 from .main import serialize_sequence_value as serialize_sequence_value
 from .main import (
     with_info_plain_validator_function as with_info_plain_validator_function,
@@ -39,6 +42,12 @@ from .shared import PYDANTIC_V2 as PYDANTIC_V2
 from .shared import PYDANTIC_VERSION_MINOR_TUPLE as PYDANTIC_VERSION_MINOR_TUPLE
 from .shared import annotation_is_pydantic_v1 as annotation_is_pydantic_v1
 from .shared import field_annotation_is_scalar as field_annotation_is_scalar
+from .shared import (
+    field_annotation_is_scalar_mapping as field_annotation_is_scalar_mapping,
+)
+from .shared import (
+    field_annotation_is_scalar_sequence_mapping as field_annotation_is_scalar_sequence_mapping,
+)
 from .shared import (
     is_uploadfile_or_nonable_uploadfile_annotation as is_uploadfile_or_nonable_uploadfile_annotation,
 )
@@ -28,6 +28,7 @@ if PYDANTIC_V2:
     from .v2 import Validator as Validator
     from .v2 import evaluate_forwardref as evaluate_forwardref
     from .v2 import get_missing_field_error as get_missing_field_error
+    from .v2 import omit_by_default as omit_by_default
     from .v2 import (
         with_info_plain_validator_function as with_info_plain_validator_function,
     )
@@ -44,6 +45,7 @@ else:
     from .v1 import Validator as Validator
     from .v1 import evaluate_forwardref as evaluate_forwardref
     from .v1 import get_missing_field_error as get_missing_field_error
+    from .v1 import omit_by_default as omit_by_default
     from .v1 import ( # type: ignore[assignment]
         with_info_plain_validator_function as with_info_plain_validator_function,
     )
@@ -209,6 +211,30 @@ def is_sequence_field(field: ModelField) -> bool:
     return v2.is_sequence_field(field) # type: ignore[arg-type]


+def is_scalar_mapping_field(field: ModelField) -> bool:
+    if isinstance(field, may_v1.ModelField):
+        from fastapi._compat import v1
+
+        return v1.is_scalar_mapping_field(field)
+    else:
+        assert PYDANTIC_V2
+        from . import v2
+
+        return v2.is_scalar_mapping_field(field) # type: ignore[arg-type]
+
+
+def is_scalar_sequence_mapping_field(field: ModelField) -> bool:
+    if isinstance(field, may_v1.ModelField):
+        from fastapi._compat import v1
+
+        return v1.is_scalar_sequence_mapping_field(field)
+    else:
+        assert PYDANTIC_V2
+        from . import v2
+
+        return v2.is_scalar_sequence_mapping_field(field) # type: ignore[arg-type]
+
+
 def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
     if isinstance(field, may_v1.ModelField):
         from fastapi._compat import v1
@@ -144,6 +144,52 @@ def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> b
     )


+def field_annotation_is_scalar_mapping(
+    annotation: Union[Type[Any], None],
+) -> bool:
+    origin = get_origin(annotation)
+    if origin is Annotated:
+        return field_annotation_is_scalar_mapping(get_args(annotation)[0])
+    if origin is Union or origin is UnionType:
+        at_least_one_scalar_mapping = False
+        for arg in get_args(annotation):
+            if field_annotation_is_scalar_mapping(arg):
+                at_least_one_scalar_mapping = True
+                continue
+            elif not field_annotation_is_scalar(arg):
+                return False
+        return at_least_one_scalar_mapping
+    return lenient_issubclass(origin, Mapping) and all(
+        field_annotation_is_scalar(sub_annotation)
+        for sub_annotation in get_args(annotation)
+    )
+
+
+def field_annotation_is_scalar_sequence_mapping(
+    annotation: Union[Type[Any], None],
+) -> bool:
+    origin = get_origin(annotation)
+    if origin is Annotated:
+        return field_annotation_is_scalar_sequence_mapping(get_args(annotation)[0])
+    if origin is Union or origin is UnionType:
+        at_least_one_scalar_mapping = False
+        for arg in get_args(annotation):
+            if field_annotation_is_scalar_sequence_mapping(arg):
+                at_least_one_scalar_mapping = True
+                continue
+            elif not (
+                field_annotation_is_scalar_sequence_mapping(arg)
+                or field_annotation_is_scalar_mapping(arg)
+            ):
+                return False
+        return at_least_one_scalar_mapping
+    return lenient_issubclass(origin, Mapping) and all(
+        field_annotation_is_scalar_sequence(sub_annotation)
+        or field_annotation_is_scalar(sub_annotation)
+        for sub_annotation in get_args(annotation)
+    )
+
+
 def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool:
     if lenient_issubclass(annotation, bytes):
         return True
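A rough illustration (not part of the diff) of how these two new predicates are expected to classify annotations, assuming they are importable from `fastapi._compat.shared` as added above:

```Python
from typing import Dict, List, Optional

from fastapi._compat.shared import (
    field_annotation_is_scalar_mapping,
    field_annotation_is_scalar_sequence_mapping,
)

# A mapping of scalar values, e.g. free-form ?key=value query parameters.
assert field_annotation_is_scalar_mapping(Dict[str, int])
assert field_annotation_is_scalar_mapping(Optional[Dict[str, str]])
assert not field_annotation_is_scalar_mapping(Dict[str, List[int]])

# A mapping of scalar sequences, e.g. repeated ?key=v1&key=v2 query parameters.
assert field_annotation_is_scalar_sequence_mapping(Dict[str, List[int]])
assert not field_annotation_is_scalar_sequence_mapping(Dict[str, Dict[str, int]])
```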
@@ -1,4 +1,4 @@
-from copy import copy
+from copy import copy, deepcopy
 from dataclasses import dataclass, is_dataclass
 from enum import Enum
 from typing import (
@@ -35,6 +35,7 @@ if not PYDANTIC_V2:
     from pydantic.error_wrappers import ErrorWrapper as ErrorWrapper
     from pydantic.errors import MissingError
     from pydantic.fields import ( # type: ignore[attr-defined]
+        MAPPING_LIKE_SHAPES,
         SHAPE_FROZENSET,
         SHAPE_LIST,
         SHAPE_SEQUENCE,
@@ -82,6 +83,7 @@ else:
     from pydantic.v1.error_wrappers import ErrorWrapper as ErrorWrapper
     from pydantic.v1.errors import MissingError
     from pydantic.v1.fields import (
+        MAPPING_LIKE_SHAPES,
         SHAPE_FROZENSET,
         SHAPE_LIST,
         SHAPE_SEQUENCE,
@@ -144,6 +146,8 @@ sequence_shape_to_type = {
     SHAPE_TUPLE_ELLIPSIS: list,
 }

+mapping_shapes = MAPPING_LIKE_SHAPES
+

 @dataclass
 class GenerateJsonSchema:
@@ -219,6 +223,32 @@ def is_pv1_scalar_sequence_field(field: ModelField) -> bool:
     return False


+def is_pv1_scalar_sequence_mapping_field(field: ModelField) -> bool:
+    if (field.shape in mapping_shapes) and not lenient_issubclass(
+        field.type_, BaseModel
+    ):
+        if field.sub_fields is not None:
+            for sub_field in field.sub_fields:
+                if not (
+                    is_scalar_sequence_field(sub_field) or is_scalar_field(sub_field)
+                ):
+                    return False
+        return True
+    return False
+
+
+def is_pv1_scalar_mapping_field(field: ModelField) -> bool:
+    if (field.shape in mapping_shapes) and not lenient_issubclass(
+        field.type_, BaseModel
+    ):
+        if field.sub_fields is not None:
+            for sub_field in field.sub_fields:
+                if not is_scalar_field(sub_field):
+                    return False
+        return True
+    return False
+
+
 def _model_rebuild(model: Type[BaseModel]) -> None:
     model.update_forward_refs()

@@ -277,6 +307,14 @@ def is_scalar_sequence_field(field: ModelField) -> bool:
     return is_pv1_scalar_sequence_field(field)


+def is_scalar_mapping_field(field: ModelField) -> bool:
+    return is_pv1_scalar_mapping_field(field)
+
+
+def is_scalar_sequence_mapping_field(field: ModelField) -> bool:
+    return is_pv1_scalar_sequence_mapping_field(field)
+
+
 def is_bytes_field(field: ModelField) -> bool:
     return lenient_issubclass(field.type_, bytes) # type: ignore[no-any-return]

@@ -310,3 +348,64 @@ def create_body_model(

 def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
     return list(model.__fields__.values()) # type: ignore[attr-defined]
+
+
+def ignore_invalid(
+    cls: Any,
+    v: Dict[str, Any],
+    values: Dict[str, Any],
+    field: ModelField,
+    **kwargs: Any,
+) -> Any:
+    from .may_v1 import _regenerate_error_with_loc
+
+    field_copy = deepcopy(field)
+    field_copy.pre_validators = [
+        validator
+        for validator in field_copy.pre_validators
+        if getattr(validator, "__name__", "") != "ignore_invalid"
+    ]
+    v, errors = field_copy.validate(v, values, loc=field.name)
+    if not errors:
+        return v
+
+    # pop the keys or elements that caused the validation errors and revalidate
+    for error in _regenerate_error_with_loc(errors=errors, loc_prefix=()):
+        loc = error["loc"][1:]
+        if len(loc) == 0:
+            continue
+        if isinstance(loc[0], int) and isinstance(v, list):
+            index = loc[0]
+            if 0 <= index < len(v):
+                v[index] = None
+
+        # Handle nested list validation errors (e.g., dict[str, list[str]])
+        elif isinstance(loc[0], str) and isinstance(v, dict):
+            key = loc[0]
+            if (
+                len(loc) > 1
+                and isinstance(loc[1], int)
+                and key in v
+                and isinstance(v[key], list)
+            ):
+                list_index = loc[1]
+                v[key][list_index] = None
+            elif key in v:
+                v.pop(key)
+
+    if isinstance(v, list):
+        v = [el for el in v if el is not None]
+
+    if isinstance(v, dict):
+        for key in v.keys():
+            if isinstance(v[key], list):
+                v[key] = [el for el in v[key] if el is not None]
+
+    return v
+
+
+def omit_by_default(
+    field_info: FieldInfo,
+) -> Tuple[FieldInfo, Dict[str, Callable[..., Any]]]:
+    """add a wrap validator to omit invalid values by default."""
+    return field_info, {"ignore_invalid": Validator(ignore_invalid, pre=True)}
@@ -5,6 +5,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import (
     Any,
+    Callable,
     Dict,
     List,
     Sequence,
@@ -18,7 +19,7 @@ from typing import (
 from fastapi._compat import may_v1, shared
 from fastapi.openapi.constants import REF_TEMPLATE
 from fastapi.types import IncEx, ModelNameMap, UnionType
-from pydantic import BaseModel, TypeAdapter, create_model
+from pydantic import BaseModel, OnErrorOmit, TypeAdapter, WrapValidator, create_model
 from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError
 from pydantic import PydanticUndefinedAnnotation as PydanticUndefinedAnnotation
 from pydantic import ValidationError as ValidationError
@@ -365,6 +366,16 @@ def is_scalar_sequence_field(field: ModelField) -> bool:
     return shared.field_annotation_is_scalar_sequence(field.field_info.annotation)


+def is_scalar_mapping_field(field: ModelField) -> bool:
+    return shared.field_annotation_is_scalar_mapping(field.field_info.annotation)
+
+
+def is_scalar_sequence_mapping_field(field: ModelField) -> bool:
+    return shared.field_annotation_is_scalar_sequence_mapping(
+        field.field_info.annotation
+    )
+
+
 def is_bytes_field(field: ModelField) -> bool:
     return shared.is_bytes_or_nonable_bytes_annotation(field.type_)

@@ -497,3 +508,79 @@ def get_flat_models_from_fields(

 def get_long_model_name(model: TypeModelOrEnum) -> str:
     return f"{model.__module__}__{model.__qualname__}".replace(".", "__")
+
+
+if shared.PYDANTIC_VERSION_MINOR_TUPLE >= (2, 6):
+    # Omit by default for scalar mapping and scalar sequence mapping annotations
+    # added in Pydantic v2.6 https://github.com/pydantic/pydantic/releases/tag/v2.6.0
+    def _omit_by_default(annotation: Any) -> Any:
+        origin = getattr(annotation, "__origin__", None)
+        args = getattr(annotation, "__args__", ())
+
+        if origin is Union:
+            new_args = tuple(_omit_by_default(arg) for arg in args)
+            return Union[new_args]
+        elif origin in (list, List):
+            return List[_omit_by_default(args[0])] # type: ignore[misc]
+        elif origin in (dict, Dict):
+            return Dict[args[0], _omit_by_default(args[1])] # type: ignore[misc,valid-type]
+        else:
+            return OnErrorOmit[annotation] # type: ignore[misc]
+
+    def omit_by_default(
+        field_info: FieldInfo,
+    ) -> Tuple[FieldInfo, Dict[str, Callable[..., Any]]]:
+        new_annotation = _omit_by_default(field_info.annotation)
+        new_field_info = copy_field_info(
+            field_info=field_info, annotation=new_annotation
+        )
+        return new_field_info, {}
+
+else:
+
+    def ignore_invalid(v: Any, handler: Callable[[Any], Any]) -> Any:
+        try:
+            return handler(v)
+        except ValidationError as exc:
+            # pop the keys or elements that caused the validation errors and revalidate
+            for error in exc.errors():
+                loc = error["loc"]
+                if len(loc) == 0:
+                    continue
+                if isinstance(loc[0], int) and isinstance(v, list):
+                    index = loc[0]
+                    if 0 <= index < len(v):
+                        v[index] = None
+
+                # Handle nested list validation errors (e.g., dict[str, list[str]])
+                elif isinstance(loc[0], str) and isinstance(v, dict):
+                    key = loc[0]
+                    if (
+                        len(loc) > 1
+                        and isinstance(loc[1], int)
+                        and key in v
+                        and isinstance(v[key], list)
+                    ):
+                        list_index = loc[1]
+                        v[key][list_index] = None
+                    elif key in v:
+                        v.pop(key)
+
+            if isinstance(v, list):
+                v = [el for el in v if el is not None]
+
+            if isinstance(v, dict):
+                for key in v.keys():
+                    if isinstance(v[key], list):
+                        v[key] = [el for el in v[key] if el is not None]
+
+            return handler(v)
+
+    def omit_by_default(
+        field_info: FieldInfo,
+    ) -> Tuple[FieldInfo, Dict[str, Callable[..., Any]]]:
+        """add a wrap validator to omit invalid values by default."""
+        field_info.metadata = (field_info.metadata or []) + [
+            WrapValidator(ignore_invalid)
+        ]
+        return field_info, {}
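For reference, a minimal sketch (not part of the diff) of the Pydantic >= 2.6 building block the first branch relies on: as I understand it, `OnErrorOmit` silently drops collection entries that fail validation, which is what `_omit_by_default` wraps around the mapping's value type.

```Python
from typing import Dict

from pydantic import OnErrorOmit, TypeAdapter

# This mirrors what _omit_by_default produces for an annotation like Dict[str, int].
adapter = TypeAdapter(Dict[str, OnErrorOmit[int]])

# "b" cannot be coerced to an int, so its entry is dropped instead of raising
# a ValidationError for the whole mapping.
assert adapter.validate_python({"a": "1", "b": "not-an-int"}) == {"a": 1}
```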
@@ -39,17 +39,24 @@ from fastapi._compat import (
     is_bytes_field,
     is_bytes_sequence_field,
     is_scalar_field,
+    is_scalar_mapping_field,
     is_scalar_sequence_field,
+    is_scalar_sequence_mapping_field,
     is_sequence_field,
     is_uploadfile_or_nonable_uploadfile_annotation,
     is_uploadfile_sequence_annotation,
     lenient_issubclass,
     may_v1,
+    omit_by_default,
     sequence_types,
     serialize_sequence_value,
     value_is_sequence,
 )
-from fastapi._compat.shared import annotation_is_pydantic_v1
+from fastapi._compat.shared import (
+    annotation_is_pydantic_v1,
+    field_annotation_is_scalar_mapping,
+    field_annotation_is_scalar_sequence_mapping,
+)
 from fastapi.background import BackgroundTasks
 from fastapi.concurrency import (
     asynccontextmanager,
@@ -509,6 +516,7 @@ def analyze_param(
         and getattr(field_info, "in_", None) is None
     ):
         field_info.in_ = params.ParamTypes.query
+
     use_annotation_from_field_info = get_annotation_from_field_info(
         use_annotation,
         field_info,
@@ -520,7 +528,17 @@ def analyze_param(
         alias = param_name.replace("_", "-")
     else:
         alias = field_info.alias or param_name
+
     field_info.alias = alias
+
+    # Omit by default for scalar mapping and scalar sequence mapping query fields
+    class_validators: Dict[str, Callable[..., Any]] = {}
+    if isinstance(field_info, (params.Query, temp_pydantic_v1_params.Query)) and (
+        field_annotation_is_scalar_sequence_mapping(use_annotation_from_field_info)
+        or field_annotation_is_scalar_mapping(use_annotation_from_field_info)
+    ):
+        field_info, class_validators = omit_by_default(field_info)
+
     field = create_model_field(
         name=param_name,
         type_=use_annotation_from_field_info,
@@ -529,6 +547,7 @@ def analyze_param(
         required=field_info.default
         in (RequiredParam, may_v1.RequiredParam, Undefined),
         field_info=field_info,
+        class_validators=class_validators,
     )
     if is_path_param:
         assert is_scalar_field(field=field), (
@@ -538,6 +557,8 @@ def analyze_param(
         assert (
             is_scalar_field(field)
             or is_scalar_sequence_field(field)
+            or is_scalar_mapping_field(field)
+            or is_scalar_sequence_mapping_field(field)
            or (
                 _is_model_class(field.type_)
                 # For Pydantic v1
@@ -733,6 +754,7 @@ def _validate_value_with_model_field(
     else:
         return deepcopy(field.default), []
     v_, errors_ = field.validate(value, values, loc=loc)
+
     if _is_error_wrapper(errors_): # type: ignore[arg-type]
         return None, [errors_]
     elif isinstance(errors_, list):
@@ -746,10 +768,19 @@ def _get_multidict_value(
     field: ModelField, values: Mapping[str, Any], alias: Union[str, None] = None
 ) -> Any:
     alias = alias or field.alias
+    value: Any = None
     if is_sequence_field(field) and isinstance(values, (ImmutableMultiDict, Headers)):
         value = values.getlist(alias)
-    else:
-        value = values.get(alias, None)
+    elif alias in values:
+        value = values[alias]
+    elif values and is_scalar_mapping_field(field) and isinstance(values, QueryParams):
+        value = dict(values)
+    elif (
+        values
+        and is_scalar_sequence_mapping_field(field)
+        and isinstance(values, QueryParams)
+    ):
+        value = {key: values.getlist(key) for key in values.keys()}
     if (
         value is None
         or (
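To make the two new branches concrete, a small illustration (not part of the diff) of what they extract from a Starlette `QueryParams` multidict for a free-form mapping field:

```Python
from starlette.datastructures import QueryParams

params = QueryParams("foo=1&foo=2&bar=3")

# Scalar mapping fields (e.g. Dict[str, str]) get a plain dict; for repeated
# keys, dict(QueryParams) keeps the last value.
assert dict(params) == {"foo": "2", "bar": "3"}

# Scalar sequence mapping fields (e.g. Dict[str, List[int]]) get every value
# per key via getlist().
assert {key: params.getlist(key) for key in params.keys()} == {
    "foo": ["1", "2"],
    "bar": ["3"],
}
```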
@@ -848,6 +879,14 @@ def request_params_to_args(
             errors.extend(errors_)
         else:
             values[field.name] = v_
+    # remove keys which were captured by a mapping query field but were
+    # specified as individual fields
+    for field in fields:
+        if isinstance(values.get(field.name), dict) and (
+            is_scalar_mapping_field(field) or is_scalar_sequence_mapping_field(field)
+        ):
+            for f_ in fields:
+                values[field.name].pop(f_.alias, None)
     return values, errors

@@ -1,5 +1,5 @@
 import http
-from typing import FrozenSet, List, Optional
+from typing import Dict, FrozenSet, List, Optional

 from fastapi import FastAPI, Path, Query

@@ -189,6 +189,48 @@ def get_query_param_required_type(query: int = Query()):
     return f"foo bar {query}"


+@app.get("/query/mapping-params")
+def get_mapping_query_params(queries: Dict[str, str] = Query({})):
+    return {"queries": queries}
+
+
+@app.get("/query/mixed-params")
+def get_mixed_mapping_query_params(
+    sequence_mapping_queries: Dict[str, List[int]] = Query({}),
+    mapping_query: Dict[str, str] = Query(),
+    query: str = Query(),
+):
+    return {
+        "queries": {
+            "query": query,
+            "mapping_query": mapping_query,
+            "sequence_mapping_queries": sequence_mapping_queries,
+        }
+    }
+
+
+@app.get("/query/mapping-sequence-params")
+def get_sequence_mapping_query_params(queries: Dict[str, List[int]] = Query({})):
+    return {"queries": queries}
+
+
+@app.get("/query/mixed-type-params")
+def get_mixed_mapping_mixed_type_query_params(
+    sequence_mapping_queries: Dict[str, List[int]] = Query({}),
+    mapping_query_str: Dict[str, str] = Query({}),
+    mapping_query_int: Dict[str, int] = Query({}),
+    query: int = Query(),
+):
+    return {
+        "queries": {
+            "query": query,
+            "mapping_query_str": mapping_query_str,
+            "mapping_query_int": mapping_query_int,
+            "sequence_mapping_queries": sequence_mapping_queries,
+        }
+    }
+
+
 @app.get("/enum-status-code", status_code=http.HTTPStatus.CREATED)
 def get_enum_status_code():
     return "foo bar"
@@ -986,7 +986,10 @@ def test_openapi_schema():
                             "required": False,
                             "schema": IsDict(
                                 {
-                                    "anyOf": [{"type": "integer"}, {"type": "null"}],
+                                    "anyOf": [
+                                        {"type": "integer"},
+                                        {"type": "null"},
+                                    ],
                                     "title": "Query",
                                 }
                             )
@@ -1120,6 +1123,235 @@ def test_openapi_schema():
                    ],
                }
            },
+            "/query/mapping-params": {
+                "get": {
+                    "operationId": "get_mapping_query_params_query_mapping_params_get",
+                    "parameters": [
+                        {
+                            "in": "query",
+                            "name": "queries",
+                            "required": False,
+                            "schema": {
+                                "additionalProperties": {
+                                    "type": "string",
+                                },
+                                "default": {},
+                                "title": "Queries",
+                                "type": "object",
+                            },
+                        },
+                    ],
+                    "responses": {
+                        "200": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {},
+                                },
+                            },
+                            "description": "Successful Response",
+                        },
+                        "422": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {
+                                        "$ref": "#/components/schemas/HTTPValidationError",
+                                    },
+                                },
+                            },
+                            "description": "Validation Error",
+                        },
+                    },
+                    "summary": "Get Mapping Query Params",
+                },
+            },
+            "/query/mapping-sequence-params": {
+                "get": {
+                    "operationId": "get_sequence_mapping_query_params_query_mapping_sequence_params_get",
+                    "parameters": [
+                        {
+                            "in": "query",
+                            "name": "queries",
+                            "required": False,
+                            "schema": {
+                                "additionalProperties": {
+                                    "items": {
+                                        "type": "integer",
+                                    },
+                                    "type": "array",
+                                },
+                                "default": {},
+                                "title": "Queries",
+                                "type": "object",
+                            },
+                        },
+                    ],
+                    "responses": {
+                        "200": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {},
+                                },
+                            },
+                            "description": "Successful Response",
+                        },
+                        "422": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {
+                                        "$ref": "#/components/schemas/HTTPValidationError",
+                                    },
+                                },
+                            },
+                            "description": "Validation Error",
+                        },
+                    },
+                    "summary": "Get Sequence Mapping Query Params",
+                },
+            },
+            "/query/mixed-params": {
+                "get": {
+                    "operationId": "get_mixed_mapping_query_params_query_mixed_params_get",
+                    "parameters": [
+                        {
+                            "in": "query",
+                            "name": "sequence_mapping_queries",
+                            "required": False,
+                            "schema": {
+                                "additionalProperties": {
+                                    "items": {
+                                        "type": "integer",
+                                    },
+                                    "type": "array",
+                                },
+                                "default": {},
+                                "title": "Sequence Mapping Queries",
+                                "type": "object",
+                            },
+                        },
+                        {
+                            "in": "query",
+                            "name": "mapping_query",
+                            "required": True,
+                            "schema": {
+                                "additionalProperties": {
+                                    "type": "string",
+                                },
+                                "title": "Mapping Query",
+                                "type": "object",
+                            },
+                        },
+                        {
+                            "in": "query",
+                            "name": "query",
+                            "required": True,
+                            "schema": {
+                                "title": "Query",
+                                "type": "string",
+                            },
+                        },
+                    ],
+                    "responses": {
+                        "200": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {},
+                                },
+                            },
+                            "description": "Successful Response",
+                        },
+                        "422": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {
+                                        "$ref": "#/components/schemas/HTTPValidationError",
+                                    },
+                                },
+                            },
+                            "description": "Validation Error",
+                        },
+                    },
+                    "summary": "Get Mixed Mapping Query Params",
+                },
+            },
+            "/query/mixed-type-params": {
+                "get": {
+                    "operationId": "get_mixed_mapping_mixed_type_query_params_query_mixed_type_params_get",
+                    "parameters": [
+                        {
+                            "in": "query",
+                            "name": "sequence_mapping_queries",
+                            "required": False,
+                            "schema": {
+                                "additionalProperties": {
+                                    "items": {
+                                        "type": "integer",
+                                    },
+                                    "type": "array",
+                                },
+                                "default": {},
+                                "title": "Sequence Mapping Queries",
+                                "type": "object",
+                            },
+                        },
+                        {
+                            "in": "query",
+                            "name": "mapping_query_str",
+                            "required": False,
+                            "schema": {
+                                "additionalProperties": {
+                                    "type": "string",
+                                },
+                                "default": {},
+                                "title": "Mapping Query Str",
+                                "type": "object",
+                            },
+                        },
+                        {
+                            "in": "query",
+                            "name": "mapping_query_int",
+                            "required": False,
+                            "schema": {
+                                "additionalProperties": {
+                                    "type": "integer",
+                                },
+                                "default": {},
+                                "title": "Mapping Query Int",
+                                "type": "object",
+                            },
+                        },
+                        {
+                            "in": "query",
+                            "name": "query",
+                            "required": True,
+                            "schema": {
+                                "title": "Query",
+                                "type": "integer",
+                            },
+                        },
+                    ],
+                    "responses": {
+                        "200": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {},
+                                },
+                            },
+                            "description": "Successful Response",
+                        },
+                        "422": {
+                            "content": {
+                                "application/json": {
+                                    "schema": {
+                                        "$ref": "#/components/schemas/HTTPValidationError",
+                                    },
+                                },
+                            },
+                            "description": "Validation Error",
+                        },
+                    },
+                    "summary": "Get Mixed Mapping Mixed Type Query Params",
+                },
+            },
            "/enum-status-code": {
                "get": {
                    "responses": {
@@ -0,0 +1,13 @@
+from typing import Dict, List
+
+import pytest
+from fastapi import FastAPI, Query
+
+
+def test_invalid_sequence():
+    with pytest.raises(AssertionError):
+        app = FastAPI()
+
+        @app.get("/items/")
+        def read_items(q: Dict[str, List[List[str]]] = Query(default=None)):
+            pass # pragma: no cover
@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Tuple

 import pytest
 from fastapi import FastAPI, Query
@@ -39,15 +39,3 @@ def test_invalid_dict():
         @app.get("/items/")
         def read_items(q: Dict[str, Item] = Query(default=None)):
             pass # pragma: no cover
-
-
-def test_invalid_simple_dict():
-    with pytest.raises(AssertionError):
-        app = FastAPI()
-
-        class Item(BaseModel):
-            title: str
-
-        @app.get("/items/")
-        def read_items(q: Optional[dict] = Query(default=None)):
-            pass # pragma: no cover
@@ -392,12 +392,6 @@ def test_query_param_required_int_query_foo():
     )


-def test_query_frozenset_query_1_query_1_query_2():
-    response = client.get("/query/frozenset/?query=1&query=1&query=2")
-    assert response.status_code == 200
-    assert response.json() == "1,2"
-
-
 def test_query_list():
     response = client.get("/query/list/?device_ids=1&device_ids=2")
     assert response.status_code == 200
@@ -419,3 +413,36 @@ def test_query_list_default_empty():
     response = client.get("/query/list-default/")
     assert response.status_code == 200
     assert response.json() == []
+
+
+def test_query_frozenset_query_1_query_1_query_2():
+    response = client.get("/query/frozenset/?query=1&query=1&query=2")
+    assert response.status_code == 200
+    assert response.json() == "1,2"
+
+
+def test_mapping_query():
+    response = client.get("/query/mapping-params/?foo=fuzz&bar=buzz")
+    assert response.status_code == 200
+    assert response.json() == {"queries": {"bar": "buzz", "foo": "fuzz"}}
+
+
+def test_sequence_mapping_query():
+    response = client.get("/query/mapping-sequence-params/?foo=1&foo=2")
+    assert response.status_code == 200
+    assert response.json() == {"queries": {"foo": [1, 2]}}
+
+
+def test_mapping_with_non_mapping_query():
+    response = client.get("/query/mixed-params/?foo=1&foo=2&bar=3&query=fizz")
+    assert response.status_code == 200
+    assert response.json() == {
+        "queries": {
+            "query": "fizz",
+            "mapping_query": {"foo": "2", "bar": "3"},
+            "sequence_mapping_queries": {
+                "foo": [1, 2],
+                "bar": [3],
+            },
+        }
+    }
@@ -0,0 +1,36 @@
+import pytest
+from fastapi.testclient import TestClient
+
+from tests.utils import needs_py310
+
+
+@pytest.fixture(name="client")
+def get_client():
+    from docs_src.query_params.tutorial007_py310 import app
+
+    c = TestClient(app)
+    return c
+
+
+@needs_py310
+def test_foo_needy_very(client: TestClient):
+    response = client.get("/query/mixed-type-params?query=1&query=2&foo=bar&foo=baz")
+    assert response.status_code == 200
+    assert response.json() == {
+        "query": 2,
+        "mapping_query_str": {"foo": "baz"},
+        "mapping_query_int": {},
+        "sequence_mapping_int": {"foo": []},
+    }
+
+
+@needs_py310
+def test_just_string_not_scalar_mapping(client: TestClient):
+    response = client.get("/query/mixed-type-params?&query=2&foo=1&bar=3&foo=2&foo=baz")
+    assert response.status_code == 200
+    assert response.json() == {
+        "query": 2,
+        "mapping_query_str": {"bar": "3", "foo": "baz"},
+        "mapping_query_int": {"bar": 3},
+        "sequence_mapping_int": {"bar": [3], "foo": [1, 2]},
+    }