mirror of https://github.com/tiangolo/fastapi.git

Tests for coverage and SQLAlchemy

* Avoid connection issues
* Add tests for coverage (coordinate)

This commit is contained in:
parent e7c7b02a82
commit 29ccbbc119
@@ -38,7 +38,7 @@ try:
     from pydantic_extra_types import coordinate

     encoders_by_extra_type: dict[type[Any], Callable[[Any], Any]] = {
-        coordinate.Coordinate: str,
+        coordinate.Coordinate: lambda o: {"latitude": o.latitude, "longitude": o.longitude},
         et_color.Color: str,
     }
 except ImportError:
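The practical effect of this hunk: a `Coordinate` now keeps its structure instead of being collapsed to its string form, which is what the previous `str` encoder did. A minimal sketch of the behavior after this change, assuming `pydantic_extra_types` is installed (values are illustrative):

    from fastapi.encoders import jsonable_encoder
    from pydantic_extra_types.coordinate import Coordinate

    coord = Coordinate(latitude=1.0, longitude=2.0)

    # With the lambda-based encoder the latitude/longitude survive as a JSON object:
    assert jsonable_encoder(coord) == {"latitude": 1.0, "longitude": 2.0}
    # The old mapping (`coordinate.Coordinate: str`) produced a plain string instead.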
@@ -328,6 +328,24 @@ def jsonable_encoder(
                 encoded_dict[encoded_key] = encoded_value
         return encoded_dict

+    # Check if it's a named tuple, and if so, encode it as a dict (instead of a list) if `named_tuple_as_dict` is `True`.
+    if (
+        named_tuple_as_dict
+        and getattr(obj, "_asdict", None) is not None
+        and callable(obj._asdict)
+    ):
+        return jsonable_encoder(
+            obj._asdict(),
+            include=include,
+            exclude=exclude,
+            by_alias=by_alias,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+            custom_encoder=custom_encoder,
+            sqlalchemy_safe=sqlalchemy_safe,
+        )
+
     # Note that we check for `Sequence` and not `list` because we want to support any kind of sequence, like `list`, `tuple`, `set`, etc.
     # Also, we check that it's not a `bytes` object, because `bytes` is also a `Sequence`, but we want to rely on the TYPE_ENCODERS for `bytes` and avoid code duplication.
     if isinstance(obj, (Sequence, GeneratorType)) and not isinstance(obj, bytes):
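Because a named tuple is itself a `tuple` (and therefore a `Sequence`), this branch has to run before the `Sequence`/`GeneratorType` check below; the identical block removed in the `@@ -348,23 +366,6 @@` hunk further down sat after the sequence handling, where a named tuple had already been flattened to a list before `_asdict` was consulted. A short usage sketch of the `named_tuple_as_dict` flag exercised by the tests added in this commit (it is not part of upstream FastAPI's signature):

    from collections import namedtuple

    from fastapi.encoders import jsonable_encoder

    Point = namedtuple("Point", ["x", "y"])

    assert jsonable_encoder(Point(1, 2)) == [1, 2]  # default: encode as a list
    assert jsonable_encoder(Point(1, 2), named_tuple_as_dict=True) == {"x": 1, "y": 2}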
@@ -348,23 +366,6 @@ def jsonable_encoder(
             )
         return encoded_list

-    if (
-        named_tuple_as_dict
-        and getattr(obj, "_asdict", None) is not None
-        and callable(obj._asdict)
-    ):
-        return jsonable_encoder(
-            obj._asdict(),
-            include=include,
-            exclude=exclude,
-            by_alias=by_alias,
-            exclude_unset=exclude_unset,
-            exclude_defaults=exclude_defaults,
-            exclude_none=exclude_none,
-            custom_encoder=custom_encoder,
-            sqlalchemy_safe=sqlalchemy_safe,
-        )
-
     if type(obj) in encoders_by_extra_type:
         return encoders_by_extra_type[type(obj)](obj)
     if type(obj) in ENCODERS_BY_TYPE:
@@ -1,5 +1,5 @@
 import warnings
-from collections import deque
+from collections import deque, namedtuple
 from collections.abc import Sequence
 from dataclasses import dataclass
 from datetime import datetime, timezone
@@ -7,7 +7,7 @@ from decimal import Decimal
 from enum import Enum
 from math import isinf, isnan
 from pathlib import PurePath, PurePosixPath, PureWindowsPath
-from typing import Optional, TypedDict, Union
+from typing import NamedTuple, Optional, TypedDict, Union

 import pytest
 from fastapi._compat import Undefined
@@ -326,4 +326,104 @@ def test_encode_sequence():
         return len(self._items)

     seq = SequenceModel(["item1", "item2", "item3"])
     assert len(seq) == 3
     assert jsonable_encoder(seq) == ["item1", "item2", "item3"]
+
+
+def test_encode_bytes():
+    assert jsonable_encoder(b"hello") == "hello"
+
+
+def test_encode_bytes_in_dict():
+    data = {"content": b"hello", "name": "test"}
+    assert jsonable_encoder(data) == {"content": "hello", "name": "test"}
+
+
+def test_encode_list_of_bytes():
+    data = [b"hello", b"world"]
+    assert jsonable_encoder(data) == ["hello", "world"]
+
+
+def test_encode_generator():
+    def gen():
+        yield 1
+        yield 2
+        yield 3
+
+    assert jsonable_encoder(gen()) == [1, 2, 3]
+
+
+def test_encode_generator_of_bytes():
+    def gen():
+        yield b"hello"
+        yield b"world"
+
+    assert jsonable_encoder(gen()) == ["hello", "world"]
+
+
+def test_encode_named_tuple_as_list():
+    Point = namedtuple("Point", ["x", "y"])
+    p = Point(1, 2)
+    assert jsonable_encoder(p) == [1, 2]
+
+
+def test_encode_named_tuple_as_dict():
+    Point = namedtuple("Point", ["x", "y"])
+    p = Point(1, 2)
+    assert jsonable_encoder(p, named_tuple_as_dict=True) == {"x": 1, "y": 2}
+
+
+def test_encode_typed_named_tuple_as_list():
+    class Point(NamedTuple):
+        x: int
+        y: int
+
+    p = Point(1, 2)
+    assert jsonable_encoder(p) == [1, 2]
+
+
+def test_encode_typed_named_tuple_as_dict():
+    class Point(NamedTuple):
+        x: int
+        y: int
+
+    p = Point(1, 2)
+    assert jsonable_encoder(p, named_tuple_as_dict=True) == {"x": 1, "y": 2}
+
+
+def test_encode_sqlalchemy_safe_filters_sa_keys():
+    data = {"name": "test", "_sa_instance_state": "internal"}
+    assert jsonable_encoder(data, sqlalchemy_safe=True) == {"name": "test"}
+    assert jsonable_encoder(data, sqlalchemy_safe=False) == {
+        "name": "test",
+        "_sa_instance_state": "internal",
+    }
+
+
+def test_encode_sqlalchemy_row_as_list():
+    sa = pytest.importorskip("sqlalchemy")
+    engine = sa.create_engine("sqlite:///:memory:")
+    with engine.connect() as conn:
+        row = conn.execute(sa.text("SELECT 1 AS x, 2 AS y")).fetchone()
+    engine.dispose()
+    assert row is not None
+    assert jsonable_encoder(row) == [1, 2]
+
+
+def test_encode_sqlalchemy_row_as_dict():
+    sa = pytest.importorskip("sqlalchemy")
+    engine = sa.create_engine("sqlite:///:memory:")
+    with engine.connect() as conn:
+        row = conn.execute(sa.text("SELECT 1 AS x, 2 AS y")).fetchone()
+    engine.dispose()
+    assert row is not None
+    assert jsonable_encoder(row, named_tuple_as_dict=True) == {"x": 1, "y": 2}
+
+
+def test_encode_pydantic_extra_types_coordinate():
+    coordinate = pytest.importorskip("pydantic_extra_types.coordinate")
+    coord = coordinate.Coordinate(latitude=1.0, longitude=2.0)
+    assert jsonable_encoder(coord) != str(coord)
+    assert jsonable_encoder(coord) == {"latitude": 1.0, "longitude": 2.0}
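The SQLAlchemy row tests lean on the same duck-typed hook as the named-tuple branch: a SQLAlchemy 1.4+/2.0 `Row` iterates like a tuple and exposes `_asdict()`, which is why `named_tuple_as_dict=True` yields a mapping, and the explicit `engine.dispose()` call is presumably the "avoid connection issues" part of the commit message. A rough standalone sketch of that equivalence, assuming SQLAlchemy is installed:

    import sqlalchemy as sa

    from fastapi.encoders import jsonable_encoder

    engine = sa.create_engine("sqlite:///:memory:")
    with engine.connect() as conn:
        row = conn.execute(sa.text("SELECT 1 AS x, 2 AS y")).fetchone()
    engine.dispose()  # release pooled connections, as the tests above do
    assert row is not None

    # Row behaves like a named tuple: iterable as a sequence, convertible via _asdict().
    assert list(row) == [1, 2]
    assert row._asdict() == {"x": 1, "y": 2}
    assert jsonable_encoder(row, named_tuple_as_dict=True) == {"x": 1, "y": 2}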