[Backport 8.x] replace JSONType with Any #1857

Merged · 1 commit · Jul 4, 2024
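
What the change means in practice: the DSL's to_dict() methods and response wrappers were annotated with a JSONType alias, and this backport re-types them with Any, consistent with the "# type: ignore" comments dropped in update_by_query.py below. The following sketch is illustrative only; it is not part of the diff, and the alias shape shown is an assumption about the old definition.

from typing import Any, Dict, List, Union

# Assumed shape of the removed alias (hypothetical reconstruction):
JSONType = Union[int, float, bool, str, None, List["JSONType"], Dict[str, "JSONType"]]

def to_dict_with_jsontype() -> Dict[str, JSONType]:
    # A strict recursive union: static checkers require narrowing or casts
    # before indexing into nested values.
    return {"query": {"match_all": {}}}

def to_dict_with_any() -> Dict[str, Any]:
    # The annotation used after this PR: the same payload, but nested access
    # type-checks without cast(...) or "# type: ignore".
    return {"query": {"match_all": {}}}

body = to_dict_with_any()
assert body["query"]["match_all"] == {}  # no cast needed under Any

The trade-off is deliberately looser typing: Any matches how AttrDict exposes responses through attribute-style access, at the cost of less precise static checking.
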
12 changes: 6 additions & 6 deletions elasticsearch_dsl/_async/search.py
@@ -25,7 +25,7 @@
 from ..async_connections import get_connection
 from ..response import Response
 from ..search_base import MultiSearchBase, SearchBase
-from ..utils import _R, AsyncUsingType, AttrDict, JSONType
+from ..utils import _R, AsyncUsingType, AttrDict


 class AsyncSearch(SearchBase[_R]):
@@ -108,9 +108,9 @@ async def scan(self) -> AsyncIterator[_R]:
         async for hit in async_scan(
             es, query=self.to_dict(), index=self._index, **self._params
         ):
-            yield self._get_result(cast(AttrDict[JSONType], hit))
+            yield self._get_result(cast(AttrDict[Any], hit))

-    async def delete(self) -> AttrDict[JSONType]:
+    async def delete(self) -> AttrDict[Any]:
         """
         delete() executes the query by delegating to delete_by_query()
         """
@@ -120,7 +120,7 @@ async def delete(self) -> AttrDict[JSONType]:

         return AttrDict(
             cast(
-                Dict[str, JSONType],
+                Dict[str, Any],
                 await es.delete_by_query(
                     index=self._index, body=self.to_dict(), **self._params
                 ),
@@ -214,5 +214,5 @@ async def scan(self) -> AsyncIterator[_R]:
         return
         yield  # a bit strange, but this forces an empty generator function

-    async def delete(self) -> AttrDict[JSONType]:
-        return AttrDict[JSONType]({})
+    async def delete(self) -> AttrDict[Any]:
+        return AttrDict[Any]({})
2 changes: 1 addition & 1 deletion elasticsearch_dsl/_async/update_by_query.py
@@ -40,7 +40,7 @@ async def execute(self) -> "UpdateByQueryResponse[_R]":
             self,
             (
                 await es.update_by_query(
-                    index=self._index, **self.to_dict(), **self._params  # type: ignore
+                    index=self._index, **self.to_dict(), **self._params
                 )
             ).body,
         )
12 changes: 6 additions & 6 deletions elasticsearch_dsl/_sync/search.py
@@ -25,7 +25,7 @@
 from ..connections import get_connection
 from ..response import Response
 from ..search_base import MultiSearchBase, SearchBase
-from ..utils import _R, AttrDict, JSONType, UsingType
+from ..utils import _R, AttrDict, UsingType


 class Search(SearchBase[_R]):
@@ -104,9 +104,9 @@ def scan(self) -> Iterator[_R]:
         es = get_connection(self._using)

         for hit in scan(es, query=self.to_dict(), index=self._index, **self._params):
-            yield self._get_result(cast(AttrDict[JSONType], hit))
+            yield self._get_result(cast(AttrDict[Any], hit))

-    def delete(self) -> AttrDict[JSONType]:
+    def delete(self) -> AttrDict[Any]:
         """
         delete() executes the query by delegating to delete_by_query()
         """
@@ -116,7 +116,7 @@ def delete(self) -> AttrDict[JSONType]:

         return AttrDict(
             cast(
-                Dict[str, JSONType],
+                Dict[str, Any],
                 es.delete_by_query(
                     index=self._index, body=self.to_dict(), **self._params
                 ),
@@ -208,5 +208,5 @@ def scan(self) -> Iterator[_R]:
         return
         yield  # a bit strange, but this forces an empty generator function

-    def delete(self) -> AttrDict[JSONType]:
-        return AttrDict[JSONType]({})
+    def delete(self) -> AttrDict[Any]:
+        return AttrDict[Any]({})
4 changes: 1 addition & 3 deletions elasticsearch_dsl/_sync/update_by_query.py
@@ -39,9 +39,7 @@ def execute(self) -> "UpdateByQueryResponse[_R]":
         self._response = self._response_class(
             self,
             (
-                es.update_by_query(
-                    index=self._index, **self.to_dict(), **self._params  # type: ignore
-                )
+                es.update_by_query(index=self._index, **self.to_dict(), **self._params)
             ).body,
         )
         return self._response
10 changes: 5 additions & 5 deletions elasticsearch_dsl/aggs.py
@@ -31,7 +31,7 @@
 )

 from .response.aggs import AggResponse, BucketData, FieldBucketData, TopHitsData
-from .utils import _R, AttrDict, DslBase, JSONType
+from .utils import _R, AttrDict, DslBase

 if TYPE_CHECKING:
     from .query import Query
@@ -96,10 +96,10 @@ class Agg(DslBase, Generic[_R]):
     def __contains__(self, key: str) -> bool:
         return False

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         d = super().to_dict()
         if isinstance(d[self.name], dict):
-            n = cast(Dict[str, JSONType], d[self.name])
+            n = cast(Dict[str, Any], d[self.name])
             if "meta" in n:
                 d["meta"] = n.pop("meta")
         return d
@@ -170,7 +170,7 @@ def __init__(self, **params: Any):
         # remember self for chaining
         self._base = self

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         d = super(AggBase, self).to_dict()
         if isinstance(d[self.name], dict):
             n = cast(AttrDict[Any], d[self.name])
@@ -191,7 +191,7 @@ def __init__(self, filter: Optional[Union[str, "Query"]] = None, **params: Any):
         params["filter"] = filter
         super().__init__(**params)

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         d = super().to_dict()
         if isinstance(d[self.name], dict):
             n = cast(AttrDict[Any], d[self.name])
6 changes: 3 additions & 3 deletions elasticsearch_dsl/analysis.py
@@ -18,7 +18,7 @@
 from typing import Any, ClassVar, Dict, List, Optional, Union, cast

 from . import async_connections, connections
-from .utils import AsyncUsingType, AttrDict, DslBase, JSONType, UsingType, merge
+from .utils import AsyncUsingType, AttrDict, DslBase, UsingType, merge

 __all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"]

@@ -52,7 +52,7 @@ def __init__(self, filter_name: str, builtin_type: str = "custom", **kwargs: Any
         self._name = filter_name
         super().__init__(**kwargs)

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         # only name to present in lists
         return self._name  # type: ignore

@@ -109,7 +109,7 @@ def __init__(self, name: str):
         self._name = name
         super().__init__()

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         # only name to present in lists
         return self._name  # type: ignore

4 changes: 2 additions & 2 deletions elasticsearch_dsl/document_base.py
@@ -36,7 +36,7 @@
 from .exceptions import ValidationException
 from .field import Binary, Boolean, Date, Field, Float, Integer, Nested, Object, Text
 from .mapping import Mapping
-from .utils import DOC_META_FIELDS, JSONType, ObjectBase
+from .utils import DOC_META_FIELDS, ObjectBase

 if TYPE_CHECKING:
     from elastic_transport import ObjectApiResponse
@@ -376,7 +376,7 @@ def __repr__(self) -> str:
             ),
         )

-    def to_dict(self, include_meta: bool = False, skip_empty: bool = True) -> Dict[str, JSONType]:  # type: ignore[override]
+    def to_dict(self, include_meta: bool = False, skip_empty: bool = True) -> Dict[str, Any]:  # type: ignore[override]
         """
         Serialize the instance into a dictionary so that it can be saved in elasticsearch.

6 changes: 3 additions & 3 deletions elasticsearch_dsl/faceted_search_base.py
@@ -23,7 +23,7 @@
 from .aggs import A, Agg
 from .query import MatchAll, Nested, Query, Range, Terms
 from .response import Response
-from .utils import _R, AttrDict, JSONType
+from .utils import _R, AttrDict

 if TYPE_CHECKING:
     from .response.aggs import BucketData
@@ -137,9 +137,9 @@ def add_filter(self, filter_values: List[FilterValueType]) -> Optional[Query]:
 class RangeFacet(Facet[_R]):
     agg_type = "range"

-    def _range_to_dict(self, range: Tuple[Any, Tuple[int, int]]) -> Dict[str, JSONType]:
+    def _range_to_dict(self, range: Tuple[Any, Tuple[int, int]]) -> Dict[str, Any]:
         key, _range = range
-        out: Dict[str, JSONType] = {"key": key}
+        out: Dict[str, Any] = {"key": key}
         if _range[0] is not None:
             out["from"] = _range[0]
         if _range[1] is not None:
8 changes: 4 additions & 4 deletions elasticsearch_dsl/field.py
@@ -37,7 +37,7 @@

 from .exceptions import ValidationException
 from .query import Q
-from .utils import AttrDict, AttrList, DslBase, JSONType
+from .utils import AttrDict, AttrList, DslBase
 from .wrappers import Range

 if TYPE_CHECKING:
@@ -150,9 +150,9 @@ def clean(self, data: Any) -> Any:
             raise ValidationException("Value required for this field.")
         return data

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         d = super().to_dict()
-        name, value = cast(Tuple[str, Dict[str, JSONType]], d.popitem())
+        name, value = cast(Tuple[str, Dict[str, Any]], d.popitem())
         value["type"] = name
         return value

@@ -161,7 +161,7 @@ class CustomField(Field):
     name = "custom"
     _coerce = True

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         if isinstance(self.builtin_type, Field):
             return self.builtin_type.to_dict()

6 changes: 3 additions & 3 deletions elasticsearch_dsl/function.py
@@ -19,7 +19,7 @@
 from copy import deepcopy
 from typing import Any, ClassVar, Dict, MutableMapping, Optional, Union, overload

-from .utils import DslBase, JSONType
+from .utils import DslBase


 @overload
@@ -89,7 +89,7 @@ class ScoreFunction(DslBase):
     }
     name: ClassVar[Optional[str]] = None

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         d = super().to_dict()
         # filter and query dicts should be at the same level as us
         for k in self._param_defs:
@@ -107,7 +107,7 @@ class ScriptScore(ScoreFunction):
 class BoostFactor(ScoreFunction):
     name = "boost_factor"

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         d = super().to_dict()
         if self.name is not None:
             val = d[self.name]
4 changes: 2 additions & 2 deletions elasticsearch_dsl/mapping_base.py
@@ -22,7 +22,7 @@
 from typing_extensions import Self

 from .field import Field, Nested, Text, construct_field
-from .utils import DslBase, JSONType
+from .utils import DslBase

 META_FIELDS = frozenset(
     (
@@ -205,7 +205,7 @@ def meta(self, name: str, params: Any = None, **kwargs: Any) -> Self:
         self._meta[name] = kwargs if params is None else params
         return self

-    def to_dict(self) -> Dict[str, JSONType]:
+    def to_dict(self) -> Dict[str, Any]:
         meta = self._meta

         # hard coded serialization of analyzers in _all
16 changes: 7 additions & 9 deletions elasticsearch_dsl/response/__init__.py
@@ -28,7 +28,7 @@
     cast,
 )

-from ..utils import _R, AttrDict, AttrList, JSONType, _wrap
+from ..utils import _R, AttrDict, AttrList, _wrap
 from .hit import Hit, HitMeta

 if TYPE_CHECKING:
@@ -40,7 +40,7 @@
 __all__ = ["Response", "AggResponse", "UpdateByQueryResponse", "Hit", "HitMeta"]


-class Response(AttrDict[JSONType], Generic[_R]):
+class Response(AttrDict[Any], Generic[_R]):
     _search: "SearchBase[_R]"
     _faceted_search: "FacetedSearchBase[_R]"
     _doc_class: Optional[_R]
@@ -92,7 +92,7 @@ def success(self) -> bool:
     @property
     def hits(self) -> List[_R]:
         if not hasattr(self, "_hits"):
-            h = cast(AttrDict[JSONType], self._d_["hits"])
+            h = cast(AttrDict[Any], self._d_["hits"])

             try:
                 hits = AttrList(list(map(self._search._get_result, h["hits"])))
@@ -116,7 +116,7 @@ def aggs(self) -> "AggResponse[_R]":
             aggs = AggResponse[_R](
                 cast("Agg[_R]", self._search.aggs),
                 self._search,
-                cast(Dict[str, JSONType], self._d_.get("aggregations", {})),
+                cast(Dict[str, Any], self._d_.get("aggregations", {})),
             )

             # avoid assigning _aggs into self._d_
@@ -156,12 +156,10 @@ def search_after(self) -> "SearchBase[_R]":
         return self._search.extra(search_after=self.hits[-1].meta.sort)  # type: ignore


-class AggResponse(AttrDict[JSONType], Generic[_R]):
+class AggResponse(AttrDict[Any], Generic[_R]):
     _meta: Dict[str, Any]

-    def __init__(
-        self, aggs: "Agg[_R]", search: "Request[_R]", data: Dict[str, JSONType]
-    ):
+    def __init__(self, aggs: "Agg[_R]", search: "Request[_R]", data: Dict[str, Any]):
         super(AttrDict, self).__setattr__("_meta", {"search": search, "aggs": aggs})
         super().__init__(data)

@@ -177,7 +175,7 @@ def __iter__(self) -> Iterator["Agg"]:  # type: ignore[override]
             yield self[name]


-class UpdateByQueryResponse(AttrDict[JSONType], Generic[_R]):
+class UpdateByQueryResponse(AttrDict[Any], Generic[_R]):
     _search: "UpdateByQueryBase[_R]"

     def __init__(
16 changes: 8 additions & 8 deletions elasticsearch_dsl/response/aggs.py
@@ -17,7 +17,7 @@

 from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Union, cast

-from ..utils import _R, AttrDict, AttrList, JSONType
+from ..utils import _R, AttrDict, AttrList
 from . import AggResponse, Response

 if TYPE_CHECKING:
@@ -31,7 +31,7 @@ def __init__(
         self,
         aggs: "Agg[_R]",
         search: "SearchBase[_R]",
-        data: Dict[str, JSONType],
+        data: Dict[str, Any],
         field: Optional["Field"] = None,
     ):
         super().__init__(aggs, search, data)
@@ -42,7 +42,7 @@ def __init__(
         self,
         aggs: "Agg[_R]",
         search: "SearchBase[_R]",
-        data: Dict[str, JSONType],
+        data: Dict[str, Any],
         field: Optional["Field"] = None,
     ):
         if field:
@@ -52,9 +52,9 @@

 class BucketData(AggResponse[_R]):
     _bucket_class = Bucket
-    _buckets: Union[AttrDict[JSONType], AttrList]
+    _buckets: Union[AttrDict[Any], AttrList]

-    def _wrap_bucket(self, data: Dict[str, JSONType]) -> Bucket[_R]:
+    def _wrap_bucket(self, data: Dict[str, Any]) -> Bucket[_R]:
         return self._bucket_class(
             self._meta["aggs"],
             self._meta["search"],
@@ -74,16 +74,16 @@ def __getitem__(self, key: Any) -> Any:
         return super().__getitem__(key)

     @property
-    def buckets(self) -> Union[AttrDict[JSONType], AttrList]:
+    def buckets(self) -> Union[AttrDict[Any], AttrList]:
         if not hasattr(self, "_buckets"):
             field = getattr(self._meta["aggs"], "field", None)
             if field:
                 self._meta["field"] = self._meta["search"]._resolve_field(field)
-            bs = cast(Union[Dict[str, JSONType], List[JSONType]], self._d_["buckets"])
+            bs = cast(Union[Dict[str, Any], List[Any]], self._d_["buckets"])
             if isinstance(bs, list):
                 ret = AttrList(bs, obj_wrapper=self._wrap_bucket)
             else:
-                ret = AttrDict[JSONType]({k: self._wrap_bucket(bs[k]) for k in bs})  # type: ignore
+                ret = AttrDict[Any]({k: self._wrap_bucket(bs[k]) for k in bs})  # type: ignore
             super(AttrDict, self).__setattr__("_buckets", ret)
         return self._buckets
