Skip to content

Commit 5e74429

Browse files
review feedback
1 parent: ac47786 · commit: 5e74429

30 files changed: +91 additions, −91 deletions

elasticsearch/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,15 +27,15 @@
2727
from ._version import __versionstr__
2828

2929
# Ensure that a compatible version of elastic-transport is installed.
30-
_version_groups = tuple(int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", _elastic_transport_version).groups()) # type: ignore
30+
_version_groups = tuple(int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", _elastic_transport_version).groups()) # type: ignore[union-attr]
3131
if _version_groups < (8, 0, 0) or _version_groups > (9, 0, 0):
3232
raise ImportError(
3333
"An incompatible version of elastic-transport is installed. Must be between "
3434
"v8.0.0 and v9.0.0. Install the correct version with the following command: "
3535
"$ python -m pip install 'elastic-transport>=8, <9'"
3636
)
3737

38-
_version_groups = re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() # type: ignore
38+
_version_groups = re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() # type: ignore[assignment, union-attr]
3939
_major, _minor, _patch = (int(x) for x in _version_groups)
4040
VERSION = __version__ = (_major, _minor, _patch)
4141

elasticsearch/_async/helpers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -257,7 +257,7 @@ async def map_actions() -> AsyncIterable[_TYPE_BULK_ACTION_HEADER_AND_BODY]:
257257
]
258258
ok: bool
259259
info: Dict[str, Any]
260-
async for data, (ok, info) in azip( # type: ignore
260+
async for data, (ok, info) in azip( # type: ignore[assignment, misc]
261261
bulk_data,
262262
_process_bulk_chunk(
263263
client,

elasticsearch/_sync/client/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -232,7 +232,7 @@ def host_mapping_to_node_config(host: Mapping[str, Union[str, int]]) -> NodeConf
232232
)
233233
options["path_prefix"] = options.pop("url_prefix")
234234

235-
return NodeConfig(**options) # type: ignore
235+
return NodeConfig(**options) # type: ignore[arg-type]
236236

237237

238238
def cloud_id_to_node_configs(cloud_id: str) -> List[NodeConfig]:

elasticsearch/dsl/__init__.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -104,9 +104,6 @@
104104
from .utils import AttrDict, AttrList, DslBase
105105
from .wrappers import Range
106106

107-
VERSION = (8, 17, 1)
108-
__version__ = VERSION
109-
__versionstr__ = ".".join(map(str, VERSION))
110107
__all__ = [
111108
"A",
112109
"Agg",

elasticsearch/dsl/_async/document.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -241,11 +241,11 @@ async def mget(
241241
error_ids = [doc["_id"] for doc in error_docs]
242242
message = "Required routing not provided for documents %s."
243243
message %= ", ".join(error_ids)
244-
raise RequestError(400, message, error_docs) # type: ignore
244+
raise RequestError(400, message, error_docs) # type: ignore[arg-type]
245245
if missing_docs:
246246
missing_ids = [doc["_id"] for doc in missing_docs]
247247
message = f"Documents {', '.join(missing_ids)} not found."
248-
raise NotFoundError(404, message, {"docs": missing_docs}) # type: ignore
248+
raise NotFoundError(404, message, {"docs": missing_docs}) # type: ignore[arg-type]
249249
return objs
250250

251251
async def delete(

elasticsearch/dsl/_sync/document.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -235,11 +235,11 @@ def mget(
235235
error_ids = [doc["_id"] for doc in error_docs]
236236
message = "Required routing not provided for documents %s."
237237
message %= ", ".join(error_ids)
238-
raise RequestError(400, message, error_docs) # type: ignore
238+
raise RequestError(400, message, error_docs) # type: ignore[arg-type]
239239
if missing_docs:
240240
missing_ids = [doc["_id"] for doc in missing_docs]
241241
message = f"Documents {', '.join(missing_ids)} not found."
242-
raise NotFoundError(404, message, {"docs": missing_docs}) # type: ignore
242+
raise NotFoundError(404, message, {"docs": missing_docs}) # type: ignore[arg-type]
243243
return objs
244244

245245
def delete(

elasticsearch/dsl/aggs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@ def pipeline(
193193
return cast("Pipeline[_R]", self._agg(False, name, agg_type, *args, **params))
194194

195195
def result(self, search: "SearchBase[_R]", data: Any) -> AttrDict[Any]:
196-
return BucketData(self, search, data) # type: ignore
196+
return BucketData(self, search, data) # type: ignore[arg-type]
197197

198198

199199
class Bucket(AggBase[_R], Agg[_R]):

elasticsearch/dsl/analysis.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,9 @@ def _type_shortcut(
3737
return name_or_instance # type: ignore[return-value]
3838

3939
if not (type or kwargs):
40-
return cls.get_dsl_class("builtin")(name_or_instance) # type: ignore
40+
return cls.get_dsl_class("builtin")(name_or_instance) # type: ignore[no-any-return, attr-defined]
4141

42-
return cls.get_dsl_class(type, "custom")( # type: ignore
42+
return cls.get_dsl_class(type, "custom")( # type: ignore[no-any-return, attr-defined]
4343
name_or_instance, type or "custom", **kwargs
4444
)
4545

@@ -54,13 +54,13 @@ def __init__(self, filter_name: str, builtin_type: str = "custom", **kwargs: Any
5454

5555
def to_dict(self) -> Dict[str, Any]:
5656
# only name to present in lists
57-
return self._name # type: ignore
57+
return self._name # type: ignore[return-value]
5858

5959
def get_definition(self) -> Dict[str, Any]:
60-
d = super().to_dict() # type: ignore
60+
d = super().to_dict() # type: ignore[misc]
6161
d = d.pop(self.name)
6262
d["type"] = self._builtin_type
63-
return d # type: ignore
63+
return d # type: ignore[no-any-return]
6464

6565

6666
class CustomAnalysisDefinition(CustomAnalysis):
@@ -111,7 +111,7 @@ def __init__(self, name: str):
111111

112112
def to_dict(self) -> Dict[str, Any]:
113113
# only name to present in lists
114-
return self._name # type: ignore
114+
return self._name # type: ignore[return-value]
115115

116116

117117
class Analyzer(AnalysisBase, DslBase):

elasticsearch/dsl/connections.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -116,16 +116,16 @@ def get_connection(self, alias: Union[str, _T] = "default") -> _T:
116116
raise KeyError(f"There is no connection with alias {alias!r}.")
117117

118118
def _with_user_agent(self, conn: _T) -> _T:
119-
from . import __versionstr__ # this is here to avoid circular imports
119+
from elasticsearch import (
120+
__versionstr__, # this is here to avoid circular imports
121+
)
120122

121123
# try to inject our user agent
122124
if hasattr(conn, "_headers"):
123125
is_frozen = conn._headers.frozen
124126
if is_frozen:
125127
conn._headers = conn._headers.copy()
126-
conn._headers.update(
127-
{"user-agent": f"elasticsearch-dsl-py/{__versionstr__}"}
128-
)
128+
conn._headers.update({"user-agent": f"elasticsearch-py/{__versionstr__}"})
129129
if is_frozen:
130130
conn._headers.freeze()
131131
return conn

elasticsearch/dsl/document_base.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
try:
3737
from types import UnionType
3838
except ImportError:
39-
UnionType = None # type: ignore
39+
UnionType = None # type: ignore[assignment, misc]
4040

4141
from typing_extensions import dataclass_transform
4242

@@ -81,14 +81,14 @@ def __init__(self, name: str, field: Field):
8181
def __getattr__(self, attr: str) -> "InstrumentedField":
8282
try:
8383
# first let's see if this is an attribute of this object
84-
return super().__getattribute__(attr) # type: ignore
84+
return super().__getattribute__(attr) # type: ignore[no-any-return]
8585
except AttributeError:
8686
try:
8787
# next we see if we have a sub-field with this name
8888
return InstrumentedField(f"{self._name}.{attr}", self._field[attr])
8989
except KeyError:
9090
# lastly we let the wrapped field resolve this attribute
91-
return getattr(self._field, attr) # type: ignore
91+
return getattr(self._field, attr) # type: ignore[no-any-return]
9292

9393
def __pos__(self) -> str:
9494
"""Return the field name representation for ascending sort order"""
@@ -226,7 +226,7 @@ def __init__(self, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any]):
226226
field_args = [type_]
227227
elif type_ in self.type_annotation_map:
228228
# use best field type for the type hint provided
229-
field, field_kwargs = self.type_annotation_map[type_] # type: ignore
229+
field, field_kwargs = self.type_annotation_map[type_] # type: ignore[assignment]
230230

231231
if field:
232232
field_kwargs = {

elasticsearch/dsl/faceted_search_base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def add_filter(self, filter_values: List[FilterValueType]) -> Optional[Query]:
9393
f |= self.get_value_filter(v)
9494
return f
9595

96-
def get_value_filter(self, filter_value: FilterValueType) -> Query: # type: ignore
96+
def get_value_filter(self, filter_value: FilterValueType) -> Query: # type: ignore[empty-body]
9797
"""
9898
Construct a filter for an individual value
9999
"""

elasticsearch/dsl/field.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -378,7 +378,7 @@ def clean(self, data: Any) -> Optional[bool]:
378378
data = self.deserialize(data)
379379
if data is None and self._required:
380380
raise ValidationException("Value required for this field.")
381-
return data # type: ignore
381+
return data # type: ignore[no-any-return]
382382

383383

384384
class Float(Field):
@@ -515,12 +515,12 @@ class Percolator(Field):
515515
_coerce = True
516516

517517
def _deserialize(self, data: Any) -> "Query":
518-
return Q(data) # type: ignore
518+
return Q(data) # type: ignore[no-any-return]
519519

520520
def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
521521
if data is None:
522522
return None
523-
return data.to_dict() # type: ignore
523+
return data.to_dict() # type: ignore[no-any-return]
524524

525525

526526
class RangeField(Field):
@@ -530,15 +530,15 @@ class RangeField(Field):
530530
def _deserialize(self, data: Any) -> Range["_SupportsComparison"]:
531531
if isinstance(data, Range):
532532
return data
533-
data = {k: self._core_field.deserialize(v) for k, v in data.items()} # type: ignore
533+
data = {k: self._core_field.deserialize(v) for k, v in data.items()} # type: ignore[union-attr]
534534
return Range(data)
535535

536536
def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
537537
if data is None:
538538
return None
539539
if not isinstance(data, collections.abc.Mapping):
540540
data = data.to_dict()
541-
return {k: self._core_field.serialize(v) for k, v in data.items()} # type: ignore
541+
return {k: self._core_field.serialize(v) for k, v in data.items()} # type: ignore[union-attr]
542542

543543

544544
class IntegerRange(RangeField):

elasticsearch/dsl/mapping_base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def field(self, name: str, *args: Any, **kwargs: Any) -> Self:
6464

6565
def _collect_fields(self) -> Iterator[Field]:
6666
"""Iterate over all Field objects within, including multi fields."""
67-
fields = cast(Dict[str, Field], self.properties.to_dict()) # type: ignore
67+
fields = cast(Dict[str, Field], self.properties.to_dict()) # type: ignore[attr-defined]
6868
for f in fields.values():
6969
yield f
7070
# multi fields

elasticsearch/dsl/py.typed

Whitespace-only changes.

elasticsearch/dsl/query.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -637,7 +637,7 @@ def __init__(
637637
functions = []
638638
for name in ScoreFunction._classes:
639639
if name in kwargs:
640-
functions.append({name: kwargs.pop(name)}) # type: ignore
640+
functions.append({name: kwargs.pop(name)}) # type: ignore[arg-type]
641641
super().__init__(
642642
boost_mode=boost_mode,
643643
functions=functions,

elasticsearch/dsl/response/__init__.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -197,9 +197,9 @@ def search_after(self) -> "SearchBase[_R]":
197197
"""
198198
if len(self.hits) == 0:
199199
raise ValueError("Cannot use search_after when there are no search results")
200-
if not hasattr(self.hits[-1].meta, "sort"): # type: ignore
200+
if not hasattr(self.hits[-1].meta, "sort"): # type: ignore[attr-defined]
201201
raise ValueError("Cannot use search_after when results are not sorted")
202-
return self._search.extra(search_after=self.hits[-1].meta.sort) # type: ignore
202+
return self._search.extra(search_after=self.hits[-1].meta.sort) # type: ignore[attr-defined]
203203

204204

205205
AggregateResponseType = Union[
@@ -293,7 +293,7 @@ def __getitem__(self, attr_name: str) -> AggregateResponseType:
293293
AggregateResponseType,
294294
agg.result(self._meta["search"], self._d_[attr_name]),
295295
)
296-
return super().__getitem__(attr_name) # type: ignore
296+
return super().__getitem__(attr_name) # type: ignore[no-any-return]
297297

298298
def __iter__(self) -> Iterator[AggregateResponseType]: # type: ignore[override]
299299
for name in self._meta["aggs"]:

elasticsearch/dsl/response/aggs.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def _wrap_bucket(self, data: Dict[str, Any]) -> Bucket[_R]:
6363
)
6464

6565
def __iter__(self) -> Iterator["Agg"]: # type: ignore[override]
66-
return iter(self.buckets) # type: ignore
66+
return iter(self.buckets) # type: ignore[arg-type]
6767

6868
def __len__(self) -> int:
6969
return len(self.buckets)
@@ -83,7 +83,7 @@ def buckets(self) -> Union[AttrDict[Any], AttrList[Any]]:
8383
if isinstance(bs, list):
8484
ret = AttrList(bs, obj_wrapper=self._wrap_bucket)
8585
else:
86-
ret = AttrDict[Any]({k: self._wrap_bucket(bs[k]) for k in bs}) # type: ignore
86+
ret = AttrDict[Any]({k: self._wrap_bucket(bs[k]) for k in bs}) # type: ignore[assignment]
8787
super(AttrDict, self).__setattr__("_buckets", ret)
8888
return self._buckets
8989

Comments (0)