Skip to content

Commit b59e558

Browse files
committed
Bump black and fix lint
1 parent e0ddf2d commit b59e558

29 files changed

+50
-74
lines changed

elasticsearch/_async/helpers.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ async def _process_bulk_chunk(
6262
raise_on_error=True,
6363
ignore_status=(),
6464
*args,
65-
**kwargs
65+
**kwargs,
6666
):
6767
"""
6868
Send a bulk request to elasticsearch and process the output.
@@ -132,9 +132,8 @@ async def async_streaming_bulk(
132132
yield_ok=True,
133133
ignore_status=(),
134134
*args,
135-
**kwargs
135+
**kwargs,
136136
):
137-
138137
"""
139138
Streaming bulk consumes actions from the iterable passed in and yields
140139
results per action. For non-streaming usecases use
@@ -176,7 +175,6 @@ async def map_actions():
176175
async for bulk_data, bulk_actions in _chunk_actions(
177176
map_actions(), chunk_size, max_chunk_bytes, client.transport.serializer
178177
):
179-
180178
for attempt in range(max_retries + 1):
181179
to_retry, to_retry_data = [], []
182180
if attempt:
@@ -198,7 +196,6 @@ async def map_actions():
198196
**kwargs,
199197
),
200198
):
201-
202199
if not ok:
203200
action, info = info.popitem()
204201
# retry if retries enabled, we get 429, and we are not
@@ -292,7 +289,7 @@ async def async_scan(
292289
request_timeout=None,
293290
clear_scroll=True,
294291
scroll_kwargs=None,
295-
**kwargs
292+
**kwargs,
296293
):
297294
"""
298295
Simple abstraction on top of the
@@ -430,7 +427,6 @@ async def async_reindex(
430427
scan_kwargs={},
431428
bulk_kwargs={},
432429
):
433-
434430
"""
435431
Reindex all documents from one index that satisfy a given query
436432
to another, potentially (if `target_client` is specified) on a different cluster.

elasticsearch/_async/helpers.pyi

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def _process_bulk_chunk(
5050
raise_on_error: bool = ...,
5151
ignore_status: Optional[Union[int, Collection[int]]] = ...,
5252
*args: Any,
53-
**kwargs: Any
53+
**kwargs: Any,
5454
) -> AsyncGenerator[Tuple[bool, Any], None]: ...
5555
def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> AsyncGenerator[T, None]: ...
5656
def azip(
@@ -70,15 +70,15 @@ def async_streaming_bulk(
7070
yield_ok: bool = ...,
7171
ignore_status: Optional[Union[int, Collection[int]]] = ...,
7272
*args: Any,
73-
**kwargs: Any
73+
**kwargs: Any,
7474
) -> AsyncGenerator[Tuple[bool, Any], None]: ...
7575
async def async_bulk(
7676
client: AsyncElasticsearch,
7777
actions: Union[Iterable[Any], AsyncIterable[Any]],
7878
stats_only: bool = ...,
7979
ignore_status: Optional[Union[int, Collection[int]]] = ...,
8080
*args: Any,
81-
**kwargs: Any
81+
**kwargs: Any,
8282
) -> Tuple[int, Union[int, List[Any]]]: ...
8383
def async_scan(
8484
client: AsyncElasticsearch,
@@ -90,7 +90,7 @@ def async_scan(
9090
request_timeout: Optional[Union[float, int]] = ...,
9191
clear_scroll: bool = ...,
9292
scroll_kwargs: Optional[Mapping[str, Any]] = ...,
93-
**kwargs: Any
93+
**kwargs: Any,
9494
) -> AsyncGenerator[int, None]: ...
9595
async def async_reindex(
9696
client: AsyncElasticsearch,

elasticsearch/_async/http_aiohttp.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,6 @@ async def close(self):
8080

8181

8282
class AIOHttpConnection(AsyncConnection):
83-
8483
HTTP_CLIENT_META = ("ai", _client_meta_version(aiohttp.__version__))
8584

8685
def __init__(

elasticsearch/_async/transport.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ def __init__(
6767
retry_on_timeout=False,
6868
send_get_body_as="GET",
6969
meta_header=True,
70-
**kwargs
70+
**kwargs,
7171
):
7272
"""
7373
:arg hosts: list of dictionaries, each containing keyword arguments to
@@ -166,7 +166,6 @@ async def _async_init(self):
166166

167167
# ... and we can start sniffing in the background.
168168
if self.sniffing_task is None and self.sniff_on_start:
169-
170169
# Create an asyncio.Event for future calls to block on
171170
# until the initial sniffing task completes.
172171
self._sniff_on_start_event = asyncio.Event()
@@ -467,7 +466,6 @@ async def _do_verify_elasticsearch(self, headers, timeout):
467466
# Ensure that there's only one async exec within this section
468467
# at a time to not emit unnecessary index API calls.
469468
async with self._verify_elasticsearch_lock:
470-
471469
# Product check has already been completed while we were
472470
# waiting our turn, no need to do again.
473471
if self._verified_elasticsearch is not None:

elasticsearch/_async/transport.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ class AsyncTransport(object):
6464
retry_on_timeout: bool = ...,
6565
send_get_body_as: str = ...,
6666
meta_header: bool = ...,
67-
**kwargs: Any
67+
**kwargs: Any,
6868
) -> None: ...
6969
def add_connection(self, host: Any) -> None: ...
7070
def set_connections(self, hosts: Collection[Any]) -> None: ...

elasticsearch/client/utils.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ def query_params(
5252
response_mimetypes: Optional[List[str]] = ...,
5353
body_params: Optional[List[str]] = ...,
5454
body_name: Optional[str] = ...,
55-
body_required: Optional[bool] = ...
55+
body_required: Optional[bool] = ...,
5656
) -> Callable[[Callable[..., T]], Callable[..., T]]: ...
5757
def _bulk_body(
5858
serializer: Serializer, body: Union[str, bytes, Mapping[str, Any], Iterable[Any]]

elasticsearch/compat.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,12 +16,13 @@
1616
# under the License.
1717

1818
import asyncio
19+
from collections.abc import Mapping
20+
from queue import Queue
21+
from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse
1922

2023
string_types = str, bytes
21-
from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse
2224

2325
map = map
24-
from queue import Queue
2526

2627

2728
def to_str(x, encoding="ascii"):
@@ -36,8 +37,6 @@ def to_bytes(x, encoding="ascii"):
3637
return x
3738

3839

39-
from collections.abc import Mapping
40-
4140
reraise_exceptions = (RecursionError, asyncio.CancelledError)
4241

4342
try:

elasticsearch/connection/base.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,9 +82,8 @@ def __init__(
8282
api_key=None,
8383
opaque_id=None,
8484
meta_header=True,
85-
**kwargs
85+
**kwargs,
8686
):
87-
8887
if cloud_id:
8988
try:
9089
_, cloud_id = cloud_id.split(":")

elasticsearch/connection/base.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ class Connection(object):
5757
api_key: Optional[Union[Tuple[str, str], List[str], str]] = ...,
5858
opaque_id: Optional[str] = ...,
5959
meta_header: bool = ...,
60-
**kwargs: Any
60+
**kwargs: Any,
6161
) -> None: ...
6262
def __repr__(self) -> str: ...
6363
def __eq__(self, other: object) -> bool: ...

elasticsearch/connection/http_requests.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ def __init__(
8080
cloud_id=None,
8181
api_key=None,
8282
opaque_id=None,
83-
**kwargs
83+
**kwargs,
8484
):
8585
if not REQUESTS_AVAILABLE:
8686
raise ImproperlyConfigured(
@@ -101,7 +101,7 @@ def __init__(
101101
cloud_id=cloud_id,
102102
api_key=api_key,
103103
opaque_id=opaque_id,
104-
**kwargs
104+
**kwargs,
105105
)
106106

107107
if not self.http_compress:

elasticsearch/connection/http_requests.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,5 +40,5 @@ class RequestsHttpConnection(Connection):
4040
api_key: Optional[Any] = ...,
4141
opaque_id: Optional[str] = ...,
4242
meta_header: bool = ...,
43-
**kwargs: Any
43+
**kwargs: Any,
4444
) -> None: ...

elasticsearch/connection/http_urllib3.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ def __init__(
122122
cloud_id=None,
123123
api_key=None,
124124
opaque_id=None,
125-
**kwargs
125+
**kwargs,
126126
):
127127
# Initialize headers before calling super().__init__().
128128
self.headers = urllib3.make_headers(keep_alive=True)
@@ -136,7 +136,7 @@ def __init__(
136136
cloud_id=cloud_id,
137137
api_key=api_key,
138138
opaque_id=opaque_id,
139-
**kwargs
139+
**kwargs,
140140
)
141141
if http_auth is not None:
142142
if isinstance(http_auth, (tuple, list)):

elasticsearch/connection/http_urllib3.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,5 +54,5 @@ class Urllib3HttpConnection(Connection):
5454
api_key: Optional[Any] = ...,
5555
opaque_id: Optional[str] = ...,
5656
meta_header: bool = ...,
57-
**kwargs: Any
57+
**kwargs: Any,
5858
) -> None: ...

elasticsearch/connection_pool.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ def __init__(
118118
timeout_cutoff=5,
119119
selector_class=RoundRobinSelector,
120120
randomize_hosts=True,
121-
**kwargs
121+
**kwargs,
122122
):
123123
"""
124124
:arg connections: list of tuples containing the

elasticsearch/connection_pool.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ class ConnectionPool(object):
5151
timeout_cutoff: int = ...,
5252
selector_class: Type[ConnectionSelector] = ...,
5353
randomize_hosts: bool = ...,
54-
**kwargs: Any
54+
**kwargs: Any,
5555
) -> None: ...
5656
def mark_dead(self, connection: Connection, now: Optional[float] = ...) -> None: ...
5757
def mark_live(self, connection: Connection) -> None: ...

elasticsearch/helpers/__init__.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,6 @@
1515
# specific language governing permissions and limitations
1616
# under the License.
1717

18-
import sys
19-
2018
from .._async.helpers import async_bulk, async_reindex, async_scan, async_streaming_bulk
2119
from .actions import (
2220
_chunk_actions,

elasticsearch/helpers/actions.py

Lines changed: 8 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -225,7 +225,7 @@ def _process_bulk_chunk(
225225
raise_on_error=True,
226226
ignore_status=(),
227227
*args,
228-
**kwargs
228+
**kwargs,
229229
):
230230
"""
231231
Send a bulk request to elasticsearch and process the output.
@@ -278,9 +278,8 @@ def streaming_bulk(
278278
yield_ok=True,
279279
ignore_status=(),
280280
*args,
281-
**kwargs
281+
**kwargs,
282282
):
283-
284283
"""
285284
Streaming bulk consumes actions from the iterable passed in and yields
286285
results per action. For non-streaming usecases use
@@ -319,7 +318,6 @@ def streaming_bulk(
319318
for bulk_data, bulk_actions in _chunk_actions(
320319
actions, chunk_size, max_chunk_bytes, client.transport.serializer
321320
):
322-
323321
for attempt in range(max_retries + 1):
324322
to_retry, to_retry_data = [], []
325323
if attempt:
@@ -336,10 +334,9 @@ def streaming_bulk(
336334
raise_on_error,
337335
ignore_status,
338336
*args,
339-
**kwargs
337+
**kwargs,
340338
),
341339
):
342-
343340
if not ok:
344341
action, info = info.popitem()
345342
# retry if retries enabled, we get 429, and we are not
@@ -431,7 +428,7 @@ def parallel_bulk(
431428
expand_action_callback=expand_action,
432429
ignore_status=(),
433430
*args,
434-
**kwargs
431+
**kwargs,
435432
):
436433
"""
437434
Parallel version of the bulk helper run in multiple threads at once.
@@ -477,7 +474,7 @@ def _setup_queues(self):
477474
bulk_chunk[0],
478475
ignore_status=ignore_status,
479476
*args,
480-
**kwargs
477+
**kwargs,
481478
)
482479
),
483480
_chunk_actions(
@@ -502,7 +499,7 @@ def scan(
502499
request_timeout=None,
503500
clear_scroll=True,
504501
scroll_kwargs=None,
505-
**kwargs
502+
**kwargs,
506503
):
507504
"""
508505
Simple abstraction on top of the
@@ -624,7 +621,7 @@ def scan(
624621
scroll_id=scroll_id,
625622
ignore=(404,),
626623
params={"__elastic_client_meta": (("h", "s"),)},
627-
**transport_kwargs
624+
**transport_kwargs,
628625
)
629626

630627

@@ -640,7 +637,6 @@ def reindex(
640637
scan_kwargs={},
641638
bulk_kwargs={},
642639
):
643-
644640
"""
645641
Reindex all documents from one index that satisfy a given query
646642
to another, potentially (if `target_client` is specified) on a different cluster.
@@ -713,5 +709,5 @@ def _change_doc_index(hits, index, op_type):
713709
target_client,
714710
_change_doc_index(docs, target_index, op_type),
715711
chunk_size=chunk_size,
716-
**kwargs
712+
**kwargs,
717713
)

elasticsearch/helpers/actions.pyi

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ def _process_bulk_chunk(
4747
raise_on_exception: bool = ...,
4848
raise_on_error: bool = ...,
4949
*args: Any,
50-
**kwargs: Any
50+
**kwargs: Any,
5151
) -> Generator[Tuple[bool, Any], None, None]: ...
5252
def streaming_bulk(
5353
client: Elasticsearch,
@@ -63,15 +63,15 @@ def streaming_bulk(
6363
yield_ok: bool = ...,
6464
ignore_status: Optional[Union[int, Collection[int]]] = ...,
6565
*args: Any,
66-
**kwargs: Any
66+
**kwargs: Any,
6767
) -> Generator[Tuple[bool, Any], None, None]: ...
6868
def bulk(
6969
client: Elasticsearch,
7070
actions: Iterable[Any],
7171
stats_only: bool = ...,
7272
ignore_status: Optional[Union[int, Collection[int]]] = ...,
7373
*args: Any,
74-
**kwargs: Any
74+
**kwargs: Any,
7575
) -> Tuple[int, Union[int, List[Any]]]: ...
7676
def parallel_bulk(
7777
client: Elasticsearch,
@@ -83,7 +83,7 @@ def parallel_bulk(
8383
expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ...,
8484
ignore_status: Optional[Union[int, Collection[int]]] = ...,
8585
*args: Any,
86-
**kwargs: Any
86+
**kwargs: Any,
8787
) -> Generator[Tuple[bool, Any], None, None]: ...
8888
def scan(
8989
client: Elasticsearch,
@@ -95,7 +95,7 @@ def scan(
9595
request_timeout: Optional[Union[float, int]] = ...,
9696
clear_scroll: bool = ...,
9797
scroll_kwargs: Optional[Mapping[str, Any]] = ...,
98-
**kwargs: Any
98+
**kwargs: Any,
9999
) -> Generator[Any, None, None]: ...
100100
def reindex(
101101
client: Elasticsearch,

0 commit comments

Comments (0)