From 5eca7936df54e31852af12ac97f83b6675a7de7e Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Tue, 18 Jun 2024 21:39:32 +0400 Subject: [PATCH 1/3] Support Python 3.9 to 3.13 --- .buildkite/generatesteps.py | 4 +- .buildkite/run-tests | 2 +- .buildkite/teardown-tests | 2 +- .github/Dockerfile | 2 +- .github/workflows/tests.yml | 5 +- docs/guide/configuration.asciidoc | 2 +- docs/guide/getting-started.asciidoc | 2 +- noxfile.py | 2 +- pyproject.toml | 9 +- .../test_async/test_server/test_helpers.py | 155 ++++++----- .../test_server/test_helpers.py | 249 ++++++++++-------- 11 files changed, 235 insertions(+), 199 deletions(-) diff --git a/.buildkite/generatesteps.py b/.buildkite/generatesteps.py index b1de838..b054287 100644 --- a/.buildkite/generatesteps.py +++ b/.buildkite/generatesteps.py @@ -14,7 +14,7 @@ def benchmark_to_steps(python, connection_class): "env": { "PYTHON_VERSION": f"{python}", "PYTHON_CONNECTION_CLASS": f"{connection_class}", - # TEMPORARY for 3.11 + # For development versions # https://github.com/aio-libs/aiohttp/issues/6600 "AIOHTTP_NO_EXTENSIONS": 1, # https://github.com/aio-libs/frozenlist/issues/285 @@ -53,7 +53,7 @@ def benchmark_to_steps(python, connection_class): if __name__ == "__main__": steps = [] - for python in ["3.7", "3.8", "3.9", "3.10", "3.11"]: + for python in ["3.9", "3.10", "3.11", "3.12"]: for connection_class in ["urllib3", "requests"]: steps.extend(benchmark_to_steps(python, connection_class)) print(yaml.dump({"steps": steps}, Dumper=yaml.Dumper, sort_keys=False)) diff --git a/.buildkite/run-tests b/.buildkite/run-tests index 322bd39..c2a7497 100755 --- a/.buildkite/run-tests +++ b/.buildkite/run-tests @@ -3,7 +3,7 @@ set -euo pipefail # Default environment variables export FORCE_COLOR=1 -export PYTHON_VERSION="${PYTHON_VERSION:=3.9}" +export PYTHON_VERSION="${PYTHON_VERSION:=3.12}" export PYTHON_CONNECTION_CLASS="${PYTHON_CONNECTION_CLASS:=urllib3}" export EC_PROJECT_NAME="$EC_PROJECT_PREFIX-$BUILDKITE_JOB_ID" 
buildkite-agent meta-data set $EC_PROJECT_PREFIX $EC_PROJECT_NAME diff --git a/.buildkite/teardown-tests b/.buildkite/teardown-tests index fac9391..81b70e5 100644 --- a/.buildkite/teardown-tests +++ b/.buildkite/teardown-tests @@ -2,7 +2,7 @@ set -euo pipefail # Default environment variables -export PYTHON_VERSION="${PYTHON_VERSION:=3.9}" +export PYTHON_VERSION="${PYTHON_VERSION:=3.12}" export PYTHON_CONNECTION_CLASS="${PYTHON_CONNECTION_CLASS:=urllib3}" export EC_PROJECT_NAME=$(buildkite-agent meta-data get $EC_PROJECT_PREFIX) diff --git a/.github/Dockerfile b/.github/Dockerfile index 4fef09d..341f3ca 100644 --- a/.github/Dockerfile +++ b/.github/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION=3.9 +ARG PYTHON_VERSION=3.12 FROM python:${PYTHON_VERSION} WORKDIR /code/elasticsearch-serverless-python diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 66a1417..d0900d7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -59,7 +59,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] experimental: [false] nox-session: [""] runs-on: ["ubuntu-latest"] @@ -74,6 +74,7 @@ jobs: uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} + allow-prereleases: true - name: Install dependencies run: | python -m pip install nox @@ -83,7 +84,7 @@ jobs: env: PYTHON_VERSION: ${{ matrix.python-version }} NOX_SESSION: ${{ matrix.nox-session }} - # TEMPORARY for 3.11 + # For development versions # https://github.com/aio-libs/aiohttp/issues/6600 AIOHTTP_NO_EXTENSIONS: 1 # https://github.com/aio-libs/frozenlist/issues/285 diff --git a/docs/guide/configuration.asciidoc b/docs/guide/configuration.asciidoc index e4d8584..61bc44e 100644 --- a/docs/guide/configuration.asciidoc +++ b/docs/guide/configuration.asciidoc @@ -52,7 +52,7 @@ es = Elasticsearch( [discrete] ==== TLS versions -Configuring the minimum TLS version to 
connect to is done via the `ssl_version` parameter. By default this is set to a minimum value of TLSv1.2. In Python 3.7+ you can use the new `ssl.TLSVersion` enumeration to specify versions. +Configuring the minimum TLS version to connect to is done via the `ssl_version` parameter. By default this is set to a minimum value of TLSv1.2. You can use the `ssl.TLSVersion` enumeration to specify versions. [source,python] ------------------------------------ diff --git a/docs/guide/getting-started.asciidoc b/docs/guide/getting-started.asciidoc index 7a9149d..904bc3b 100644 --- a/docs/guide/getting-started.asciidoc +++ b/docs/guide/getting-started.asciidoc @@ -8,7 +8,7 @@ operations with it. [discrete] === Requirements -* https://www.python.org/[Python] 3.7 or newer +* https://www.python.org/[Python] 3.9 or newer * https://pip.pypa.io/en/stable/[`pip`], installed by default alongside Python [discrete] diff --git a/noxfile.py b/noxfile.py index 3332867..669db2f 100644 --- a/noxfile.py +++ b/noxfile.py @@ -31,7 +31,7 @@ INSTALL_ENV = {"AIOHTTP_NO_EXTENSIONS": "1"} -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"]) +@nox.session(python=["3.9", "3.10", "3.11", "3.12"]) def test(session): session.install(".[dev]", env=INSTALL_ENV) diff --git a/pyproject.toml b/pyproject.toml index 618488a..b6c9001 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ version = "0.2.0.20231031" description = "Python client for Elasticsearch Serverless" readme = "README.rst" license = "Apache-2.0" -requires-python = ">=3.7, <4" +requires-python = ">=3.9" authors = [ { name = "Elastic Client Library Maintainers", email = "client-libs@elastic.co" }, ] @@ -22,13 +22,12 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: Implementation :: 
CPython", - "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", ] keywords = [ "elasticsearch", diff --git a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py index d0aac44..601fe35 100644 --- a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py +++ b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py @@ -498,9 +498,10 @@ async def test_scroll_error(self, async_client, scan_teardown): bulk.append({"value": x}) await async_client.bulk(operations=bulk, refresh=True) - with patch.object( - async_client, "options", return_value=async_client - ), patch.object(async_client, "scroll", MockScroll()): + with ( + patch.object(async_client, "options", return_value=async_client), + patch.object(async_client, "scroll", MockScroll()), + ): data = [ x async for x in helpers.async_scan( @@ -514,9 +515,10 @@ async def test_scroll_error(self, async_client, scan_teardown): assert len(data) == 3 assert data[-1] == {"scroll_data": 42} - with patch.object( - async_client, "options", return_value=async_client - ), patch.object(async_client, "scroll", MockScroll()): + with ( + patch.object(async_client, "options", return_value=async_client), + patch.object(async_client, "scroll", MockScroll()), + ): with pytest.raises(ScanError): data = [ x @@ -532,9 +534,10 @@ async def test_scroll_error(self, async_client, scan_teardown): assert data[-1] == {"scroll_data": 42} async def test_initial_search_error(self, async_client, scan_teardown): - with patch.object( - async_client, "options", return_value=async_client - ), patch.object(async_client, "clear_scroll", new_callable=AsyncMock): + with ( + 
patch.object(async_client, "options", return_value=async_client), + patch.object(async_client, "clear_scroll", new_callable=AsyncMock), + ): with patch.object( async_client, "search", @@ -590,15 +593,16 @@ async def test_initial_search_error(self, async_client, scan_teardown): assert mock_scroll.calls == [] async def test_no_scroll_id_fast_route(self, async_client, scan_teardown): - with patch.object( - async_client, "options", return_value=async_client - ), patch.object(async_client, "scroll") as scroll_mock, patch.object( - async_client, - "search", - MockResponse(ObjectApiResponse(body={"no": "_scroll_id"}, meta=None)), - ), patch.object( - async_client, "clear_scroll" - ) as clear_mock: + with ( + patch.object(async_client, "options", return_value=async_client), + patch.object(async_client, "scroll") as scroll_mock, + patch.object( + async_client, + "search", + MockResponse(ObjectApiResponse(body={"no": "_scroll_id"}, meta=None)), + ), + patch.object(async_client, "clear_scroll") as clear_mock, + ): data = [ x async for x in helpers.async_scan(async_client, index="test_index") ] @@ -615,9 +619,10 @@ async def test_logger(self, logger_mock, async_client, scan_teardown): bulk.append({"value": x}) await async_client.bulk(operations=bulk, refresh=True) - with patch.object( - async_client, "options", return_value=async_client - ), patch.object(async_client, "scroll", MockScroll()): + with ( + patch.object(async_client, "options", return_value=async_client), + patch.object(async_client, "scroll", MockScroll()), + ): _ = [ x async for x in helpers.async_scan( @@ -630,9 +635,10 @@ async def test_logger(self, logger_mock, async_client, scan_teardown): ] logger_mock.warning.assert_called() - with patch.object( - async_client, "options", return_value=async_client - ), patch.object(async_client, "scroll", MockScroll()): + with ( + patch.object(async_client, "options", return_value=async_client), + patch.object(async_client, "scroll", MockScroll()), + ): try: _ = [ x @@ 
-660,11 +666,12 @@ async def test_clear_scroll(self, async_client, scan_teardown): bulk.append({"value": x}) await async_client.bulk(operations=bulk, refresh=True) - with patch.object( - async_client, "options", return_value=async_client - ), patch.object( - async_client, "clear_scroll", wraps=async_client.clear_scroll - ) as spy: + with ( + patch.object(async_client, "options", return_value=async_client), + patch.object( + async_client, "clear_scroll", wraps=async_client.clear_scroll + ) as spy, + ): _ = [ x async for x in helpers.async_scan( @@ -702,20 +709,21 @@ async def test_clear_scroll(self, async_client, scan_teardown): async def test_scan_auth_kwargs_forwarded( self, async_client, scan_teardown, kwargs ): - with patch.object( - async_client, "options", return_value=async_client - ) as options, patch.object( - async_client, - "search", - return_value=MockResponse( - ObjectApiResponse( - body={ - "_scroll_id": "scroll_id", - "_shards": {"successful": 5, "total": 5, "skipped": 0}, - "hits": {"hits": [{"search_data": 1}]}, - }, - meta=None, - ) + with ( + patch.object(async_client, "options", return_value=async_client) as options, + patch.object( + async_client, + "search", + return_value=MockResponse( + ObjectApiResponse( + body={ + "_scroll_id": "scroll_id", + "_shards": {"successful": 5, "total": 5, "skipped": 0}, + "hits": {"hits": [{"search_data": 1}]}, + }, + meta=None, + ) + ), ), ): with patch.object( @@ -755,20 +763,21 @@ async def test_scan_auth_kwargs_forwarded( async def test_scan_auth_kwargs_favor_scroll_kwargs_option( self, async_client, scan_teardown ): - with patch.object( - async_client, "options", return_value=async_client - ) as options, patch.object( - async_client, - "search", - return_value=MockResponse( - ObjectApiResponse( - body={ - "_scroll_id": "scroll_id", - "_shards": {"successful": 5, "total": 5, "skipped": 0}, - "hits": {"hits": [{"search_data": 1}]}, - }, - meta=None, - ) + with ( + patch.object(async_client, "options", 
return_value=async_client) as options, + patch.object( + async_client, + "search", + return_value=MockResponse( + ObjectApiResponse( + body={ + "_scroll_id": "scroll_id", + "_shards": {"successful": 5, "total": 5, "skipped": 0}, + "hits": {"hits": [{"search_data": 1}]}, + }, + meta=None, + ) + ), ), ): with patch.object( @@ -832,21 +841,23 @@ async def test_scan_auth_kwargs_favor_scroll_kwargs_option( ], ) async def test_scan_from_keyword_is_aliased(async_client, scan_kwargs): - with patch.object(async_client, "options", return_value=async_client), patch.object( - async_client, - "search", - return_value=MockResponse( - ObjectApiResponse( - body={ - "_scroll_id": "dummy_id", - "_shards": {"successful": 5, "total": 5}, - "hits": {"hits": []}, - }, - meta=None, - ) - ), - ) as search_mock, patch.object( - async_client, "clear_scroll", return_value=MockResponse(None) + with ( + patch.object(async_client, "options", return_value=async_client), + patch.object( + async_client, + "search", + return_value=MockResponse( + ObjectApiResponse( + body={ + "_scroll_id": "dummy_id", + "_shards": {"successful": 5, "total": 5}, + "hits": {"hits": []}, + }, + meta=None, + ) + ), + ) as search_mock, + patch.object(async_client, "clear_scroll", return_value=MockResponse(None)), ): [ x diff --git a/test_elasticsearch_serverless/test_server/test_helpers.py b/test_elasticsearch_serverless/test_server/test_helpers.py index f0e6fce..f804866 100644 --- a/test_elasticsearch_serverless/test_server/test_helpers.py +++ b/test_elasticsearch_serverless/test_server/test_helpers.py @@ -474,9 +474,10 @@ def test_scroll_error(sync_client): bulk.append({"value": x}) sync_client.bulk(operations=bulk, refresh=True) - with patch.object(sync_client, "options", return_value=sync_client), patch.object( - sync_client, "scroll" - ) as scroll_mock: + with ( + patch.object(sync_client, "options", return_value=sync_client), + patch.object(sync_client, "scroll") as scroll_mock, + ): scroll_mock.side_effect = 
mock_scroll_responses data = list( helpers.scan( @@ -506,21 +507,25 @@ def test_scroll_error(sync_client): def test_initial_search_error(sync_client): - with patch.object( - sync_client, - "search", - return_value=ObjectApiResponse( - meta=None, - raw={ - "_scroll_id": "dummy_id", - "_shards": {"successful": 4, "total": 5, "skipped": 0}, - "hits": {"hits": [{"search_data": 1}]}, - }, + with ( + patch.object( + sync_client, + "search", + return_value=ObjectApiResponse( + meta=None, + raw={ + "_scroll_id": "dummy_id", + "_shards": {"successful": 4, "total": 5, "skipped": 0}, + "hits": {"hits": [{"search_data": 1}]}, + }, + ), ), - ), patch.object(sync_client, "options", return_value=sync_client): - with patch.object(sync_client, "scroll") as scroll_mock, patch.object( - sync_client, "clear_scroll" - ) as clear_scroll_mock: + patch.object(sync_client, "options", return_value=sync_client), + ): + with ( + patch.object(sync_client, "scroll") as scroll_mock, + patch.object(sync_client, "clear_scroll") as clear_scroll_mock, + ): scroll_mock.side_effect = mock_scroll_responses data = list( helpers.scan( @@ -538,9 +543,10 @@ def test_initial_search_error(sync_client): scroll_id="dummy_id", ) - with patch.object(sync_client, "scroll") as scroll_mock, patch.object( - sync_client, "clear_scroll" - ) as clear_scroll_mock: + with ( + patch.object(sync_client, "scroll") as scroll_mock, + patch.object(sync_client, "clear_scroll") as clear_scroll_mock, + ): scroll_mock.side_effect = mock_scroll_responses with pytest.raises(ScanError): data = list( @@ -558,15 +564,16 @@ def test_initial_search_error(sync_client): def test_no_scroll_id_fast_route(sync_client): - with patch.object( - sync_client, - "search", - return_value=ObjectApiResponse(meta=None, raw={"no": "_scroll_id"}), - ) as search_mock, patch.object(sync_client, "scroll") as scroll_mock, patch.object( - sync_client, "clear_scroll" - ) as clear_scroll_mock, patch.object( - sync_client, "options", return_value=sync_client - ) 
as options: + with ( + patch.object( + sync_client, + "search", + return_value=ObjectApiResponse(meta=None, raw={"no": "_scroll_id"}), + ) as search_mock, + patch.object(sync_client, "scroll") as scroll_mock, + patch.object(sync_client, "clear_scroll") as clear_scroll_mock, + patch.object(sync_client, "options", return_value=sync_client) as options, + ): data = list(helpers.scan(sync_client, index="test_index")) assert data == [] @@ -595,32 +602,37 @@ def test_no_scroll_id_fast_route(sync_client): def test_scan_auth_kwargs_forwarded(sync_client, kwargs): ((key, val),) = kwargs.items() - with patch.object( - sync_client, "options", return_value=sync_client - ) as options, patch.object( - sync_client, - "search", - return_value=ObjectApiResponse( - meta=None, - raw={ - "_scroll_id": "scroll_id", - "_shards": {"successful": 5, "total": 5, "skipped": 0}, - "hits": {"hits": [{"search_data": 1}]}, - }, + with ( + patch.object(sync_client, "options", return_value=sync_client) as options, + patch.object( + sync_client, + "search", + return_value=ObjectApiResponse( + meta=None, + raw={ + "_scroll_id": "scroll_id", + "_shards": {"successful": 5, "total": 5, "skipped": 0}, + "hits": {"hits": [{"search_data": 1}]}, + }, + ), ), - ), patch.object( - sync_client, - "scroll", - return_value=ObjectApiResponse( - meta=None, - raw={ - "_scroll_id": "scroll_id", - "_shards": {"successful": 5, "total": 5, "skipped": 0}, - "hits": {"hits": []}, - }, + patch.object( + sync_client, + "scroll", + return_value=ObjectApiResponse( + meta=None, + raw={ + "_scroll_id": "scroll_id", + "_shards": {"successful": 5, "total": 5, "skipped": 0}, + "hits": {"hits": []}, + }, + ), + ), + patch.object( + sync_client, + "clear_scroll", + return_value=ObjectApiResponse(meta=None, raw={}), ), - ), patch.object( - sync_client, "clear_scroll", return_value=ObjectApiResponse(meta=None, raw={}) ): data = list(helpers.scan(sync_client, index="test_index", **kwargs)) @@ -635,32 +647,37 @@ def 
test_scan_auth_kwargs_forwarded(sync_client, kwargs): def test_scan_auth_kwargs_favor_scroll_kwargs_option(sync_client): - with patch.object( - sync_client, "options", return_value=sync_client - ) as options_mock, patch.object( - sync_client, - "search", - return_value=ObjectApiResponse( - raw={ - "_scroll_id": "scroll_id", - "_shards": {"successful": 5, "total": 5, "skipped": 0}, - "hits": {"hits": [{"search_data": 1}]}, - }, - meta=None, - ), - ) as search_mock, patch.object( - sync_client, - "scroll", - return_value=ObjectApiResponse( - raw={ - "_scroll_id": "scroll_id", - "_shards": {"successful": 5, "total": 5, "skipped": 0}, - "hits": {"hits": []}, - }, - meta=None, + with ( + patch.object(sync_client, "options", return_value=sync_client) as options_mock, + patch.object( + sync_client, + "search", + return_value=ObjectApiResponse( + raw={ + "_scroll_id": "scroll_id", + "_shards": {"successful": 5, "total": 5, "skipped": 0}, + "hits": {"hits": [{"search_data": 1}]}, + }, + meta=None, + ), + ) as search_mock, + patch.object( + sync_client, + "scroll", + return_value=ObjectApiResponse( + raw={ + "_scroll_id": "scroll_id", + "_shards": {"successful": 5, "total": 5, "skipped": 0}, + "hits": {"hits": []}, + }, + meta=None, + ), + ) as scroll_mock, + patch.object( + sync_client, + "clear_scroll", + return_value=ObjectApiResponse(raw={}, meta=None), ), - ) as scroll_mock, patch.object( - sync_client, "clear_scroll", return_value=ObjectApiResponse(raw={}, meta=None) ): data = list( helpers.scan( @@ -694,13 +711,11 @@ def test_log_warning_on_shard_failures(sync_client): bulk.append({"value": x}) sync_client.bulk(operations=bulk, refresh=True) - with patch( - "elasticsearch_serverless.helpers.actions.logger" - ) as logger_mock, patch.object( - sync_client, "options", return_value=sync_client - ), patch.object( - sync_client, "scroll" - ) as scroll_mock: + with ( + patch("elasticsearch_serverless.helpers.actions.logger") as logger_mock, + patch.object(sync_client, 
"options", return_value=sync_client), + patch.object(sync_client, "scroll") as scroll_mock, + ): scroll_mock.side_effect = mock_scroll_responses list( helpers.scan( @@ -736,9 +751,12 @@ def test_clear_scroll(sync_client): bulk.append({"value": x}) sync_client.bulk(operations=bulk, refresh=True) - with patch.object(sync_client, "options", return_value=sync_client), patch.object( - sync_client, "clear_scroll", wraps=sync_client.clear_scroll - ) as clear_scroll_mock: + with ( + patch.object(sync_client, "options", return_value=sync_client), + patch.object( + sync_client, "clear_scroll", wraps=sync_client.clear_scroll + ) as clear_scroll_mock, + ): list(helpers.scan(sync_client, index="test_index", size=2)) clear_scroll_mock.assert_called_once() @@ -753,19 +771,22 @@ def test_clear_scroll(sync_client): def test_shards_no_skipped_field(sync_client): # Test that scan doesn't fail if 'hits.skipped' isn't available. - with patch.object(sync_client, "options", return_value=sync_client), patch.object( - sync_client, - "search", - return_value=ObjectApiResponse( - raw={ - "_scroll_id": "dummy_id", - "_shards": {"successful": 5, "total": 5}, - "hits": {"hits": [{"search_data": 1}]}, - }, - meta=None, + with ( + patch.object(sync_client, "options", return_value=sync_client), + patch.object( + sync_client, + "search", + return_value=ObjectApiResponse( + raw={ + "_scroll_id": "dummy_id", + "_shards": {"successful": 5, "total": 5}, + "hits": {"hits": [{"search_data": 1}]}, + }, + meta=None, + ), ), - ), patch.object(sync_client, "scroll") as scroll_mock, patch.object( - sync_client, "clear_scroll" + patch.object(sync_client, "scroll") as scroll_mock, + patch.object(sync_client, "clear_scroll"), ): scroll_mock.side_effect = [ ObjectApiResponse( @@ -804,18 +825,22 @@ def test_shards_no_skipped_field(sync_client): ], ) def test_scan_from_keyword_is_aliased(sync_client, scan_kwargs): - with patch.object(sync_client, "options", return_value=sync_client), patch.object( - sync_client, - 
"search", - return_value=ObjectApiResponse( - raw={ - "_scroll_id": "dummy_id", - "_shards": {"successful": 5, "total": 5}, - "hits": {"hits": []}, - }, - meta=None, - ), - ) as search_mock, patch.object(sync_client, "clear_scroll"): + with ( + patch.object(sync_client, "options", return_value=sync_client), + patch.object( + sync_client, + "search", + return_value=ObjectApiResponse( + raw={ + "_scroll_id": "dummy_id", + "_shards": {"successful": 5, "total": 5}, + "hits": {"hits": []}, + }, + meta=None, + ), + ) as search_mock, + patch.object(sync_client, "clear_scroll"), + ): list(helpers.scan(sync_client, index="test_index", **scan_kwargs)) assert search_mock.call_args[1]["from_"] == 1 assert "from" not in search_mock.call_args[1] From 7e1e9e9322e4fdbb6e7d68164d195db248b47087 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Thu, 20 Jun 2024 18:16:46 +0400 Subject: [PATCH 2/3] Add xfail markers --- .../test_async/test_server/test_helpers.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py index 601fe35..68d81e6 100644 --- a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py +++ b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py @@ -16,6 +16,7 @@ # under the License. 
import asyncio +import sys from datetime import datetime, timedelta, timezone from unittest.mock import MagicMock, call, patch @@ -30,6 +31,11 @@ pytestmark = [pytest.mark.asyncio] +async_bulk_xfail = pytest.mark.xfail( + sys.version_info < (3, 11), reason="Investigated in issue #62" +) + + class AsyncMock(MagicMock): async def __call__(self, *args, **kwargs): return super(AsyncMock, self).__call__(*args, **kwargs) @@ -76,6 +82,7 @@ async def test_actions_remain_unchanged(self, async_client): assert ok assert [{"_id": 1}, {"_id": 2}] == actions + @async_bulk_xfail async def test_all_documents_get_inserted(self, async_client): docs = [{"answer": x, "_id": x} for x in range(100)] async for ok, item in helpers.async_streaming_bulk( @@ -88,6 +95,7 @@ async def test_all_documents_get_inserted(self, async_client): "_source" ] + @async_bulk_xfail async def test_documents_data_types(self, async_client): async def async_gen(): for x in range(100): @@ -306,6 +314,7 @@ async def test_bulk_works_with_single_item(self, async_client): "_source" ] + @async_bulk_xfail async def test_all_documents_get_inserted(self, async_client): docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await helpers.async_bulk( @@ -319,6 +328,7 @@ async def test_all_documents_get_inserted(self, async_client): "_source" ] + @async_bulk_xfail async def test_stats_only_reports_numbers(self, async_client): docs = [{"answer": x} for x in range(100)] success, failed = await helpers.async_bulk( @@ -454,6 +464,7 @@ async def scan_teardown(async_client): class TestScan(object): + @async_bulk_xfail async def test_order_can_be_preserved(self, async_client, scan_teardown): bulk = [] for x in range(100): @@ -475,6 +486,7 @@ async def test_order_can_be_preserved(self, async_client, scan_teardown): assert list(map(str, range(100))) == list(d["_id"] for d in docs) assert list(range(100)) == list(d["_source"]["answer"] for d in docs) + @async_bulk_xfail async def test_all_documents_are_read(self, 
async_client, scan_teardown): bulk = [] for x in range(100): @@ -886,6 +898,7 @@ async def reindex_setup(async_client): class TestReindex(object): + @async_bulk_xfail async def test_reindex_passes_kwargs_to_scan_and_bulk( self, async_client, reindex_setup ): @@ -907,6 +920,7 @@ async def test_reindex_passes_kwargs_to_scan_and_bulk( await async_client.get(index="prod_index", id=42) )["_source"] + @async_bulk_xfail async def test_reindex_accepts_a_query(self, async_client, reindex_setup): await helpers.async_reindex( async_client, @@ -926,6 +940,7 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup): await async_client.get(index="prod_index", id=42) )["_source"] + @async_bulk_xfail async def test_all_documents_get_moved(self, async_client, reindex_setup): await helpers.async_reindex( async_client, "test_index", "prod_index", bulk_kwargs={"refresh": True} @@ -976,6 +991,7 @@ async def reindex_data_stream_setup(async_client): class TestAsyncDataStreamReindex(object): @pytest.mark.parametrize("op_type", [None, "create"]) + @async_bulk_xfail async def test_reindex_index_datastream( self, op_type, async_client, reindex_data_stream_setup ): @@ -995,6 +1011,7 @@ async def test_reindex_index_datastream( ] ) + @async_bulk_xfail async def test_reindex_index_datastream_op_type_index( self, async_client, reindex_data_stream_setup ): From 8a5555f1edba7806c0650e9bf9870f4ee540da74 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Fri, 21 Jun 2024 17:28:56 +0400 Subject: [PATCH 3/3] Skip failing cluster/cluster_info test --- .../test_async/test_server/test_rest_api_spec.py | 2 ++ .../test_server/test_rest_api_spec.py | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/test_elasticsearch_serverless/test_async/test_server/test_rest_api_spec.py b/test_elasticsearch_serverless/test_async/test_server/test_rest_api_spec.py index f0b5db1..d058ba4 100644 --- a/test_elasticsearch_serverless/test_async/test_server/test_rest_api_spec.py +++ 
b/test_elasticsearch_serverless/test_async/test_server/test_rest_api_spec.py @@ -251,5 +251,7 @@ def async_runner(async_client_factory): async def test_rest_api_spec(test_spec, async_runner): if test_spec.get("fail", False): pytest.xfail("Manually marked as failing in 'FAILING_TESTS'") + elif test_spec.get("skip", False): + pytest.xfail("Manually skipped") async_runner.use_spec(test_spec) await async_runner.run() diff --git a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py index 4e7446a..bb750d4 100644 --- a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py +++ b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py @@ -572,6 +572,9 @@ def remove_implicit_resolver(cls, tag_to_remove): # Skip either 'test_name' or 'test_name[x]' if pytest_test_name in FAILING_TESTS or pytest_param_id in FAILING_TESTS: pytest_param["fail"] = True + # https://github.com/elastic/elasticsearch-serverless-python/issues/63 + elif pytest_param_id == "cluster/cluster_info[0]": + pytest_param["skip"] = True YAML_TEST_SPECS.append(pytest.param(pytest_param, id=pytest_param_id)) @@ -593,5 +596,7 @@ def _pytest_param_sort_key(param: pytest.param) -> Tuple[Union[str, int], ...]: def test_rest_api_spec(test_spec, sync_runner): if test_spec.get("fail", False): pytest.xfail("Manually marked as failing in 'FAILING_TESTS'") + elif test_spec.get("skip", False): + pytest.skip("Manually marked as skipped") sync_runner.use_spec(test_spec) sync_runner.run()