From 92937c819cdf5c0cb9b0cb6b1b5edd3174c3ab70 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Tue, 5 Sep 2023 15:04:43 -0500
Subject: [PATCH 01/15] Buildkite test runner for REST test suite

With fixes for several test breakages.

Integration tests will always run against a real serverless instance
that has its own certs. That instance is created using qaf, one per
test suite run (so several are created for each matrix-based test run),
and then deleted when tests are done running.

---
 .buildkite/rest-tests.yaml                 |  48 +++
 .buildkite/run-tests                       |  77 +++++
 .ci/run-elasticsearch.sh                   |  35 --
 catalog-info.yaml                          |  44 +++
 .../_async/client/_base.py                 |   5 +-
 .../_sync/client/_base.py                  |   5 +-
 test_elasticsearch_serverless/conftest.py  |  13 +-
 .../test_async/test_server/conftest.py     |   9 +-
 .../test_async/test_server/test_helpers.py |  20 +-
 .../test_server/test_mapbox_vector_tile.py |   8 +-
 .../test_async/test_transport.py           |   4 +-
 .../test_client/test_options.py            |  34 +-
 .../test_client/test_serializers.py        |  12 +-
 .../test_serializer.py                     |   8 +-
 .../test_server/conftest.py                |  12 +-
 .../test_server/test_helpers.py            |  28 +-
 .../test_server/test_mapbox_vector_tile.py |   9 +-
 .../test_server/test_responses.py          |   2 +-
 .../test_server/test_rest_api_spec.py      |  12 +-
 .../test_transport.py                      |   4 +-
 test_elasticsearch_serverless/utils.py     | 313 ++----------------
 21 files changed, 279 insertions(+), 423 deletions(-)
 create mode 100644 .buildkite/rest-tests.yaml
 create mode 100755 .buildkite/run-tests
 delete mode 100755 .ci/run-elasticsearch.sh
 create mode 100644 catalog-info.yaml

diff --git a/.buildkite/rest-tests.yaml b/.buildkite/rest-tests.yaml
new file mode 100644
index 0000000..62bbf09
--- /dev/null
+++ b/.buildkite/rest-tests.yaml
@@ -0,0 +1,48 @@
+---
+steps:
+  - label: ":elasticsearch: :python: ES Serverless ({{ matrix.python }}/{{ matrix.connection_class }}) Python Test Suite: {{ matrix.suite }}"
+    agents:
+      provider: gcp
+    env:
+      PYTHON_VERSION: "{{ matrix.python }}"
+      TEST_SUITE: "{{ matrix.suite }}"
+      PYTHON_CONNECTION_CLASS: "{{ matrix.connection_class }}"
+      # TEMPORARY for 3.11
+      # https://github.com/aio-libs/aiohttp/issues/6600
+      AIOHTTP_NO_EXTENSIONS: 1
+      # https://github.com/aio-libs/frozenlist/issues/285
+      FROZENLIST_NO_EXTENSIONS: 1
+      # https://github.com/aio-libs/yarl/issues/680
+      YARL_NO_EXTENSIONS: 1
+      EC_REGISTER_BACKEND: "appex-qa-team-cluster"
+      EC_ENV: "qa"
+      EC_REGION: "aws-eu-west-1"
+      EC_PROJECT_NAME: "esv-client-python-test-{{ matrix.python }}-{{ matrix.suite }}-{{ matrix.connection_class }}"
+    matrix:
+      setup:
+        suite:
+          - free
+          - platinum
+        python:
+          - "3.7"
+          - "3.8"
+          - "3.9"
+          - "3.10"
+          - "3.11"
+        connection_class:
+          - urllib3
+          - requests
+    command: ./.buildkite/run-tests
+    artifact_paths: "junit/*-junit.xml"
+  - wait: ~
+    continue_on_failure: true
+  - label: ":junit: Test results"
+    agents:
+      provider: gcp
+      image: family/core-ubuntu-2204
+    plugins:
+      - junit-annotate#v2.4.1:
+          artifacts: "junit/*-junit.xml"
+          job-uuid-file-pattern: "(.*)-junit.xml"
+          fail-build-on-error: true
+          failure-format: file
diff --git a/.buildkite/run-tests b/.buildkite/run-tests
new file mode 100755
index 0000000..3ef4af6
--- /dev/null
+++ b/.buildkite/run-tests
@@ -0,0 +1,77 @@
+#!/usr/bin/env bash
+
+# Default environment variables
+export TEST_SUITE="${TEST_SUITE:=platinum}"
+export PYTHON_VERSION="${PYTHON_VERSION:=3.9}"
+export PYTHON_CONNECTION_CLASS="${PYTHON_CONNECTION_CLASS:=urllib3}"
+
+set -euo pipefail
+
+# fetch cloud creds used by qaf
+CLOUD_ACCESS_KEY=$(vault read -field="$EC_ENV" secret/ci/elastic-elasticsearch-serverless-python/cloud-access)
+echo "{\"api_key\":{\"$EC_ENV\":\"$CLOUD_ACCESS_KEY\"}}" > "$(pwd)/cloud.json"
+
+# ensure serverless instance is deleted even if script errors
+cleanup() {
+  echo -e "--- :elasticsearch: Tear down serverless instance EC_PROJECT_NAME"
+  docker run --rm \
+    -e EC_REGISTER_BACKEND \
+    -e EC_ENV \
+    -e EC_REGION \
+    -e EC_PROJECT_NAME \
+    -e VAULT_TOKEN \
+    -v "$(pwd)/cloud.json:/root/.elastic/cloud.json" \
+    docker.elastic.co/employees/dolaru/qaf:latest \
+    bash -c 'qaf elastic-cloud projects delete'
+}
+trap cleanup EXIT
+
+echo -e "--- :elasticsearch: Start serverless instance"
+
+deployment=$(docker run --rm \
+  -e EC_REGISTER_BACKEND \
+  -e EC_ENV \
+  -e EC_REGION \
+  -e EC_PROJECT_NAME \
+  -e VAULT_TOKEN \
+  -v "$(pwd)/cloud.json:/root/.elastic/cloud.json" \
+  docker.elastic.co/employees/dolaru/qaf:latest \
+  bash -c 'qaf elastic-cloud projects create --project-type elasticsearch && \
+           qaf elastic-cloud projects describe --as-json --show-credentials') \
+
+ES_API_SECRET_KEY=$(echo "$deployment" | jq -r '.credentials.api_key')
+ELASTICSEARCH_URL=$(echo "$deployment" | jq -r '.elasticsearch.url')
+export ELASTICSEARCH_URL
+
+echo -e "--- :computer: Environment variables"
+echo -e "ELASTICSEARCH_URL $ELASTICSEARCH_URL"
+echo -e "STACK_VERSION $STACK_VERSION"
+echo -e "TEST_SUITE $TEST_SUITE"
+echo -e "PYTHON_VERSION $PYTHON_VERSION"
+echo -e "PYTHON_CONNECTION_CLASS $PYTHON_CONNECTION_CLASS"
+
+echo -e "--- :docker: Build elasticsearch-serverless-python container"
+
+docker build \
+  --file .ci/Dockerfile \
+  --tag elasticsearch-serverless-python \
+  --build-arg "PYTHON_VERSION=$PYTHON_VERSION" \
+  .
+
+echo -e "--- :docker: :python: Run integration tests for Python $PYTHON_VERSION"
+
+GITHUB_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch-serverless-python/github-token)
+export GITHUB_TOKEN
+
+docker run \
+  -e ELASTICSEARCH_URL \
+  -e "ES_API_KEY=$ES_API_SECRET_KEY" \
+  -e PYTHON_CONNECTION_CLASS \
+  -e STACK_VERSION \
+  -e TEST_SUITE \
+  -e GITHUB_TOKEN \
+  --name elasticsearch-serverless-python-tests \
+  --volume "$(pwd)/junit:/code/elasticsearch-serverless-python/junit" \
+  --rm \
+  elasticsearch-serverless-python \
+  nox -s "test-$PYTHON_VERSION"
diff --git a/.ci/run-elasticsearch.sh b/.ci/run-elasticsearch.sh
deleted file mode 100755
index 9bf49a0..0000000
--- a/.ci/run-elasticsearch.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-#
-environment=($(cat <<-END
-  --env ELASTIC_PASSWORD=changeme
-  --env node.name=elasticsearch-serverless
-  --env cluster.name=elasticsearch-serverless
-  --env cluster.initial_master_nodes=elasticsearch-serverless
-  --env discovery.seed_hosts=instance
-  --env cluster.routing.allocation.disk.threshold_enabled=false
-  --env bootstrap.memory_lock=true
-  --env node.attr.testattr=test
-  --env path.repo=/tmp
-  --env repositories.url.allowed_urls=http://snapshot.test*
-  --env action.destructive_requires_name=false
-  --env ingest.geoip.downloader.enabled=false
-  --env cluster.deprecation_indexing.enabled=false
-  --env xpack.security.enabled=false
-  --env xpack.security.http.ssl.enabled=false
-END
-))
-
-export DETACH=${DETACH-false}
-
-docker run \
-  --name elasticsearch-serverless \
-  --network elastic \
-  --env "ES_JAVA_OPTS=-Des.serverless=true -Xms1g -Xmx1g -da:org.elasticsearch.xpack.ccr.index.engine.FollowingEngineAssertions" \
-  "${environment[@]}" \
-  --volume serverless-data:/usr/share/elasticsearch/data \
-  --publish 9200:9200 \
-  --ulimit nofile=65536:65536 \
-  --ulimit memlock=-1:-1 \
-  --detach=$DETACH \
-  --rm \
-  docker.elastic.co/elasticsearch/elasticsearch:$STACK_VERSION;
diff --git a/catalog-info.yaml b/catalog-info.yaml
new file mode 100644
index 0000000..608bdf0
--- /dev/null
+++ b/catalog-info.yaml
@@ -0,0 +1,44 @@
+---
+# yaml-language-server: $schema=https://json.schemastore.org/catalog-info.json
+apiVersion: backstage.io/v1alpha1
+kind: Component
+metadata:
+  name: elasticsearch-serverless-python
+spec:
+  type: library
+  owner: group:clients-team
+  lifecycle: alpha
+
+---
+# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
+apiVersion: backstage.io/v1alpha1
+kind: Resource
+metadata:
+  name: elasticsearch-serverless-python-rest-tests
+  description: elasticsearch-serverless-python - rest tests
+spec:
+  type: buildkite-pipeline
+  owner: group:clients-team
+  system: buildkite
+  implementation:
+    apiVersion: buildkite.elastic.dev/v1
+    kind: Pipeline
+    metadata:
+      name: elasticsearch-serverless-python - rest tests
+    spec:
+      repository: elastic/elasticsearch-serverless-python
+      pipeline_file: .buildkite/rest-tests.yaml
+      teams:
+        clients-team:
+          access_level: MANAGE_BUILD_AND_READ
+        everyone:
+          access_level: READ_ONLY
+      provider_settings:
+        build_pull_requests: true
+        build_branches: true
+        cancel_intermediate_builds: true
+        cancel_intermediate_builds_branch_filter: '!main'
+      schedules:
+        main_semi_daily:
+          branch: 'main'
+          cronline: '0 */12 * * *'
diff --git a/elasticsearch_serverless/_async/client/_base.py b/elasticsearch_serverless/_async/client/_base.py
index cb4d41c..8b9bbd1 100644
--- a/elasticsearch_serverless/_async/client/_base.py
+++ b/elasticsearch_serverless/_async/client/_base.py
@@ -31,7 +31,6 @@
 )
 from elastic_transport.client_utils import DEFAULT, DefaultType
 
-from ..._version import __versionstr__
 from ...compat import warn_stacklevel
 from ...exceptions import (
     HTTP_EXCEPTIONS,
@@ -42,9 +41,7 @@
 from .utils import _base64_auth_header, _quote_query
 
 _WARNING_RE = re.compile(r"\"([^\"]*)\"")
-_COMPAT_MIMETYPE_TEMPLATE = "application/vnd.elasticsearch+%s; compatible-with=" + str(
-    __versionstr__.partition(".")[0]
-)
+_COMPAT_MIMETYPE_TEMPLATE = "application/vnd.elasticsearch+%s; compatible-with=8"
 _COMPAT_MIMETYPE_RE = re.compile(r"application/(json|x-ndjson|vnd\.mapbox-vector-tile)")
 _COMPAT_MIMETYPE_SUB = _COMPAT_MIMETYPE_TEMPLATE % (r"\g<1>",)
diff --git a/elasticsearch_serverless/_sync/client/_base.py b/elasticsearch_serverless/_sync/client/_base.py
index 56ff243..0f83ae3 100644
--- a/elasticsearch_serverless/_sync/client/_base.py
+++ b/elasticsearch_serverless/_sync/client/_base.py
@@ -31,7 +31,6 @@
 )
 from elastic_transport.client_utils import DEFAULT, DefaultType
 
-from ..._version import __versionstr__
 from ...compat import warn_stacklevel
 from ...exceptions import (
     HTTP_EXCEPTIONS,
@@ -42,9 +41,7 @@
 from .utils import _base64_auth_header, _quote_query
 
 _WARNING_RE = re.compile(r"\"([^\"]*)\"")
-_COMPAT_MIMETYPE_TEMPLATE = "application/vnd.elasticsearch+%s; compatible-with=" + str(
-    __versionstr__.partition(".")[0]
-)
+_COMPAT_MIMETYPE_TEMPLATE = "application/vnd.elasticsearch+%s; compatible-with=8"
 _COMPAT_MIMETYPE_RE = re.compile(r"application/(json|x-ndjson|vnd\.mapbox-vector-tile)")
 _COMPAT_MIMETYPE_SUB = _COMPAT_MIMETYPE_TEMPLATE % (r"\g<1>",)
diff --git a/test_elasticsearch_serverless/conftest.py b/test_elasticsearch_serverless/conftest.py
index 3399b8a..9256ec5 100644
--- a/test_elasticsearch_serverless/conftest.py
+++ b/test_elasticsearch_serverless/conftest.py
@@ -21,7 +21,7 @@
 
 from elasticsearch_serverless import Elasticsearch
 
-from .utils import CA_CERTS, es_url, es_version
+from .utils import es_api_key, es_url, es_version
 
 
 @pytest.fixture(scope="session")
@@ -33,11 +33,14 @@ def elasticsearch_url():
 
 
 @pytest.fixture(scope="session")
-def ca_certs():
-    return CA_CERTS
+def elasticsearch_api_key():
+    try:
+        return es_api_key()
+    except RuntimeError as e:
+        pytest.skip(str(e))
 
 
 @pytest.fixture(scope="session")
-def elasticsearch_version(elasticsearch_url, ca_certs) -> Tuple[int, ...]:
+def elasticsearch_version(elasticsearch_url) -> Tuple[int, ...]:
     """Returns the version of the current Elasticsearch cluster"""
-    return es_version(Elasticsearch(elasticsearch_url, ca_certs=ca_certs))
+    return es_version(Elasticsearch(elasticsearch_url))
diff --git a/test_elasticsearch_serverless/test_async/test_server/conftest.py b/test_elasticsearch_serverless/test_async/test_server/conftest.py
index 23646d2..642e688 100644
--- a/test_elasticsearch_serverless/test_async/test_server/conftest.py
+++ b/test_elasticsearch_serverless/test_async/test_server/conftest.py
@@ -16,17 +16,18 @@
 # under the License.
 
 import pytest
+import pytest_asyncio
 
 import elasticsearch_serverless
 
-from ...utils import CA_CERTS, wipe_cluster
+from ...utils import wipe_cluster
 
 pytestmark = pytest.mark.asyncio
 
 
-@pytest.fixture(scope="function")
+@pytest_asyncio.fixture(scope="function")
 @pytest.mark.usefixtures("sync_client")
-async def async_client(elasticsearch_url):
+async def async_client(elasticsearch_url, elasticsearch_api_key):
     # 'sync_client' fixture is used for the guaranteed wipe_cluster() call.
 
     if not hasattr(elasticsearch_serverless, "AsyncElasticsearch"):
@@ -38,7 +39,7 @@ async def async_client(elasticsearch_url):
     client = None
     try:
         client = elasticsearch_serverless.AsyncElasticsearch(
-            elasticsearch_url, request_timeout=3, ca_certs=CA_CERTS
+            elasticsearch_url, api_key=elasticsearch_api_key, request_timeout=3
         )
         yield client
     finally:
diff --git a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py
index d7b1f7b..caa6660 100644
--- a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py
+++ b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py
@@ -125,7 +125,6 @@ async def test_all_errors_from_chunk_are_raised_on_failure(self, async_client):
         await async_client.indices.create(
             index="i",
             mappings={"properties": {"a": {"type": "integer"}}},
-            settings={"number_of_shards": 1, "number_of_replicas": 0},
         )
         await async_client.cluster.health(wait_for_status="yellow")
 
@@ -214,11 +213,11 @@ async def test_rejected_documents_are_retried(self, async_client):
                 chunk_size=1,
                 max_retries=1,
                 initial_backoff=0,
+                refresh=True,
             )
         ]
         assert 3 == len(results)
         assert [True, True, True] == [r[0] for r in results]
-        await async_client.indices.refresh(index="i")
         res = await async_client.search(index="i")
         assert {"value": 3, "relation": "eq"} == res["hits"]["total"]
         assert 4 == failing_client._called
@@ -253,11 +252,11 @@ async def test_rejected_documents_are_retried_at_most_max_retries_times(
                 chunk_size=1,
                 max_retries=1,
                 initial_backoff=0,
+                refresh=True,
             )
         ]
         assert 3 == len(results)
         assert [False, True, True] == [r[0] for r in results]
-        await async_client.indices.refresh(index="i")
         res = await async_client.search(index="i")
         assert {"value": 2, "relation": "eq"} == res["hits"]["total"]
         assert 4 == failing_client._called
@@ -334,7 +333,6 @@ async def test_errors_are_reported_correctly(self, async_client):
         await async_client.indices.create(
             index="i",
             mappings={"properties": {"a": {"type": "integer"}}},
-            settings={"number_of_shards": 1, "number_of_replicas": 0},
         )
         await async_client.cluster.health(wait_for_status="yellow")
 
@@ -358,7 +356,6 @@ async def test_error_is_raised(self, async_client):
         await async_client.indices.create(
             index="i",
             mappings={"properties": {"a": {"type": "integer"}}},
-            settings={"number_of_shards": 1, "number_of_replicas": 0},
         )
         await async_client.cluster.health(wait_for_status="yellow")
 
@@ -402,7 +399,6 @@ async def test_errors_are_collected_properly(self, async_client):
         await async_client.indices.create(
             index="i",
             mappings={"properties": {"a": {"type": "integer"}}},
-            settings={"number_of_shards": 1, "number_of_replicas": 0},
         )
         await async_client.cluster.health(wait_for_status="yellow")
 
@@ -911,8 +907,8 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup):
             "test_index",
             "prod_index",
             query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}},
+            bulk_kwargs={"refresh": True},
         )
-        await async_client.indices.refresh()
 
         assert await async_client.indices.exists(index="prod_index")
         assert (
@@ -925,8 +921,7 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup):
         )["_source"]
 
     async def test_all_documents_get_moved(self, async_client, reindex_setup):
-        await helpers.async_reindex(async_client, "test_index", "prod_index")
-        await async_client.indices.refresh()
+        await helpers.async_reindex(async_client, "test_index", "prod_index", bulk_kwargs={"refresh": True})
 
         assert await async_client.indices.exists(index="prod_index")
         assert (
@@ -948,7 +943,6 @@ async def test_all_documents_get_moved(self, async_client, reindex_setup):
 @pytest.fixture(scope="function")
 async def parent_reindex_setup(async_client):
     body = {
-        "settings": {"number_of_shards": 1, "number_of_replicas": 0},
         "mappings": {
             "properties": {
                 "question_answer": {
@@ -969,8 +963,8 @@ async def parent_reindex_setup(async_client):
         id=47,
         routing=42,
         body={"some": "data", "question_answer": {"name": "answer", "parent": 42}},
+        refresh=True,
     )
-    await async_client.indices.refresh(index="test-index")
 
 
 class TestParentChildReindex:
@@ -1028,8 +1022,7 @@ async def reindex_data_stream_setup(async_client):
             "data_stream": {},
         },
     )
-    await async_client.indices.create_data_stream(name="py-test-stream")
-    await async_client.indices.refresh()
+    await async_client.indices.create_data_stream(name="py-test-stream", refresh=True)
     yield
 
 
@@ -1046,7 +1039,6 @@ async def test_reindex_index_datastream(
             bulk_kwargs={"refresh": True},
             op_type=op_type,
         )
-        # await async_client.indices.refresh()
         assert await async_client.indices.exists(index="py-test-stream")
         assert (
             50
diff --git a/test_elasticsearch_serverless/test_async/test_server/test_mapbox_vector_tile.py b/test_elasticsearch_serverless/test_async/test_server/test_mapbox_vector_tile.py
index b6b52e9..027cd98 100644
--- a/test_elasticsearch_serverless/test_async/test_server/test_mapbox_vector_tile.py
+++ b/test_elasticsearch_serverless/test_async/test_server/test_mapbox_vector_tile.py
@@ -73,8 +73,8 @@ async def mvt_setup(async_client):
     )
 
 
-async def test_mapbox_vector_tile_error(elasticsearch_url, mvt_setup, ca_certs):
-    client = AsyncElasticsearch(elasticsearch_url, ca_certs=ca_certs)
+async def test_mapbox_vector_tile_error(elasticsearch_url, mvt_setup):
+    client = AsyncElasticsearch(elasticsearch_url)
     await client.search_mvt(
         index="museums",
         zoom=13,
@@ -113,13 +113,13 @@ async def test_mapbox_vector_tile_error(elasticsearch_url, mvt_setup, ca_certs):
     }
 
 
-async def test_mapbox_vector_tile_response(elasticsearch_url, mvt_setup, ca_certs):
+async def test_mapbox_vector_tile_response(elasticsearch_url, mvt_setup):
     try:
         import mapbox_vector_tile
     except ImportError:
         return pytest.skip("Requires the 'mapbox-vector-tile' package")
 
-    client = AsyncElasticsearch(elasticsearch_url, ca_certs=ca_certs)
+    client = AsyncElasticsearch(elasticsearch_url)
 
     resp = await client.search_mvt(
         index="museums",
diff --git a/test_elasticsearch_serverless/test_async/test_transport.py b/test_elasticsearch_serverless/test_async/test_transport.py
index 3edf2ee..2665630 100644
--- a/test_elasticsearch_serverless/test_async/test_transport.py
+++ b/test_elasticsearch_serverless/test_async/test_transport.py
@@ -243,7 +243,7 @@ async def test_client_meta_header_not_sent(self):
         calls = client.transport.node_pool.get().calls
         assert 1 == len(calls)
         assert calls[0][1]["headers"] == {
-            "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+            "accept": "application/vnd.elasticsearch+json; compatible-with=8",
         }
 
     async def test_body_surrogates_replaced_encoded_into_bytes(self):
@@ -350,7 +350,7 @@ async def test_unsupported_product_error(headers):
         {
             "body": None,
             "headers": {
-                "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+                "accept": "application/vnd.elasticsearch+json; compatible-with=8",
             },
             "request_timeout": DEFAULT,
         },
diff --git a/test_elasticsearch_serverless/test_client/test_options.py b/test_elasticsearch_serverless/test_client/test_options.py
index 3c147c0..7686123 100644
--- a/test_elasticsearch_serverless/test_client/test_options.py
+++ b/test_elasticsearch_serverless/test_client/test_options.py
@@ -139,7 +139,7 @@ def test_options_passed_to_perform_request(self):
         assert call.pop("client_meta") is DEFAULT
         assert call == {
             "headers": {
-                "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+                "accept": "application/vnd.elasticsearch+json; compatible-with=8",
             },
             "body": None,
         }
@@ -157,7 +157,7 @@ def test_options_passed_to_perform_request(self):
         assert call.pop("client_meta") is DEFAULT
         assert call == {
             "headers": {
-                "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+                "accept": "application/vnd.elasticsearch+json; compatible-with=8",
             },
             "body": None,
             "request_timeout": 1,
@@ -182,7 +182,7 @@ def test_options_passed_to_perform_request(self):
         assert call.pop("client_meta") is DEFAULT
         assert call == {
             "headers": {
-                "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+                "accept": "application/vnd.elasticsearch+json; compatible-with=8",
             },
             "body": None,
             "request_timeout": 1,
@@ -209,7 +209,7 @@ async def test_options_passed_to_async_perform_request(self):
         assert call.pop("client_meta") is DEFAULT
         assert call == {
             "headers": {
-                "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+                "accept": "application/vnd.elasticsearch+json; compatible-with=8",
             },
             "body": None,
         }
@@ -227,7 +227,7 @@ async def test_options_passed_to_async_perform_request(self):
         assert call.pop("client_meta") is DEFAULT
         assert call == {
             "headers": {
-                "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+                "accept": "application/vnd.elasticsearch+json; compatible-with=8",
             },
             "body": None,
             "request_timeout": 1,
@@ -252,7 +252,7 @@ async def test_options_passed_to_async_perform_request(self):
         assert call.pop("client_meta") is DEFAULT
         assert call == {
"headers": { - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", }, "body": None, "request_timeout": 1, @@ -294,7 +294,7 @@ def test_http_headers_overrides(self): assert call["headers"] == { "key": "val", - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", } client.options(headers={"key1": "val"}).indices.get(index="2") @@ -303,7 +303,7 @@ def test_http_headers_overrides(self): assert call["headers"] == { "key": "val", "key1": "val", - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", } client.options(headers={"key": "val2"}).indices.get(index="3") @@ -311,7 +311,7 @@ def test_http_headers_overrides(self): assert call["headers"] == { "key": "val2", - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", } client = Elasticsearch( @@ -338,14 +338,14 @@ def test_user_agent_override(self): call = calls[("GET", "/1")][0] assert call["headers"] == { "user-agent": "custom1", - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", } client.indices.get(index="2", headers={"user-agent": "custom2"}) call = calls[("GET", "/2")][0] assert call["headers"] == { "user-agent": "custom2", - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", } client = Elasticsearch( @@ -359,14 +359,14 @@ def test_user_agent_override(self): call = calls[("GET", "/1")][0] assert call["headers"] == { "user-agent": "custom3", - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", } client.indices.get(index="2", headers={"user-agent": "custom4"}) call = calls[("GET", "/2")][0] assert call["headers"] == { "user-agent": "custom4", - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", } def test_options_timeout_parameters(self): @@ -387,7 +387,7 @@ def test_options_timeout_parameters(self): assert call.pop("client_meta") is DEFAULT assert call == { "headers": { - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", }, "body": None, "request_timeout": 1, @@ -416,7 +416,7 @@ def test_options_timeout_parameters(self): assert call.pop("client_meta") is DEFAULT assert call == { "headers": { - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", }, "body": None, "request_timeout": 2, @@ -440,7 +440,7 @@ def test_options_timeout_parameters(self): assert call.pop("client_meta") is DEFAULT assert call == { "headers": { - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", }, "body": None, } @@ -461,7 +461,7 @@ def test_options_timeout_parameters(self): assert call.pop("client_meta") is DEFAULT assert call == { "headers": { - "accept": "application/vnd.elasticsearch+json; compatible-with=0", + "accept": "application/vnd.elasticsearch+json; compatible-with=8", }, "body": None, "request_timeout": 1, diff --git 
a/test_elasticsearch_serverless/test_client/test_serializers.py b/test_elasticsearch_serverless/test_client/test_serializers.py index 8a63a1b..6115e43 100644 --- a/test_elasticsearch_serverless/test_client/test_serializers.py +++ b/test_elasticsearch_serverless/test_client/test_serializers.py @@ -29,30 +29,30 @@ def test_compat_mode_on_by_default(self): self.client.get(index="test0", id="1") assert len(calls) == 1 assert calls[("GET", "/test0/_doc/1")][0]["headers"] == { - "Accept": "application/vnd.elasticsearch+json; compatible-with=0" + "Accept": "application/vnd.elasticsearch+json; compatible-with=8" } # Search with body self.client.search(index="test1", query={"match_all": {}}) assert len(calls) == 2 assert calls[("POST", "/test1/_search")][0]["headers"] == { - "Accept": "application/vnd.elasticsearch+json; compatible-with=0", - "Content-Type": "application/vnd.elasticsearch+json; compatible-with=0", + "Accept": "application/vnd.elasticsearch+json; compatible-with=8", + "Content-Type": "application/vnd.elasticsearch+json; compatible-with=8", } # Search without body self.client.search(index="test2") assert len(calls) == 3 assert calls[("POST", "/test2/_search")][0]["headers"] == { - "Accept": "application/vnd.elasticsearch+json; compatible-with=0", + "Accept": "application/vnd.elasticsearch+json; compatible-with=8", } # Bulk uses x-ndjson self.client.bulk(operations=[]) assert len(calls) == 4 assert calls[("PUT", "/_bulk")][0]["headers"] == { - "Accept": "application/vnd.elasticsearch+json; compatible-with=0", - "Content-Type": "application/vnd.elasticsearch+x-ndjson; compatible-with=0", + "Accept": "application/vnd.elasticsearch+json; compatible-with=8", + "Content-Type": "application/vnd.elasticsearch+x-ndjson; compatible-with=8", } @pytest.mark.parametrize("mime_subtype", ["json", "x-ndjson"]) diff --git a/test_elasticsearch_serverless/test_serializer.py b/test_elasticsearch_serverless/test_serializer.py index 7d1bc8e..4f24573 100644 --- a/test_elasticsearch_serverless/test_serializer.py +++ b/test_elasticsearch_serverless/test_serializer.py @@ -197,8 +197,8 @@ def test_deserialize_compatibility_header(self): for content_type in ( "application/vnd.elasticsearch+json;compatible-with=7", "application/vnd.elasticsearch+json; compatible-with=7", - "application/vnd.elasticsearch+json;compatible-with=0", - "application/vnd.elasticsearch+json; compatible-with=0", + "application/vnd.elasticsearch+json;compatible-with=8", + "application/vnd.elasticsearch+json; compatible-with=8", ): assert {"some": "data"} == self.serializers.loads( '{"some":"data"}', content_type @@ -210,8 +210,8 @@ def test_deserialize_compatibility_header(self): for content_type in ( "application/vnd.elasticsearch+x-ndjson;compatible-with=7", "application/vnd.elasticsearch+x-ndjson; compatible-with=7", - "application/vnd.elasticsearch+x-ndjson;compatible-with=0", - "application/vnd.elasticsearch+x-ndjson; compatible-with=0", + "application/vnd.elasticsearch+x-ndjson;compatible-with=8", + "application/vnd.elasticsearch+x-ndjson; compatible-with=8", ): assert b'{"some":"data"}\n{"some":"data"}\n' == self.serializers.dumps( ['{"some":"data"}', {"some": "data"}], content_type diff --git a/test_elasticsearch_serverless/test_server/conftest.py b/test_elasticsearch_serverless/test_server/conftest.py index 6aee306..0dee7f3 100644 --- a/test_elasticsearch_serverless/test_server/conftest.py +++ b/test_elasticsearch_serverless/test_server/conftest.py @@ -21,7 +21,7 @@ import elasticsearch_serverless -from ..utils import CA_CERTS, 
wipe_cluster +from ..utils import wipe_cluster # Information about the Elasticsearch instance running, if any # Used for @@ -31,18 +31,20 @@ @pytest.fixture(scope="session") -def sync_client_factory(elasticsearch_url): +def sync_client_factory(elasticsearch_url, elasticsearch_api_key): client = None try: - # Configure the client with certificates and optionally + # Configure the client with API key and optionally # an HTTP conn class depending on 'PYTHON_CONNECTION_CLASS' envvar - kw = {"ca_certs": CA_CERTS} + kw = {'api_key': elasticsearch_api_key} if "PYTHON_CONNECTION_CLASS" in os.environ: kw["node_class"] = os.environ["PYTHON_CONNECTION_CLASS"] # We do this little dance with the URL to force # Requests to respect 'headers: None' within rest API spec tests. - client = elasticsearch_serverless.Elasticsearch(elasticsearch_url, **kw) + client = elasticsearch_serverless.Elasticsearch( + elasticsearch_url, **kw + ) # Wipe the cluster before we start testing just in case it wasn't wiped # cleanly from the previous run of pytest? diff --git a/test_elasticsearch_serverless/test_server/test_helpers.py b/test_elasticsearch_serverless/test_server/test_helpers.py index eaef082..6939d8c 100644 --- a/test_elasticsearch_serverless/test_server/test_helpers.py +++ b/test_elasticsearch_serverless/test_server/test_helpers.py @@ -66,7 +66,7 @@ def test_bulk_actions_remain_unchanged(sync_client): def test_bulk_all_documents_get_inserted(sync_client): docs = [{"answer": x, "_id": x} for x in range(100)] for ok, item in helpers.streaming_bulk( - sync_client, docs, index="test-index", refresh=True + sync_client, docs, index="test-index", refresh=True, ): assert ok @@ -79,10 +79,8 @@ def test_bulk_all_errors_from_chunk_are_raised_on_failure(sync_client): index="i", body={ "mappings": {"properties": {"a": {"type": "integer"}}}, - "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) - sync_client.cluster.health(wait_for_status="yellow") try: for ok, _ in helpers.streaming_bulk( @@ -172,12 +170,12 @@ def test_bulk_rejected_documents_are_retried(sync_client): chunk_size=1, max_retries=1, initial_backoff=0, + refresh=True, ) ) assert 3 == len(results) print(results) assert [True, True, True] == [r[0] for r in results] - sync_client.indices.refresh(index="i") res = sync_client.search(index="i") assert {"value": 3, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called @@ -211,11 +209,11 @@ def test_bulk_rejected_documents_are_retried_when_bytes_or_string( chunk_size=1, max_retries=1, initial_backoff=0, + refresh=True, ) ) assert 3 == len(results) assert [True, True, True] == [r[0] for r in results] - sync_client.indices.refresh(index="i") res = sync_client.search(index="i") assert {"value": 3, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called @@ -248,11 +246,11 @@ def test_bulk_rejected_documents_are_retried_at_most_max_retries_times(sync_clie chunk_size=1, max_retries=1, initial_backoff=0, + refresh=True, ) ) assert 3 == len(results) assert [False, True, True] == [r[0] for r in results] - sync_client.indices.refresh(index="i") res = sync_client.search(index="i") assert {"value": 2, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called @@ -323,9 +321,7 @@ def test_errors_are_reported_correctly(sync_client): sync_client.indices.create( index="i", mappings={"properties": {"a": {"type": "integer"}}}, - settings={"number_of_shards": 1, "number_of_replicas": 0}, ) - sync_client.cluster.health(wait_for_status="yellow") success, failed = 
helpers.bulk( sync_client, @@ -348,9 +344,7 @@ def test_error_is_raised(sync_client): sync_client.indices.create( index="i", mappings={"properties": {"a": {"type": "integer"}}}, - settings={"number_of_shards": 1, "number_of_replicas": 0}, ) - sync_client.cluster.health(wait_for_status="yellow") with pytest.raises(helpers.BulkIndexError): helpers.bulk( @@ -395,9 +389,7 @@ def test_errors_are_collected_properly(sync_client): sync_client.indices.create( index="i", mappings={"properties": {"a": {"type": "integer"}}}, - settings={"number_of_shards": 1, "number_of_replicas": 0}, ) - sync_client.cluster.health(wait_for_status="yellow") success, failed = helpers.bulk( sync_client, @@ -699,7 +691,7 @@ def test_log_warning_on_shard_failures(sync_client): for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) - sync_client.bulk(operations=bulk, refresh=True) + sync_client.bulk(operations=bulk) with patch( "elasticsearch_serverless.helpers.actions.logger" @@ -868,8 +860,8 @@ def test_reindex_accepts_a_query(sync_client): "test_index", "prod_index", query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}}, + bulk_kwargs={"refresh": True}, ) - sync_client.indices.refresh() assert sync_client.indices.exists(index="prod_index") assert 50 == sync_client.count(index="prod_index", q="type:answers")["count"] @@ -881,8 +873,7 @@ def test_reindex_accepts_a_query(sync_client): @pytest.mark.usefixtures("reindex_setup") def test_all_documents_get_moved(sync_client): - helpers.reindex(sync_client, "test_index", "prod_index") - sync_client.indices.refresh() + helpers.reindex(sync_client, "test_index", "prod_index", bulk_kwargs={"refresh": True}) assert sync_client.indices.exists(index="prod_index") assert 50 == sync_client.count(index="prod_index", q="type:questions")["count"] @@ -896,7 +887,6 @@ def test_all_documents_get_moved(sync_client): @pytest.fixture(scope="function") def parent_child_reindex_setup(sync_client): body = { - "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { "properties": { "question_answer": { @@ -915,8 +905,8 @@ def parent_child_reindex_setup(sync_client): id=47, routing=42, body={"some": "data", "question_answer": {"name": "answer", "parent": 42}}, + refresh=True, ) - sync_client.indices.refresh(index="test-index") @pytest.mark.usefixtures("parent_child_reindex_setup") @@ -972,7 +962,6 @@ def reindex_data_stream_setup(sync_client): }, ) sync_client.indices.create_data_stream(name="py-test-stream") - sync_client.indices.refresh() @pytest.mark.usefixtures("reindex_data_stream_setup") @@ -985,7 +974,6 @@ def test_reindex_index_datastream(op_type, sync_client): query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}}, op_type=op_type, ) - sync_client.indices.refresh() assert sync_client.indices.exists(index="py-test-stream") assert 50 == sync_client.count(index="py-test-stream", q="type:answers")["count"] diff --git a/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py b/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py index d850e26..baf1411 100644 --- a/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py +++ b/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py @@ -67,13 +67,12 @@ def mvt_setup(sync_client): "included": True, }, ], - refresh=True, ) @pytest.mark.parametrize("node_class", ["urllib3", "requests"]) -def test_mapbox_vector_tile_error(elasticsearch_url, mvt_setup, node_class, ca_certs): - client = 
Elasticsearch(elasticsearch_url, node_class=node_class, ca_certs=ca_certs) +def test_mapbox_vector_tile_error(elasticsearch_url, mvt_setup, node_class): + client = Elasticsearch(elasticsearch_url, node_class=node_class) client.search_mvt( index="museums", zoom=13, @@ -114,14 +113,14 @@ def test_mapbox_vector_tile_error(elasticsearch_url, mvt_setup, node_class, ca_c @pytest.mark.parametrize("node_class", ["urllib3", "requests"]) def test_mapbox_vector_tile_response( - elasticsearch_url, mvt_setup, node_class, ca_certs + elasticsearch_url, mvt_setup, node_class ): try: import mapbox_vector_tile except ImportError: return pytest.skip("Requires the 'mapbox-vector-tile' package") - client = Elasticsearch(elasticsearch_url, node_class=node_class, ca_certs=ca_certs) + client = Elasticsearch(elasticsearch_url, node_class=node_class) resp = client.search_mvt( index="museums", diff --git a/test_elasticsearch_serverless/test_server/test_responses.py b/test_elasticsearch_serverless/test_server/test_responses.py index d633534..f5c96a5 100644 --- a/test_elasticsearch_serverless/test_server/test_responses.py +++ b/test_elasticsearch_serverless/test_server/test_responses.py @@ -17,7 +17,7 @@ def test_text_response(sync_client): - resp = sync_client.cat.tasks() + resp = sync_client.cat.aliases() assert resp.meta.status == 200 assert isinstance(resp.body, str) assert str(resp.body) == str(resp) diff --git a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py index 8144edf..cfeb119 100644 --- a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py +++ b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py @@ -42,7 +42,7 @@ from elasticsearch_serverless._sync.client.utils import _base64_auth_header from elasticsearch_serverless.compat import string_types -from ..utils import CA_CERTS, es_url, parse_version +from ..utils import es_api_key, es_url, parse_version # some params had to be changed in python, keep track of them so we can rename # those in the tests accordingly @@ -555,7 +555,7 @@ def remove_implicit_resolver(cls, tag_to_remove): try: # Construct the HTTP and Elasticsearch client http = urllib3.PoolManager(retries=10) - client = Elasticsearch(es_url(), request_timeout=3, ca_certs=CA_CERTS) + client = Elasticsearch(es_url(), api_key=es_api_key(), request_timeout=3) # Make a request to Elasticsearch for the build hash, we'll be looking for # an artifact with this same hash to download test specs for. 
@@ -593,7 +593,13 @@ def remove_implicit_resolver(cls, tag_to_remove):
         )
 
     # Download the zip and start reading YAML from the files in memory
-    package_zip = zipfile.ZipFile(io.BytesIO(http.request("GET", package_url).data))
+    package_zip = zipfile.ZipFile(
+        io.BytesIO(
+            http.request(
+                "GET", package_url, headers={"content-type": "application/json"}
+            ).data
+        )
+    )
     for yaml_file in package_zip.namelist():
         if not re.match(r"^rest-api-spec/test/.*\.ya?ml$", yaml_file):
             continue
diff --git a/test_elasticsearch_serverless/test_transport.py b/test_elasticsearch_serverless/test_transport.py
index 8061713..095f6ec 100644
--- a/test_elasticsearch_serverless/test_transport.py
+++ b/test_elasticsearch_serverless/test_transport.py
@@ -253,7 +253,7 @@ def test_client_meta_header_not_sent(self):
         calls = client.transport.node_pool.get().calls
         assert 1 == len(calls)
         assert calls[0][1]["headers"] == {
-            "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+            "accept": "application/vnd.elasticsearch+json; compatible-with=8",
         }
 
     def test_meta_header_type_error(self):
@@ -363,7 +363,7 @@ def test_unsupported_product_error(headers):
         {
             "body": None,
             "headers": {
-                "accept": "application/vnd.elasticsearch+json; compatible-with=0",
+                "accept": "application/vnd.elasticsearch+json; compatible-with=8",
             },
             "request_timeout": DEFAULT,
         },
diff --git a/test_elasticsearch_serverless/utils.py b/test_elasticsearch_serverless/utils.py
index a081f78..e5eef23 100644
--- a/test_elasticsearch_serverless/utils.py
+++ b/test_elasticsearch_serverless/utils.py
@@ -21,70 +21,19 @@
 from pathlib import Path
 from typing import Optional, Tuple
 
-from elasticsearch_serverless import (
-    AuthorizationException,
-    ConnectionError,
-    Elasticsearch,
-    NotFoundError,
-    RequestError,
-)
+from elasticsearch_serverless import Elasticsearch, RequestError
 
 SOURCE_DIR = Path(__file__).absolute().parent.parent
-CA_CERTS = str(SOURCE_DIR / ".ci/certs/ca.crt")
 
 
 def es_url() -> str:
-    """Grabs an Elasticsearch URL which can be designated via
-    an environment variable otherwise falls back on localhost.
+    """Grabs an Elasticsearch URL that must be designated via
+    an environment variable, otherwise raises an exception.
     """
-    urls_to_try = []
-
-    # Try user-supplied URLs before generic localhost ones.
-    if os.environ.get("ELASTICSEARCH_URL", ""):
-        urls_to_try.append(os.environ["ELASTICSEARCH_URL"])
-    if os.environ.get("elasticsearch_url", ""):
-        urls_to_try.append(os.environ["elasticsearch_url"])
-    urls_to_try.extend(
-        [
-            "https://localhost:9200",
-            "http://localhost:9200",
-            "https://elastic:changeme@localhost:9200",
-            "http://elastic:changeme@localhost:9200",
-        ]
-    )
-
-    error = None
-    for url in urls_to_try:
-        if url.startswith("https://"):
-            client = Elasticsearch(url, ca_certs=CA_CERTS, verify_certs=False)
-        else:
-            client = Elasticsearch(url)
-        try:
-            # Check that we get any sort of connection first.
-            client.info()
-
-            # After we get a connection let's wait for the cluster
-            # to be in 'yellow' state, otherwise we could start
-            # tests too early and get failures.
-            for _ in range(100):
-                try:
-                    client.cluster.health(wait_for_status="yellow")
-                    break
-                except ConnectionError:
-                    time.sleep(0.1)
-
-        except ConnectionError as e:
-            if error is None:
-                error = str(e)
-        else:
-            successful_url = url
-            break
-    else:
-        raise RuntimeError(
-            "Could not connect to Elasticsearch (tried %s): %s"
-            % (", ".join(urls_to_try), error)
-        )
-    return successful_url
+    url = os.environ.get("ELASTICSEARCH_URL", None)
+    if url is None:
+        raise RuntimeError("No ELASTICSEARCH_URL environment variable provided")
+    return url
 
 
 def es_version(client) -> Tuple[int, ...]:
@@ -114,22 +63,12 @@ def wipe_cluster(client):
 
         if isinstance(client, AsyncElasticsearch):
             node_config = client.transport.node_pool.get().config
-            client = Elasticsearch(node_config, verify_certs=False)
+            client = Elasticsearch([node_config])
             close_after_wipe = True
     except ImportError:
         pass
 
     is_xpack = True
-    if is_xpack:
-        wipe_rollup_jobs(client)
-        wait_for_pending_tasks(client, filter="xpack/rollup/job")
-        wipe_slm_policies(client)
-
-        # Searchable snapshot indices start in 7.8+
-        if es_version(client) >= (7, 8):
-            wipe_searchable_snapshot_indices(client)
-
-    wipe_snapshots(client)
     if is_xpack:
         wipe_data_streams(client)
     wipe_indices(client)
@@ -141,72 +80,13 @@ def wipe_cluster(client):
         client.indices.delete_index_template(name="*")
         client.cluster.delete_component_template(name="*")
 
-    wipe_cluster_settings(client)
-
     if is_xpack:
-        wipe_ilm_policies(client)
-        wipe_auto_follow_patterns(client)
-        wipe_tasks(client)
-        wipe_node_shutdown_metadata(client)
-        wait_for_pending_datafeeds_and_jobs(client)
-        wipe_calendars(client)
-        wipe_filters(client)
         wipe_transforms(client)
 
-    wait_for_cluster_state_updates_to_finish(client)
     if close_after_wipe:
        client.close()
 
 
-def wipe_cluster_settings(client):
-    settings = client.cluster.get_settings()
-    new_settings = {}
-    for name, value in settings.items():
-        if value:
-            new_settings.setdefault(name, {})
-            for key in value.keys():
-                new_settings[name][key + ".*"] = None
-    if new_settings:
-        client.cluster.put_settings(body=new_settings)
-
-
-def wipe_rollup_jobs(client):
-    rollup_jobs = client.rollup.get_jobs(id="_all").get("jobs", ())
-    for job in rollup_jobs:
-        job_id = job["config"]["id"]
-        client.options(ignore_status=404).rollup.stop_job(
-            id=job_id, wait_for_completion=True
-        )
-        client.options(ignore_status=404).rollup.delete_job(id=job_id)
-
-
-def wipe_snapshots(client):
-    """Deletes all the snapshots and repositories from the cluster"""
-    in_progress_snapshots = []
-
-    repos = client.snapshot.get_repository(name="_all")
-    for repo_name, repo in repos.items():
-        if repo_name in {"found-snapshots"}:
-            continue
-
-        if repo["type"] == "fs":
-            snapshots = client.snapshot.get(
-                repository=repo_name, snapshot="_all", ignore_unavailable=True
-            )
-            for snapshot in snapshots["snapshots"]:
-                if snapshot["state"] == "IN_PROGRESS":
-                    in_progress_snapshots.append(snapshot)
-                else:
-                    client.options(ignore_status=404).snapshot.delete(
-                        repository=repo_name,
-                        snapshot=snapshot["snapshot"],
-                    )
-
-        client.options(ignore_status=404).snapshot.delete_repository(name=repo_name)
-
-    assert in_progress_snapshots == []
-
-
 def wipe_data_streams(client):
     try:
         client.indices.delete_data_stream(name="*", expand_wildcards="all")
@@ -215,35 +95,16 @@ def wipe_data_streams(client):
 
 
 def wipe_indices(client):
-    client.options(ignore_status=404).indices.delete(
-        index="*,-.ds-ilm-history-*",
-        expand_wildcards="all",
-    )
-
-
-def wipe_searchable_snapshot_indices(client):
-    cluster_metadata = client.cluster.state(
-        metric="metadata",
-        filter_path="metadata.indices.*.settings.index.store.snapshot",
-    )
-    if cluster_metadata:
-        for index in cluster_metadata["metadata"]["indices"].keys():
-            client.indices.delete(index=index)
+    indices = client.cat.indices().strip().splitlines()
+    if len(indices) > 0:
+        index_names = [i.split(" ")[2] for i in indices]
+        client.options(ignore_status=404).indices.delete(
+            index=",".join(index_names),
+            expand_wildcards="all",
+        )
 
 
 def wipe_xpack_templates(client):
-    templates = [
-        x.strip() for x in client.cat.templates(h="name").split("\n") if x.strip()
-    ]
-    for template in templates:
-        if is_xpack_template(template):
-            continue
-        try:
-            client.indices.delete_template(name=template)
-        except NotFoundError as e:
-            if f"index_template [{template}] missing" in str(e):
-                client.indices.delete_index_template(name=template)
-
     # Delete component templates, need to retry because sometimes
     # indices aren't cleaned up in time before we issue the delete.
     templates = client.cluster.get_component_template()["component_templates"]
@@ -266,133 +127,6 @@ def wipe_xpack_templates(client):
             time.sleep(0.01)
 
 
-def wipe_ilm_policies(client):
-    for policy in client.ilm.get_lifecycle():
-        if (
-            policy
-            not in {
-                "ilm-history-ilm-policy",
-                "slm-history-ilm-policy",
-                "watch-history-ilm-policy",
-                "watch-history-ilm-policy-16",
-                "ml-size-based-ilm-policy",
-                "logs",
-                "metrics",
-                "synthetics",
-                "7-days-default",
-                "30-days-default",
-                "90-days-default",
-                "180-days-default",
-                "365-days-default",
-                ".fleet-actions-results-ilm-policy",
-                ".deprecation-indexing-ilm-policy",
-                ".monitoring-8-ilm-policy",
-            }
-            and "-history-ilm-polcy" not in policy
-        ):
-            client.ilm.delete_lifecycle(name=policy)
-
-
-def wipe_slm_policies(client):
-    policies = client.slm.get_lifecycle()
-    for policy in policies:
-        if policy not in {"cloud-snapshot-policy"}:
-            client.slm.delete_lifecycle(policy_id=policy)
-
-
-def wipe_auto_follow_patterns(client):
-    for pattern in client.ccr.get_auto_follow_pattern()["patterns"]:
-        client.ccr.delete_auto_follow_pattern(name=pattern["name"])
-
-
-def wipe_node_shutdown_metadata(client):
-    try:
-        shutdown_status = client.shutdown.get_node()
-        # If response contains these two keys the feature flag isn't enabled
-        # on this cluster so skip this step now.
-        if "_nodes" in shutdown_status and "cluster_name" in shutdown_status:
-            return
-
-        for shutdown_node in shutdown_status.get("nodes", []):
-            node_id = shutdown_node["node_id"]
-            client.shutdown.delete_node(node_id=node_id)
-
-    # Elastic Cloud doesn't allow this so we skip.
-    except AuthorizationException:
-        pass
-
-
-def wipe_tasks(client):
-    tasks = client.tasks.list()
-    for node_name, node in tasks.get("node", {}).items():
-        for task_id in node.get("tasks", ()):
-            client.tasks.cancel(task_id=task_id, wait_for_completion=True)
-
-
-def wait_for_pending_tasks(client, filter, timeout=30):
-    end_time = time.time() + timeout
-    while time.time() < end_time:
-        tasks = client.cat.tasks(detailed=True).split("\n")
-        if not any(filter in task for task in tasks):
-            break
-
-
-def wait_for_pending_datafeeds_and_jobs(client: Elasticsearch, timeout=30):
-    end_time = time.time() + timeout
-    while time.time() < end_time:
-        resp = client.ml.get_datafeeds(datafeed_id="*", allow_no_match=True)
-        if resp["count"] == 0:
-            break
-        for datafeed in resp["datafeeds"]:
-            client.options(ignore_status=404).ml.delete_datafeed(
-                datafeed_id=datafeed["datafeed_id"]
-            )
-
-    end_time = time.time() + timeout
-    while time.time() < end_time:
-        resp = client.ml.get_jobs(job_id="*", allow_no_match=True)
-        if resp["count"] == 0:
-            break
-        for job in resp["jobs"]:
-            client.options(ignore_status=404).ml.close_job(job_id=job["job_id"])
-            client.options(ignore_status=404).ml.delete_job(job_id=job["job_id"])
-
-    end_time = time.time() + timeout
-    while time.time() < end_time:
-        resp = client.ml.get_data_frame_analytics(id="*")
-        if resp["count"] == 0:
-            break
-        for job in resp["data_frame_analytics"]:
-            client.options(ignore_status=404).ml.stop_data_frame_analytics(id=job["id"])
-            client.options(ignore_status=404).ml.delete_data_frame_analytics(
-                id=job["id"]
-            )
-
-
-def wipe_filters(client: Elasticsearch, timeout=30):
-    end_time = time.time() + timeout
-    while time.time() < end_time:
-        resp = client.ml.get_filters(filter_id="*")
-        if resp["count"] == 0:
-            break
-        for filter in resp["filters"]:
-            client.options(ignore_status=404).ml.delete_filter(
-                filter_id=filter["filter_id"]
-            )
-
-
-def wipe_calendars(client: Elasticsearch, timeout=30):
-    end_time = time.time() + timeout
-    while time.time() < end_time:
-        resp = client.ml.get_calendars(calendar_id="*")
-        if resp["count"] == 0:
-            break
-        for calendar in resp["calendars"]:
-            client.options(ignore_status=404).ml.delete_calendar(
-                calendar_id=calendar["calendar_id"]
-            )
-
-
 def wipe_transforms(client: Elasticsearch, timeout=30):
     end_time = time.time() + timeout
     while time.time() < end_time:
@@ -408,13 +142,6 @@ def wipe_transforms(client: Elasticsearch, timeout=30):
             )
 
 
-def wait_for_cluster_state_updates_to_finish(client, timeout=30):
-    end_time = time.time() + timeout
-    while time.time() < end_time:
-        if not client.cluster.pending_tasks().get("tasks", ()):
-            break
-
-
 def is_xpack_template(name):
     if name.startswith(".monitoring-"):
         return True
@@ -451,3 +178,13 @@ def is_xpack_template(name):
         }:
             return True
     return False
+
+
+def es_api_key() -> str:
+    """
+    Gets the Elasticsearch API key ID and secret from environment variables, raises RuntimeError if none found
+    """
+    api_key = os.environ.get("ES_API_KEY", None)
+    if api_key is None:
+        raise RuntimeError("No ES_API_KEY environment variable provided")
+    return api_key

From 421e60b0c14b23921469672c4f66754a235ce478 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Fri, 8 Sep 2023 15:28:34 -0500
Subject: [PATCH 02/15] Fix outdated package name references

---
 elasticsearch_serverless/client.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/elasticsearch_serverless/client.py b/elasticsearch_serverless/client.py
index a140c9b..5a90dda 100644
--- a/elasticsearch_serverless/client.py
+++ b/elasticsearch_serverless/client.py
@@ -65,8 +65,8 @@
 
 # This file exists for backwards compatibility.
 warnings.warn(
-    "Importing from the 'elasticsearch.client' module is deprecated. "
-    "Instead use 'elasticsearch' module for importing the client.",
+    "Importing from the 'elasticsearch_serverless.client' module is deprecated. "
+    "Instead use 'elasticsearch_serverless' module for importing the client.",
     category=DeprecationWarning,
     stacklevel=2,
 )

From da6887e46f7dfa80102cc579e7aa3b1610fcbb4c Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Fri, 8 Sep 2023 15:42:53 -0500
Subject: [PATCH 03/15] Streamline Dockerfile used by CI

---
 .ci/Dockerfile | 20 +++++---------------
 1 file changed, 5 insertions(+), 15 deletions(-)

diff --git a/.ci/Dockerfile b/.ci/Dockerfile
index 49dfd7a..4fef09d 100644
--- a/.ci/Dockerfile
+++ b/.ci/Dockerfile
@@ -1,23 +1,13 @@
-ARG PYTHON_VERSION=3.8
+ARG PYTHON_VERSION=3.9
 FROM python:${PYTHON_VERSION}
 
-# Default UID/GID to 1000
-# it can be overridden at build time
-ARG BUILDER_UID=1000
-ARG BUILDER_GID=1000
-ENV BUILDER_USER elastic
-ENV BUILDER_GROUP elastic
-
 WORKDIR /code/elasticsearch-serverless-python
+RUN mkdir -p /code/elasticsearch-serverless-python/build
 
-# Create user
-RUN groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP} \
-    && useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GROUP} -d /var/lib/elastic -m elastic 1>/dev/null 2>/dev/null \
-    && mkdir /code/elasticsearch-serverless-python/build \
-    && chown -R ${BUILDER_USER}:${BUILDER_GROUP} /code/
-USER ${BUILDER_USER}:${BUILDER_GROUP}
-COPY --chown=$BUILDER_USER:$BUILDER_GROUP . .
+COPY pyproject.toml README.rst .
 RUN python -m pip install \
     -U --no-cache-dir \
     --disable-pip-version-check \
     .[dev]
+
+COPY . .

From 1c90489c04ee6ca58f7e652b55979103295fb97f Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Tue, 19 Sep 2023 13:01:23 -0500
Subject: [PATCH 04/15] Allow transport to assume port

Since serverless projects provide users with an endpoint that does not
include a port, we should assume HTTPS means port 443.

---
 elasticsearch_serverless/_sync/client/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/elasticsearch_serverless/_sync/client/utils.py b/elasticsearch_serverless/_sync/client/utils.py
index e3e11b1..f950045 100644
--- a/elasticsearch_serverless/_sync/client/utils.py
+++ b/elasticsearch_serverless/_sync/client/utils.py
@@ -123,7 +123,7 @@ def host_to_node_config(host: _TYPE_HOST) -> NodeConfig:
     if isinstance(host, NodeConfig):
         return host
     elif isinstance(host, str):
-        return url_to_node_config(host)
+        return url_to_node_config(host, use_default_ports_for_scheme=True)
     elif isinstance(host, Mapping):
         return host_mapping_to_node_config(host)
     else:

From 8e642ab168bdbac2c945401d0e6b997b7aaa316a Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Wed, 20 Sep 2023 15:11:06 -0500
Subject: [PATCH 05/15] Fetch YAML tests from elastic/serverless-clients-tests

Instead of using the entire set of YAML REST tests from the
elasticsearch repo.
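For reference, the download flow now looks roughly like the sketch
below. This is illustrative only, condensed from the diff that follows;
it assumes GITHUB_TOKEN is exported by .buildkite/run-tests, and the
pinned commit in the URL is the one referenced in the diff:

    import io
    import os
    import re
    import zipfile

    import urllib3

    # Assumes the CI environment provides a token with read access.
    github_token = os.environ.get("GITHUB_TOKEN")
    if github_token is None:
        raise RuntimeError("GITHUB_TOKEN environment variable is not set")

    http = urllib3.PoolManager(retries=10)

    # Zipball of elastic/serverless-clients-tests, pinned to a specific commit.
    yaml_tests_url = (
        "https://api.github.com/repos/elastic/serverless-clients-tests"
        "/zipball/572c72b5b32f145e62a966563688e46401d3d28a"
    )
    resp = http.request(
        "GET",
        yaml_tests_url,
        headers={
            "Authorization": f"Bearer {github_token}",
            "Accept": "application/vnd.github+json",
        },
    )

    # The archive is read fully in memory; test specs live under */tests/*.ya?ml
    package_zip = zipfile.ZipFile(io.BytesIO(resp.data))
    yaml_files = [
        name
        for name in package_zip.namelist()
        if re.match(r"^.*\/tests\/.*\.ya?ml$", name)
    ]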
---
 .../test_async/test_server/test_helpers.py |  7 +-
 .../test_server/conftest.py                |  6 +-
 .../test_server/test_helpers.py            |  9 ++-
 .../test_server/test_rest_api_spec.py      | 73 ++++---------------
 4 files changed, 29 insertions(+), 66 deletions(-)

diff --git a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py
index caa6660..2ed0466 100644
--- a/test_elasticsearch_serverless/test_async/test_server/test_helpers.py
+++ b/test_elasticsearch_serverless/test_async/test_server/test_helpers.py
@@ -921,7 +921,12 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup):
         )["_source"]
 
     async def test_all_documents_get_moved(self, async_client, reindex_setup):
-        await helpers.async_reindex(async_client, "test_index", "prod_index", bulk_kwargs={"refresh": True})
+        await helpers.async_reindex(
+            async_client,
+            "test_index",
+            "prod_index",
+            bulk_kwargs={"refresh": "wait_for"},
+        )
 
         assert await async_client.indices.exists(index="prod_index")
         assert (
diff --git a/test_elasticsearch_serverless/test_server/conftest.py b/test_elasticsearch_serverless/test_server/conftest.py
index 0dee7f3..2f1ba07 100644
--- a/test_elasticsearch_serverless/test_server/conftest.py
+++ b/test_elasticsearch_serverless/test_server/conftest.py
@@ -36,15 +36,13 @@ def sync_client_factory(elasticsearch_url, elasticsearch_api_key):
     try:
         # Configure the client with API key and optionally
         # an HTTP conn class depending on 'PYTHON_CONNECTION_CLASS' envvar
-        kw = {'api_key': elasticsearch_api_key}
+        kw = {"api_key": elasticsearch_api_key}
         if "PYTHON_CONNECTION_CLASS" in os.environ:
             kw["node_class"] = os.environ["PYTHON_CONNECTION_CLASS"]
 
         # We do this little dance with the URL to force
         # Requests to respect 'headers: None' within rest API spec tests.
-        client = elasticsearch_serverless.Elasticsearch(
-            elasticsearch_url, **kw
-        )
+        client = elasticsearch_serverless.Elasticsearch(elasticsearch_url, **kw)
 
         # Wipe the cluster before we start testing just in case it wasn't wiped
         # cleanly from the previous run of pytest?
diff --git a/test_elasticsearch_serverless/test_server/test_helpers.py b/test_elasticsearch_serverless/test_server/test_helpers.py index 6939d8c..4bf1967 100644 --- a/test_elasticsearch_serverless/test_server/test_helpers.py +++ b/test_elasticsearch_serverless/test_server/test_helpers.py @@ -66,7 +66,10 @@ def test_bulk_actions_remain_unchanged(sync_client): def test_bulk_all_documents_get_inserted(sync_client): docs = [{"answer": x, "_id": x} for x in range(100)] for ok, item in helpers.streaming_bulk( - sync_client, docs, index="test-index", refresh=True, + sync_client, + docs, + index="test-index", + refresh=True, ): assert ok @@ -873,7 +876,9 @@ def test_reindex_accepts_a_query(sync_client): @pytest.mark.usefixtures("reindex_setup") def test_all_documents_get_moved(sync_client): - helpers.reindex(sync_client, "test_index", "prod_index", bulk_kwargs={"refresh": True}) + helpers.reindex( + sync_client, "test_index", "prod_index", bulk_kwargs={"refresh": True} + ) assert sync_client.indices.exists(index="prod_index") assert 50 == sync_client.count(index="prod_index", q="type:questions")["count"] diff --git a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py index cfeb119..67769c3 100644 --- a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py +++ b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py @@ -42,7 +42,7 @@ from elasticsearch_serverless._sync.client.utils import _base64_auth_header from elasticsearch_serverless.compat import string_types -from ..utils import es_api_key, es_url, parse_version +from ..utils import es_api_key, es_url # some params had to be changed in python, keep track of them so we can rename # those in the tests accordingly @@ -135,7 +135,6 @@ XPACK_FEATURES = None -ES_VERSION = None RUN_ASYNC_REST_API_TESTS = ( sys.version_info >= (3, 8) and os.environ.get("PYTHON_CONNECTION_CLASS") == "requests" @@ -182,16 +181,6 @@ def teardown(self): self.section("teardown") self.run_code(self._teardown_code) - def es_version(self): - global ES_VERSION - if ES_VERSION is None: - version_string = (self.client.info())["version"]["number"] - if "." 
diff --git a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py
index cfeb119..67769c3 100644
--- a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py
+++ b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py
@@ -42,7 +42,7 @@
 from elasticsearch_serverless._sync.client.utils import _base64_auth_header
 from elasticsearch_serverless.compat import string_types
 
-from ..utils import es_api_key, es_url, parse_version
+from ..utils import es_api_key, es_url
 
 # some params had to be changed in python, keep track of them so we can rename
 # those in the tests accordingly
@@ -135,7 +135,6 @@
 
 
 XPACK_FEATURES = None
-ES_VERSION = None
 RUN_ASYNC_REST_API_TESTS = (
     sys.version_info >= (3, 8)
     and os.environ.get("PYTHON_CONNECTION_CLASS") == "requests"
@@ -182,16 +181,6 @@ def teardown(self):
         self.section("teardown")
         self.run_code(self._teardown_code)
 
-    def es_version(self):
-        global ES_VERSION
-        if ES_VERSION is None:
-            version_string = (self.client.info())["version"]["number"]
-            if "." not in version_string:
-                return ()
-            version = version_string.strip().split(".")
-            ES_VERSION = tuple(int(v) if v.isdigit() else 999 for v in version)
-        return ES_VERSION
-
     def section(self, name):
         print(("=" * 10) + " " + name + " " + ("=" * 10))
 
@@ -340,16 +329,6 @@ def run_skip(self, skip):
                 continue
             pytest.skip(f"feature '{feature}' is not supported")
 
-        if "version" in skip:
-            version, reason = skip["version"], skip["reason"]
-            if version == "all":
-                pytest.skip(reason)
-            min_version, _, max_version = version.partition("-")
-            min_version = parse_version(min_version.strip()) or (0,)
-            max_version = parse_version(max_version.strip()) or (999,)
-            if min_version <= (self.es_version()) <= max_version:
-                pytest.skip(reason)
-
     def run_gt(self, action):
         for key, value in action.items():
             value = self._resolve(value)
@@ -553,55 +532,32 @@ def remove_implicit_resolver(cls, tag_to_remove):
 
 # Try loading the REST API test specs from the Elastic Artifacts API
 try:
+    github_token = os.environ.get("GITHUB_TOKEN")
+    if github_token is None:
+        raise RuntimeError("GITHUB_TOKEN environment variable is not set")
+
     # Construct the HTTP and Elasticsearch client
     http = urllib3.PoolManager(retries=10)
     client = Elasticsearch(es_url(), api_key=es_api_key(), request_timeout=3)
 
-    # Make a request to Elasticsearch for the build hash, we'll be looking for
-    # an artifact with this same hash to download test specs for.
-    client_info = client.info()
-    version_number = client_info["version"]["number"]
-    build_hash = client_info["version"]["build_hash"]
-
-    # Now talk to the artifacts API with the 'STACK_VERSION' environment variable
-    resp = http.request(
-        "GET",
-        f"https://artifacts-api.elastic.co/v1/versions/{version_number}",
-    )
-    resp = json.loads(resp.data.decode("utf-8"))
-
-    # Look through every build and see if one matches the commit hash
-    # we're looking for. If not it's okay, we'll just use the latest and
-    # hope for the best!
-    builds = resp["version"]["builds"]
-    for build in builds:
-        if build["projects"]["elasticsearch"]["commit_hash"] == build_hash:
-            break
-    else:
-        build = builds[0]  # Use the latest
-
-    # Now we're looking for the 'rest-api-spec--sources.jar' file
-    # to download and extract in-memory.
-    packages = build["projects"]["elasticsearch"]["packages"]
-    for package in packages:
-        if re.match(r"rest-resources-zip-.*\.zip", package):
-            package_url = packages[package]["url"]
-            break
-    else:
-        raise RuntimeError(
-            f"Could not find the package 'rest-resources-zip-*.zip' in build {build!r}"
-        )
+    yaml_tests_url = "https://api.github.com/repos/elastic/serverless-clients-tests/zipball/572c72b5b32f145e62a966563688e46401d3d28a"
 
     # Download the zip and start reading YAML from the files in memory
     package_zip = zipfile.ZipFile(
         io.BytesIO(
             http.request(
-                "GET", package_url, headers={"content-type": "application/json"}
+                "GET",
+                yaml_tests_url,
+                headers={
+                    "Authorization": f"Bearer {github_token}",
+                    "Accept": "application/vnd.github+json",
+                },
             ).data
         )
     )
+
     for yaml_file in package_zip.namelist():
-        if not re.match(r"^rest-api-spec/test/.*\.ya?ml$", yaml_file):
+        if not re.match(r"^.*\/tests\/.*\.ya?ml$", yaml_file):
             continue
         yaml_tests = list(
             yaml.load_all(package_zip.read(yaml_file), Loader=NoDatesSafeLoader)
@@ -660,7 +616,6 @@ def _pytest_param_sort_key(param: pytest.param) -> Tuple[Union[str, int], ...]:
 
 # Sort the tests by ID so they're grouped together nicely.
 YAML_TEST_SPECS = sorted(YAML_TEST_SPECS, key=_pytest_param_sort_key)
-
 if not RUN_ASYNC_REST_API_TESTS:
 
     @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS)
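Taken together, the hunks above drop the old artifacts-API lookup (and the version-skip machinery it fed) in favor of a straight zipball download from the serverless-clients-tests repo. As a reference, the new loading path condenses to the sketch below. This is a minimal standalone approximation, not the suite's code: yaml.SafeLoader stands in for the suite's NoDatesSafeLoader subclass, and the final print is illustrative only.

import io
import os
import re
import zipfile

import urllib3
import yaml

# The runner exports GITHUB_TOKEN; the zipball endpoint redirects to
# codeload.github.com, and urllib3's PoolManager follows redirects by default.
github_token = os.environ["GITHUB_TOKEN"]
http = urllib3.PoolManager(retries=10)
resp = http.request(
    "GET",
    "https://api.github.com/repos/elastic/serverless-clients-tests/zipball/main",
    headers={
        "Authorization": f"Bearer {github_token}",
        "Accept": "application/vnd.github+json",
    },
)

# GitHub zipball entries are prefixed with "<org>-<repo>-<sha>/", hence the
# regex matches on the "tests/" path segment rather than a fixed prefix.
package_zip = zipfile.ZipFile(io.BytesIO(resp.data))
for name in package_zip.namelist():
    if not re.match(r"^.*\/tests\/.*\.ya?ml$", name):
        continue
    docs = list(yaml.load_all(package_zip.read(name), Loader=yaml.SafeLoader))
    print(name, len(docs))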
From d4da1b3628a9aab30ed198afab6abea96eea85e2 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 13:02:53 -0500
Subject: [PATCH 06/15] Drop unnecessary line break

---
 .buildkite/run-tests | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.buildkite/run-tests b/.buildkite/run-tests
index 3ef4af6..76f9b87 100755
--- a/.buildkite/run-tests
+++ b/.buildkite/run-tests
@@ -37,7 +37,7 @@ deployment=$(docker run --rm \
   -v "$(pwd)/cloud.json:/root/.elastic/cloud.json" \
   docker.elastic.co/employees/dolaru/qaf:latest \
   bash -c 'qaf elastic-cloud projects create --project-type elasticsearch && \
-           qaf elastic-cloud projects describe --as-json --show-credentials') \
+           qaf elastic-cloud projects describe --as-json --show-credentials')
 
 ES_API_SECRET_KEY=$(echo "$deployment" | jq -r '.credentials.api_key')
 ELASTICSEARCH_URL=$(echo "$deployment" | jq -r '.elasticsearch.url')

From 5afee8d0519464fa131393358d9ebae970403d42 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 13:15:06 -0500
Subject: [PATCH 07/15] Put back a misplaced `refresh`

---
 test_elasticsearch_serverless/test_server/test_helpers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test_elasticsearch_serverless/test_server/test_helpers.py b/test_elasticsearch_serverless/test_server/test_helpers.py
index 4bf1967..2f7c82e 100644
--- a/test_elasticsearch_serverless/test_server/test_helpers.py
+++ b/test_elasticsearch_serverless/test_server/test_helpers.py
@@ -694,7 +694,7 @@ def test_log_warning_on_shard_failures(sync_client):
     for x in range(4):
         bulk.append({"index": {"_index": "test_index"}})
         bulk.append({"value": x})
-    sync_client.bulk(operations=bulk)
+    sync_client.bulk(operations=bulk, refresh=True)
 
     with patch(
         "elasticsearch_serverless.helpers.actions.logger"
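The `refresh` fixes in PATCH 05 and PATCH 07 both come down to Elasticsearch's near-real-time search: documents written via `bulk` only become visible to `search`/`count` after an index refresh, so assertions that run immediately after indexing must force one. A minimal sketch of the pattern, assuming a reachable instance (the URL, API key, and index name are placeholders, not values from this repo):

from elasticsearch_serverless import Elasticsearch, helpers

# Placeholder connection details; the real tests get these from fixtures.
client = Elasticsearch("https://my-project.es.example.io:443", api_key="...")

docs = ({"_index": "test_index", "value": x} for x in range(4))

# Without a refresh these writes may not be searchable yet, and the count
# below could legitimately return 0.
helpers.bulk(client, docs, refresh="wait_for")

assert client.count(index="test_index")["count"] == 4

`refresh=True` forces an immediate refresh, while `refresh="wait_for"` blocks the write request until a scheduled refresh happens; either makes the follow-up count deterministic.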
From c0270d56186a54fa8c2e086da0d5060197c210c1 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 13:32:00 -0500
Subject: [PATCH 08/15] More linter surprises

---
 .../test_server/test_mapbox_vector_tile.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py b/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py
index baf1411..5ed3594 100644
--- a/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py
+++ b/test_elasticsearch_serverless/test_server/test_mapbox_vector_tile.py
@@ -112,9 +112,7 @@ def test_mapbox_vector_tile_error(elasticsearch_url, mvt_setup, node_class):
 
 
 @pytest.mark.parametrize("node_class", ["urllib3", "requests"])
-def test_mapbox_vector_tile_response(
-    elasticsearch_url, mvt_setup, node_class
-):
+def test_mapbox_vector_tile_response(elasticsearch_url, mvt_setup, node_class):
     try:
         import mapbox_vector_tile
     except ImportError:

From 1cfbbe59d28fa5be394ab95c1f1f8c58d07f7c17 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 13:34:54 -0500
Subject: [PATCH 09/15] Pull zipball from main

Was using a specific commit, which has been merged to main.

---
 .../test_server/test_rest_api_spec.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py
index 67769c3..ae94e26 100644
--- a/test_elasticsearch_serverless/test_server/test_rest_api_spec.py
+++ b/test_elasticsearch_serverless/test_server/test_rest_api_spec.py
@@ -540,7 +540,9 @@ def remove_implicit_resolver(cls, tag_to_remove):
     http = urllib3.PoolManager(retries=10)
     client = Elasticsearch(es_url(), api_key=es_api_key(), request_timeout=3)
 
-    yaml_tests_url = "https://api.github.com/repos/elastic/serverless-clients-tests/zipball/572c72b5b32f145e62a966563688e46401d3d28a"
+    yaml_tests_url = (
+        "https://api.github.com/repos/elastic/serverless-clients-tests/zipball/main"
+    )
 
     # Download the zip and start reading YAML from the files in memory
     package_zip = zipfile.ZipFile(

From 1cfc4d47654befda9fb5f7ead6df61e032cffa36 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 14:41:26 -0500
Subject: [PATCH 10/15] Push more env vars to the test container

---
 .buildkite/run-tests | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.buildkite/run-tests b/.buildkite/run-tests
index 76f9b87..68132cb 100755
--- a/.buildkite/run-tests
+++ b/.buildkite/run-tests
@@ -70,6 +70,9 @@ docker run \
   -e STACK_VERSION \
   -e TEST_SUITE \
   -e GITHUB_TOKEN \
+  -e AIOHTTP_NO_EXTENSIONS \
+  -e FROZENLIST_NO_EXTENSIONS \
+  -e YARL_NO_EXTENSIONS \
   --name elasticsearch-serverless-python-tests \
   --volume "$(pwd)/junit:/code/elasticsearch-serverless-python/junit" \
   --rm \

From a07785e630adbb918bc1a7a3cc9cd897ccc625d1 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 15:08:32 -0500
Subject: [PATCH 11/15] Pull qaf runs out into a function

---
 .buildkite/run-tests | 25 +++++++++++--------------
 1 file changed, 11 insertions(+), 14 deletions(-)

diff --git a/.buildkite/run-tests b/.buildkite/run-tests
index 68132cb..e8706e5 100755
--- a/.buildkite/run-tests
+++ b/.buildkite/run-tests
@@ -11,9 +11,8 @@ set -euo pipefail
 CLOUD_ACCESS_KEY=$(vault read -field="$EC_ENV" secret/ci/elastic-elasticsearch-serverless-python/cloud-access)
 echo "{\"api_key\":{\"$EC_ENV\":\"$CLOUD_ACCESS_KEY\"}}" > "$(pwd)/cloud.json"
 
-# ensure serverless instance is deleted even if script errors
-cleanup() {
-  echo -e "--- :elasticsearch: Tear down serverless instance EC_PROJECT_NAME"
+run_qaf() {
+  cmd=$1
   docker run --rm \
     -e EC_REGISTER_BACKEND \
     -e EC_ENV \
@@ -22,22 +21,20 @@ cleanup() {
     -e VAULT_TOKEN \
     -v "$(pwd)/cloud.json:/root/.elastic/cloud.json" \
     docker.elastic.co/employees/dolaru/qaf:latest \
-    bash -c 'qaf elastic-cloud projects delete'
+    bash -c "$cmd"
+}
+
+# ensure serverless instance is deleted even if script errors
+cleanup() {
+  echo -e "--- :elasticsearch: Tear down serverless instance EC_PROJECT_NAME"
+  run_qaf 'qaf elastic-cloud projects delete'
 }
 trap cleanup EXIT
 
 echo -e "--- :elasticsearch: Start serverless instance"
 
-deployment=$(docker run --rm \
-  -e EC_REGISTER_BACKEND \
-  -e EC_ENV \
-  -e EC_REGION \
-  -e EC_PROJECT_NAME \
-  -e VAULT_TOKEN \
-  -v "$(pwd)/cloud.json:/root/.elastic/cloud.json" \
-  docker.elastic.co/employees/dolaru/qaf:latest \
-  bash -c 'qaf elastic-cloud projects create --project-type elasticsearch && \
-           qaf elastic-cloud projects describe --as-json --show-credentials')
+run_qaf "qaf elastic-cloud projects create --project-type elasticsearch"
+deployment=$(run_qaf "qaf elastic-cloud projects describe --as-json --show-credentials")
 
 ES_API_SECRET_KEY=$(echo "$deployment" | jq -r '.credentials.api_key')
 ELASTICSEARCH_URL=$(echo "$deployment" | jq -r '.elasticsearch.url')
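PATCH 11 and the cleanup tweaks that follow all lean on the `trap cleanup EXIT` idiom: teardown runs whether the script succeeds or dies partway, so the serverless project and credentials never outlive the build. For readers more at home in Python, roughly the same guarantee looks like the sketch below; `run_qaf` here is a stub standing in for the dockerized qaf invocation, not the repo's actual code.

import atexit
import os


def run_qaf(cmd: str) -> None:
    # Stub: the real script runs this inside the qaf container.
    print(f"would run in qaf container: {cmd}")


def cleanup() -> None:
    # Tear down the project and credentials no matter how the run ended.
    run_qaf("qaf elastic-cloud projects delete")
    try:
        os.remove(os.path.join(os.getcwd(), "cloud.json"))
    except FileNotFoundError:
        pass


# Like `trap cleanup EXIT`: fires on normal exit, sys.exit(), or an
# unhandled exception (though not on SIGKILL or os._exit()).
atexit.register(cleanup)

run_qaf("qaf elastic-cloud projects create --project-type elasticsearch")
# ... create the deployment, run the test container, etc.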
From 63f1911c565c19adeccbfa363b99b1055beb63bd Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 15:08:46 -0500
Subject: [PATCH 12/15] Ensure each build's project has a unique name

---
 .buildkite/rest-tests.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.buildkite/rest-tests.yaml b/.buildkite/rest-tests.yaml
index 62bbf09..973e661 100644
--- a/.buildkite/rest-tests.yaml
+++ b/.buildkite/rest-tests.yaml
@@ -17,7 +17,7 @@ steps:
       EC_REGISTER_BACKEND: "appex-qa-team-cluster"
       EC_ENV: "qa"
       EC_REGION: "aws-eu-west-1"
-      EC_PROJECT_NAME: "esv-client-python-test-{{ matrix.python }}-{{ matrix.suite }}-{{ matrix.connection_class }}"
+      EC_PROJECT_NAME: "esv-client-python-test-{{ matrix.python }}-{{ matrix.suite }}-{{ matrix.connection_class }}-{{ key }}"
     matrix:
       setup:
         suite:

From 049c0fd2d7f1a062584a82dfef9ca505d6b3a46c Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 15:36:04 -0500
Subject: [PATCH 13/15] Remove cloud creds as part of cleanup

---
 .buildkite/run-tests | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.buildkite/run-tests b/.buildkite/run-tests
index e8706e5..1b62555 100755
--- a/.buildkite/run-tests
+++ b/.buildkite/run-tests
@@ -28,6 +28,7 @@ run_qaf() {
 cleanup() {
   echo -e "--- :elasticsearch: Tear down serverless instance EC_PROJECT_NAME"
   run_qaf 'qaf elastic-cloud projects delete'
+  rm -rf cloud.json
 }
 trap cleanup EXIT

From 7e9fdae2f8fa0f1f340b4467825fdaa5abd1e76b Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 15:36:19 -0500
Subject: [PATCH 14/15] Drop unused STACK_VERSION env var

---
 .buildkite/run-tests | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.buildkite/run-tests b/.buildkite/run-tests
index 1b62555..37cf092 100755
--- a/.buildkite/run-tests
+++ b/.buildkite/run-tests
@@ -43,7 +43,6 @@ export ELASTICSEARCH_URL
 
 echo -e "--- :computer: Environment variables"
 echo -e "ELASTICSEARCH_URL $ELASTICSEARCH_URL"
-echo -e "STACK_VERSION $STACK_VERSION"
 echo -e "TEST_SUITE $TEST_SUITE"
 echo -e "PYTHON_VERSION $PYTHON_VERSION"
 echo -e "PYTHON_CONNECTION_CLASS $PYTHON_CONNECTION_CLASS"
@@ -65,7 +64,6 @@ docker run \
   -e ELASTICSEARCH_URL \
   -e "ES_API_KEY=$ES_API_SECRET_KEY" \
   -e PYTHON_CONNECTION_CLASS \
-  -e STACK_VERSION \
   -e TEST_SUITE \
   -e GITHUB_TOKEN \

From 8c8721a74459c19f894e121c60f2cbd3484e94d9 Mon Sep 17 00:00:00 2001
From: Josh Mock
Date: Thu, 21 Sep 2023 15:37:55 -0500
Subject: [PATCH 15/15] Use pwd for cloud cred removal

---
 .buildkite/run-tests | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.buildkite/run-tests b/.buildkite/run-tests
index 37cf092..cdfb422 100755
--- a/.buildkite/run-tests
+++ b/.buildkite/run-tests
@@ -28,7 +28,7 @@ run_qaf() {
 cleanup() {
   echo -e "--- :elasticsearch: Tear down serverless instance EC_PROJECT_NAME"
   run_qaf 'qaf elastic-cloud projects delete'
-  rm -rf cloud.json
+  rm -rf "$(pwd)/cloud.json"
 }
 trap cleanup EXIT