From 34bfcf10a3422dd67086c6930643229a266a865f Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 09:31:00 +0400 Subject: [PATCH 1/8] [7.17] Fix CI --- {.ci => .buildkite}/Dockerfile | 11 +- .buildkite/certs/ca.crt | 20 ++ .buildkite/certs/ca.pem | 20 ++ .buildkite/certs/testnode.crt | 20 ++ .buildkite/certs/testnode.key | 27 +++ {.ci => .buildkite}/functions/cleanup.sh | 0 {.ci => .buildkite}/functions/imports.sh | 0 .../functions/wait-for-container.sh | 0 .buildkite/pipeline.yml | 26 +++ .buildkite/pull-requests.json | 9 + {.ci => .buildkite}/run-elasticsearch.sh | 6 + .buildkite/run-nox.sh | 6 + {.ci => .buildkite}/run-repository.sh | 15 +- {.ci => .buildkite}/run-tests | 12 +- .ci/certs/ca.crt | 20 -- .ci/certs/ca.pem | 20 -- .ci/certs/testnode.crt | 21 -- .ci/certs/testnode.key | 27 --- .ci/make.sh | 179 ------------------ .ci/test-matrix.yml | 32 ---- .github/workflows/unified-release.yml | 26 --- 21 files changed, 149 insertions(+), 348 deletions(-) rename {.ci => .buildkite}/Dockerfile (67%) create mode 100755 .buildkite/certs/ca.crt create mode 100644 .buildkite/certs/ca.pem create mode 100755 .buildkite/certs/testnode.crt create mode 100755 .buildkite/certs/testnode.key rename {.ci => .buildkite}/functions/cleanup.sh (100%) rename {.ci => .buildkite}/functions/imports.sh (100%) rename {.ci => .buildkite}/functions/wait-for-container.sh (100%) create mode 100644 .buildkite/pipeline.yml create mode 100644 .buildkite/pull-requests.json rename {.ci => .buildkite}/run-elasticsearch.sh (96%) create mode 100755 .buildkite/run-nox.sh rename {.ci => .buildkite}/run-repository.sh (85%) rename {.ci => .buildkite}/run-tests (65%) delete mode 100755 .ci/certs/ca.crt delete mode 100644 .ci/certs/ca.pem delete mode 100755 .ci/certs/testnode.crt delete mode 100755 .ci/certs/testnode.key delete mode 100755 .ci/make.sh delete mode 100755 .ci/test-matrix.yml delete mode 100644 .github/workflows/unified-release.yml diff --git a/.ci/Dockerfile b/.buildkite/Dockerfile similarity index 67% rename from .ci/Dockerfile rename to .buildkite/Dockerfile index c677a2372..3c429a37b 100644 --- a/.ci/Dockerfile +++ b/.buildkite/Dockerfile @@ -7,19 +7,14 @@ ARG BUILDER_UID=1000 ARG BUILDER_GID=1000 ENV BUILDER_USER elastic ENV BUILDER_GROUP elastic +ENV PATH="${PATH}:/var/lib/elastic/.local/bin" # Create user RUN groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP} \ && useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GROUP} -d /var/lib/elastic -m elastic 1>/dev/null 2>/dev/null \ && mkdir -p /code/elasticsearch-py && mkdir /code/elasticsearch-py/build \ - && chown -R ${BUILDER_USER}:${BUILDER_GROUP} /code/elasticsearch-py -COPY --chown=$BUILDER_USER:$BUILDER_GROUP . . + && chown -R ${BUILDER_USER}:${BUILDER_GROUP} /code/ WORKDIR /code/elasticsearch-py USER ${BUILDER_USER}:${BUILDER_GROUP} -COPY dev-requirements.txt . -RUN python -m pip install \ - -U --no-cache-dir \ - --disable-pip-version-check \ - nox -rdev-requirements.txt +RUN python -m pip install --disable-pip-version-check nox COPY --chown=$BUILDER_USER:$BUILDER_GROUP . . -RUN python -m pip install -U -e . 
\ No newline at end of file diff --git a/.buildkite/certs/ca.crt b/.buildkite/certs/ca.crt new file mode 100755 index 000000000..5ed1c9853 --- /dev/null +++ b/.buildkite/certs/ca.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDSTCCAjGgAwIBAgIUHTeTPPuZIX3wdyudMsllXa9yZ1kwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMjMwODIxMTcyNTMyWhcNMjYwODIwMTcyNTMyWjA0MTIwMAYD +VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMZs7DXbV7ovFvQ/CiqvHHZ/ +40rLyAcBQMhVBke2VVCQk3hIOPpHYt3xZgb61Oyrf14lFxny483beXaUqGThZ67Y +RsxzSOS8NUi21OLZ3xaE+p+Yx9Xe6lTMQJM4RpD/A5V35uikji1K4+F0ooJghELq +Fndmark/7SQFh6Bg8/aaf6Hpyar3WOWdQjHXgszNAv1Ez7+pPlfnCS8XNjYB5Y2n +gAayb1krMRW/3E6hRVZAig3I2H8mezL5tF8iS5aJW1WLpw4oYnbH0DdS+gpCK1lT +8GZd8Dk0QbNGpXNTu67BravVhgEoprBVMz6G1C4MiuVcBy7gA671/f46S4Tgb10C +AwEAAaNTMFEwHQYDVR0OBBYEFHVhRrHXbd5QFEgk3RFn4Y4LYo9PMB8GA1UdIwQY +MBaAFHVhRrHXbd5QFEgk3RFn4Y4LYo9PMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI +hvcNAQELBQADggEBACoGVPqeYE3IPRdSAtihIq071BfGA8vgfJWv0qiI0T+gYetX +dnebmQc5EccdEIrxD0bLKPgzd5c3ILwQy5+uo03ua7PrplwPVdeNXnU1LodAQ0Zb +GmTixXqgj8AMcvRsA7qARjXvf6w3Yyb7GO3FXRIGtqk12Vb1qnJg894CSIWrHiw0 +hRO5b7eJyrOy2s6QA6FucM/scM1Z/8D9tHfgwmrKM875VGerJORwfHCaCvF1YvBj +cIpYNnw2vFzDvRevh63sSQbZ9q3nbtD27AZSN9LKEbipSEOoBZMKG2zgDTT/Olzx +EQJ2t+Z487UuFX6+WaLZMteL2F4eh9OFWIYM3EI= +-----END CERTIFICATE----- diff --git a/.buildkite/certs/ca.pem b/.buildkite/certs/ca.pem new file mode 100644 index 000000000..5ed1c9853 --- /dev/null +++ b/.buildkite/certs/ca.pem @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDSTCCAjGgAwIBAgIUHTeTPPuZIX3wdyudMsllXa9yZ1kwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMjMwODIxMTcyNTMyWhcNMjYwODIwMTcyNTMyWjA0MTIwMAYD +VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMZs7DXbV7ovFvQ/CiqvHHZ/ +40rLyAcBQMhVBke2VVCQk3hIOPpHYt3xZgb61Oyrf14lFxny483beXaUqGThZ67Y +RsxzSOS8NUi21OLZ3xaE+p+Yx9Xe6lTMQJM4RpD/A5V35uikji1K4+F0ooJghELq +Fndmark/7SQFh6Bg8/aaf6Hpyar3WOWdQjHXgszNAv1Ez7+pPlfnCS8XNjYB5Y2n +gAayb1krMRW/3E6hRVZAig3I2H8mezL5tF8iS5aJW1WLpw4oYnbH0DdS+gpCK1lT +8GZd8Dk0QbNGpXNTu67BravVhgEoprBVMz6G1C4MiuVcBy7gA671/f46S4Tgb10C +AwEAAaNTMFEwHQYDVR0OBBYEFHVhRrHXbd5QFEgk3RFn4Y4LYo9PMB8GA1UdIwQY +MBaAFHVhRrHXbd5QFEgk3RFn4Y4LYo9PMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI +hvcNAQELBQADggEBACoGVPqeYE3IPRdSAtihIq071BfGA8vgfJWv0qiI0T+gYetX +dnebmQc5EccdEIrxD0bLKPgzd5c3ILwQy5+uo03ua7PrplwPVdeNXnU1LodAQ0Zb +GmTixXqgj8AMcvRsA7qARjXvf6w3Yyb7GO3FXRIGtqk12Vb1qnJg894CSIWrHiw0 +hRO5b7eJyrOy2s6QA6FucM/scM1Z/8D9tHfgwmrKM875VGerJORwfHCaCvF1YvBj +cIpYNnw2vFzDvRevh63sSQbZ9q3nbtD27AZSN9LKEbipSEOoBZMKG2zgDTT/Olzx +EQJ2t+Z487UuFX6+WaLZMteL2F4eh9OFWIYM3EI= +-----END CERTIFICATE----- diff --git a/.buildkite/certs/testnode.crt b/.buildkite/certs/testnode.crt new file mode 100755 index 000000000..39eb092fa --- /dev/null +++ b/.buildkite/certs/testnode.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIVAKLWEcNzTd4B0NqnrJL0xAKaS8DWMA0GCSqGSIb3DQEB +CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu +ZXJhdGVkIENBMB4XDTIzMDgyMTE3MjcwMloXDTI2MDgyMDE3MjcwMlowEzERMA8G +A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC8 +eLXL3ZX5v8JlHcfg+96Bpq24EeiqV+7RPPKbcH80ODjkETqYUpam+TcOl2gt23p/ +rpiPSSpOX8pFdmY78wTmxo2GCQZ/db2h0gZOOYpb8HQku+hJ4bAmtzizrqWW76Wz +csen3DSUkT0bKkJTjUMmwVhRaMpfv8EIcUbrHAwc3VCj7grnFL0kdAuQa6iyBH4I 
+lTUYOIOVyEJ8zZ7R4BJO3QU+TRuJ5+w/QiZMeDqxtrdDL37vYQHPW7L/XISCCOMp +sA3avzFphoQXBQ8mjdB8Txkd4sH7mJTqnRp5ILhRzVpcPPgQYFeIB567B+kFeSau +aJJmc0EVgOcK5aSMtOH3AgMBAAGjYjBgMB0GA1UdDgQWBBQsZbZDudZ63h52FlU5 +N2g3pznkETAfBgNVHSMEGDAWgBR1YUax123eUBRIJN0RZ+GOC2KPTzATBgNVHREE +DDAKgghpbnN0YW5jZTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBCwUAA4IBAQAyv0Cw +OrvZn7FHHS8TJI5vTi1F43R/eSNMNL/+q/nK93KaxWJH1T4zrJhrJ9KpzkFcalXP +bu02oTh28b3o3QpS2wdwMv/Q3NLoMBEmQlG2UrELFvV43nS8LCiwCX3o11L1HZP3 +1Z/rclwxbA4OQ/ZkPcol++TDZQTM/8WkIdZmTL4UDb/ppDjX24nTOitkMRZlYAOY +mid9GGExhKrUJ0I9/A3w1hWRA1Hwc+1TFDcPphl2x2uQ9HJFBueAvuFXmIjDki1x +qrvnFZ+mneI9kR4m82MX900WF15KS35GzmMui0tsf0wbfy3Jh+WnpMlIIa2OQXw7 +prbkg9tScQSsvhC8 +-----END CERTIFICATE----- diff --git a/.buildkite/certs/testnode.key b/.buildkite/certs/testnode.key new file mode 100755 index 000000000..b7458996a --- /dev/null +++ b/.buildkite/certs/testnode.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAvHi1y92V+b/CZR3H4PvegaatuBHoqlfu0Tzym3B/NDg45BE6 +mFKWpvk3DpdoLdt6f66Yj0kqTl/KRXZmO/ME5saNhgkGf3W9odIGTjmKW/B0JLvo +SeGwJrc4s66llu+ls3LHp9w0lJE9GypCU41DJsFYUWjKX7/BCHFG6xwMHN1Qo+4K +5xS9JHQLkGuosgR+CJU1GDiDlchCfM2e0eASTt0FPk0biefsP0ImTHg6sba3Qy9+ +72EBz1uy/1yEggjjKbAN2r8xaYaEFwUPJo3QfE8ZHeLB+5iU6p0aeSC4Uc1aXDz4 +EGBXiAeeuwfpBXkmrmiSZnNBFYDnCuWkjLTh9wIDAQABAoIBAAU0iEDTI9s78pB8 +XBLYofKOuemFhRl/SDc7KbAlUT4N93RFDYs7bLG73Eto3xW1JBL2rXv3l1WGy71T +YctyEMaW4T28bhODGvOnK0lpyWp0n6CMGARCWW0YTlaYEjay866bEuyN5l3cDQX9 +Csvn8NzXJitJa51tXFVxW3YO1j7Nyc/M59oyBZ1ARYYmQqFYLEu6lvJOW0cKDFkZ +AcMVlOIxZQL/Mf+RO72aQGVuYNjqxlLIXLuE9zFR2gDFM2+l3FMUWDGHGBDFyjKU +iMk4+sSlOTFXqO9VQzua6FLFMsQT6m5PFD4uPY92KR6CPfH/NrWqwqr+jpjaU+gs +3U9GN+ECgYEA58qX7tKPk7CWdk3kyk5NsNcs/qib+heXWEubfhoU8LmSnbBQhOAz +wi//r/xm0OHGj84y66+G3T347iudrLjhr07oGM1QfjYT3kb90efLjwAfCECtyVYL +EQrWO5UeoTnmrhlB1mGL3sWaVAsVqNLz8i2H5c7sj0hxHsvM62159r8CgYEA0Cff +opJqmUpMpHm3sgjMWctylVrHBuQe5cl5Ad80pbd6mvtt4TvGXbUGNdzURfyve9DS +x1CVlj4Sz8VuelFQgYL+7/qUqZoms1aSgJpxWv8ou+wUHmlF3kVO8VKt3BNHV+8J +euSB6NG91BGguBoHgnOoVcjbDGdhJGRTojCNWskCgYEA1jE3nwDCnrbTA3XNk0ky +r9TXhmgm4r+EIpqTkL7nVOAXZVJ1xaQtECgsveKe3C2WwHLKSVMFbFMFQonZha+/ +FbHz9l9cH5U3XPL7QEpTp8xz4LtsHJ4/UbtS5vJQwKnxyjYaydGQYAb4KuunUz/F +H6kFaM6DeZB2v/+SWIfs6Z8CgYARUdAEyeP+vzTjVpFXSe4e5pOxI619wEtl2T6t +TjImO78C2DrLS9r0fxR2NNqgvCapybVQCj94EdAk44uOt+dX71thAluORRpFP8XO +14rpBGQSRtFhumaq2N95quR2dFAyW9xREmRQx+rgk1rpFplbXF48TQsU3CE0Evj2 +fM22KQKBgDhob7M9sWvXecxoyy3J17jUTcFqmqKcqGnx3ZJ7Q9CgAfjYqNNQws27 +wTuaJB0PEuCOu4t+lUHEBMIjGkBfo1bHd4EZaW04Xgbfn2j8MK2e+9GlRtedxxFg +c1JdRb5+eTgPwLcDsmMWIW357PDW7RDEI07G1ZB4SqxGTKkU7JOW +-----END RSA PRIVATE KEY----- diff --git a/.ci/functions/cleanup.sh b/.buildkite/functions/cleanup.sh similarity index 100% rename from .ci/functions/cleanup.sh rename to .buildkite/functions/cleanup.sh diff --git a/.ci/functions/imports.sh b/.buildkite/functions/imports.sh similarity index 100% rename from .ci/functions/imports.sh rename to .buildkite/functions/imports.sh diff --git a/.ci/functions/wait-for-container.sh b/.buildkite/functions/wait-for-container.sh similarity index 100% rename from .ci/functions/wait-for-container.sh rename to .buildkite/functions/wait-for-container.sh diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml new file mode 100644 index 000000000..eac73135f --- /dev/null +++ b/.buildkite/pipeline.yml @@ -0,0 +1,26 @@ +steps: + - label: ":elasticsearch: ES {{matrix.stack_version }} :python: Python {{ matrix.python }} ({{ matrix.connection_class }})" + agents: + provider: "gcp" + env: + PYTHON_VERSION: "{{ matrix.python }}" + TEST_SUITE: 
"platinum" + STACK_VERSION: "{{ matrix.stack_version }}" + PYTHON_CONNECTION_CLASS: "{{ matrix.connection_class }}" + matrix: + setup: + python: + - "2.7" + - "3.6" + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + connection_class: + - "Urllib3HttpConnection" + - "RequestsHttpConnection" + stack_version: + - "7.17.0-SNAPSHOT" + - "8.0.0-SNAPSHOT" + command: ./.buildkite/run-tests diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json new file mode 100644 index 000000000..8eae593f0 --- /dev/null +++ b/.buildkite/pull-requests.json @@ -0,0 +1,9 @@ +{ + "jobs": [ + { + "enabled": true, + "pipeline_slug": "elasticsearch-py-integration-tests", + "allow_org_users": true + } + ] +} diff --git a/.ci/run-elasticsearch.sh b/.buildkite/run-elasticsearch.sh similarity index 96% rename from .ci/run-elasticsearch.sh rename to .buildkite/run-elasticsearch.sh index 05046db9c..2f73ea8d1 100755 --- a/.ci/run-elasticsearch.sh +++ b/.buildkite/run-elasticsearch.sh @@ -31,6 +31,12 @@ cleanup_node $es_node_name master_node_name=${es_node_name} cluster_name=${moniker}${suffix} +BUILDKITE=${BUILDKITE-false} +# Set vm.max_map_count kernel setting to 262144 if we're in CI +if [[ "$BUILDKITE" == "true" ]]; then + sudo sysctl -w vm.max_map_count=262144 +fi + declare -a volumes environment=($(cat <<-END --env ELASTIC_PASSWORD=$elastic_password diff --git a/.buildkite/run-nox.sh b/.buildkite/run-nox.sh new file mode 100755 index 000000000..ab8f4be01 --- /dev/null +++ b/.buildkite/run-nox.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +if [[ -z "$NOX_SESSION" ]]; then + NOX_SESSION=test-${PYTHON_VERSION%-dev} +fi +nox -s $NOX_SESSION diff --git a/.ci/run-repository.sh b/.buildkite/run-repository.sh similarity index 85% rename from .ci/run-repository.sh rename to .buildkite/run-repository.sh index 014cb5ede..466b906cc 100755 --- a/.ci/run-repository.sh +++ b/.buildkite/run-repository.sh @@ -24,28 +24,25 @@ echo -e "\033[34;1mINFO:\033[0m PYTHON_CONNECTION_CLASS ${PYTHON_CONNECTION_CLAS echo -e "\033[1m>>>>> Build [elastic/elasticsearch-py container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m" docker build \ - --file .ci/Dockerfile \ + --file .buildkite/Dockerfile \ --tag elastic/elasticsearch-py \ - --build-arg PYTHON_VERSION=${PYTHON_VERSION} \ + --build-arg "PYTHON_VERSION=${PYTHON_VERSION}" \ + --build-arg "BUILDER_UID=$(id -u)" \ + --build-arg "BUILDER_GID=$(id -g)" \ . 
echo -e "\033[1m>>>>> Run [elastic/elasticsearch-py container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m" -if [[ "$STACK_VERSION" == "8.0.0-SNAPSHOT" ]]; then - export ELASTIC_CLIENT_APIVERSIONING="true" -fi - mkdir -p junit docker run \ - -u "$(id -u)" \ + -u "$(id -u):$(id -g)" \ --network=${network_name} \ --env "STACK_VERSION=${STACK_VERSION}" \ --env "ELASTICSEARCH_URL=${elasticsearch_url}" \ --env "TEST_SUITE=${TEST_SUITE}" \ --env "PYTHON_CONNECTION_CLASS=${PYTHON_CONNECTION_CLASS}" \ --env "TEST_TYPE=server" \ - --env "ELASTIC_CLIENT_APIVERSIONING=${ELASTIC_CLIENT_APIVERSIONING:-false}" \ --name elasticsearch-py \ --rm \ elastic/elasticsearch-py \ - python setup.py test + nox -s test-${PYTHON_VERSION} diff --git a/.ci/run-tests b/.buildkite/run-tests similarity index 65% rename from .ci/run-tests rename to .buildkite/run-tests index e0acf5d9a..7e7339fb6 100755 --- a/.ci/run-tests +++ b/.buildkite/run-tests @@ -8,23 +8,23 @@ export STACK_VERSION="${STACK_VERSION:=8.0.0-SNAPSHOT}" export TEST_SUITE="${TEST_SUITE:=platinum}" export PYTHON_VERSION="${PYTHON_VERSION:=3.9}" -export PYTHON_CONNECTION_CLASS="${PYTHON_CONNECTION_CLASS:=Urllib3HttpConnection}" +export PYTHON_CONNECTION_CLASS="${PYTHON_CONNECTION_CLASS:=urllib3}" script_path=$(dirname $(realpath -s $0)) source $script_path/functions/imports.sh set -euo pipefail -echo -e "\033[1m>>>>> Start [$STACK_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m" -DETACH=true bash .ci/run-elasticsearch.sh +echo "--- :elasticsearch: Starting Elasticsearch" +DETACH=true bash $script_path/run-elasticsearch.sh if [[ -n "$RUNSCRIPTS" ]]; then for RUNSCRIPT in ${RUNSCRIPTS//,/ } ; do echo -e "\033[1m>>>>> Running run-$RUNSCRIPT.sh >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m" CONTAINER_NAME=${RUNSCRIPT} \ DETACH=true \ - bash .ci/run-${RUNSCRIPT}.sh + bash $script_path/run-${RUNSCRIPT}.sh done fi -echo -e "\033[1m>>>>> Repository specific tests >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m" -bash .ci/run-repository.sh +echo "+++ :python: Client tests" +bash $script_path/run-repository.sh diff --git a/.ci/certs/ca.crt b/.ci/certs/ca.crt deleted file mode 100755 index 71f9bfc81..000000000 --- a/.ci/certs/ca.crt +++ /dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIVAJQLm8V2LcaCTHUcoIfO+KL63nG3MA0GCSqGSIb3DQEB -CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu -ZXJhdGVkIENBMB4XDTIwMDIyNjA1NTA1N1oXDTIzMDIyNTA1NTA1N1owNDEyMDAG -A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew -ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDYyajkPvGtUOE5M1OowQfB -kWVrWjo1+LIxzgCeRHp0YztLtdVJ0sk2xoSrt2uZpxcPepdyOseLTjFJex1D2yCR -AEniIqcFif4G72nDih2LlbhpUe/+/MTryj8ZTkFTzI+eMmbQi5FFMaH+kwufmdt/ -5/w8YazO18SxxJUlzMqzfNUrhM8vvvVdxgboU7PWhk28wZHCMHQovomHmzclhRpF -N0FMktA98vHHeRjH19P7rNhifSd7hZzoH3H148HVAKoPgqnZ6vW2O2YfAWOP6ulq -cyszr57p8fS9B2wSdlWW7nVHU1JuKcYD67CxbBS23BeGFgCj4tiNrmxO8S5Yf85v -AgMBAAGjUzBRMB0GA1UdDgQWBBSWAlip9eoPmnG4p4OFZeOUBlAbNDAfBgNVHSME -GDAWgBSWAlip9eoPmnG4p4OFZeOUBlAbNDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG -SIb3DQEBCwUAA4IBAQA19qqrMTWl7YyId+LR/QIHDrP4jfxmrEELrAL58q5Epc1k -XxZLzOBSXoBfBrPdv+3XklWqXrZjKWfdkux0Xmjnl4qul+srrZDLJVZG3I7IrITh -AmQUmL9MuPiMnAcxoGZp1xpijtW8Qmd2qnambbljWfkuVaa4hcVRfrAX6TciIQ21 -bS5aeLGrPqR14h30YzDp0RMmTujEa1o6ExN0+RSTkE9m89Q6WdM69az8JW7YkWqm -I+UCG3TcLd3TXmN1zNQkq4y2ObDK4Sxy/2p6yFPI1Fds5w/zLfBOvvPQY61vEqs8 -SCCcQIe7f6NDpIRIBlty1C9IaEHj7edyHjF6rtYb ------END CERTIFICATE----- diff --git a/.ci/certs/ca.pem b/.ci/certs/ca.pem deleted file mode 100644 index 71f9bfc81..000000000 --- a/.ci/certs/ca.pem +++ 
/dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIVAJQLm8V2LcaCTHUcoIfO+KL63nG3MA0GCSqGSIb3DQEB -CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu -ZXJhdGVkIENBMB4XDTIwMDIyNjA1NTA1N1oXDTIzMDIyNTA1NTA1N1owNDEyMDAG -A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew -ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDYyajkPvGtUOE5M1OowQfB -kWVrWjo1+LIxzgCeRHp0YztLtdVJ0sk2xoSrt2uZpxcPepdyOseLTjFJex1D2yCR -AEniIqcFif4G72nDih2LlbhpUe/+/MTryj8ZTkFTzI+eMmbQi5FFMaH+kwufmdt/ -5/w8YazO18SxxJUlzMqzfNUrhM8vvvVdxgboU7PWhk28wZHCMHQovomHmzclhRpF -N0FMktA98vHHeRjH19P7rNhifSd7hZzoH3H148HVAKoPgqnZ6vW2O2YfAWOP6ulq -cyszr57p8fS9B2wSdlWW7nVHU1JuKcYD67CxbBS23BeGFgCj4tiNrmxO8S5Yf85v -AgMBAAGjUzBRMB0GA1UdDgQWBBSWAlip9eoPmnG4p4OFZeOUBlAbNDAfBgNVHSME -GDAWgBSWAlip9eoPmnG4p4OFZeOUBlAbNDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG -SIb3DQEBCwUAA4IBAQA19qqrMTWl7YyId+LR/QIHDrP4jfxmrEELrAL58q5Epc1k -XxZLzOBSXoBfBrPdv+3XklWqXrZjKWfdkux0Xmjnl4qul+srrZDLJVZG3I7IrITh -AmQUmL9MuPiMnAcxoGZp1xpijtW8Qmd2qnambbljWfkuVaa4hcVRfrAX6TciIQ21 -bS5aeLGrPqR14h30YzDp0RMmTujEa1o6ExN0+RSTkE9m89Q6WdM69az8JW7YkWqm -I+UCG3TcLd3TXmN1zNQkq4y2ObDK4Sxy/2p6yFPI1Fds5w/zLfBOvvPQY61vEqs8 -SCCcQIe7f6NDpIRIBlty1C9IaEHj7edyHjF6rtYb ------END CERTIFICATE----- diff --git a/.ci/certs/testnode.crt b/.ci/certs/testnode.crt deleted file mode 100755 index 41e608fd5..000000000 --- a/.ci/certs/testnode.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDYjCCAkqgAwIBAgIVAIZQH0fe5U+bGQ6m1JUBO/AQkQ/9MA0GCSqGSIb3DQEB -CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu -ZXJhdGVkIENBMB4XDTIwMDMyNzE5MTcxMVoXDTIzMDMyNzE5MTcxMVowEzERMA8G -A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDB -fco1t1+sE1gTwTVGcXKZqJTP2GjMHM0cfJE5KKfwC5B+pHADRT6FZxvepgKjEBDt -CK+2Rmotyeb15XXMSKguNhyT+2PuKvT5r05L7P91XRYXrwxG2swJPtct7A87xdFa -Ek+YRpqGGmTaux2jOELMiAmqEzoj6w/xFq+LF4SolTW4wOL2eLFkEFHBX2oCwU5T -Q+B+7E9zL45nFWlkeRGJ+ZQTnRNZ/1r4N9A9Gtj4x/H1/y4inWndikdxAb5QiEYJ -T+vbQWzHYWjz13ttHJsz+6T8rvA1jK+buHgVh4K8lV13X9k54soBqHB8va7/KIJP -g8gvd6vusEI7Bmfl1as7AgMBAAGjgYswgYgwHQYDVR0OBBYEFKnnpvuVYwtFSUis -WwN9OHLyExzJMB8GA1UdIwQYMBaAFJYCWKn16g+acbing4Vl45QGUBs0MDsGA1Ud -EQQ0MDKCCWxvY2FsaG9zdIIIaW5zdGFuY2WHBH8AAAGHEAAAAAAAAAAAAAAAAAAA -AAGCA2VzMTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBCwUAA4IBAQAPNsIoD4GBrTgR -jfvBuHS6eU16P95m16O8Mdpr4SMQgWLQUhs8aoVgfwpg2TkbCWxOe6khJOyNm7bf -fW4aFQ/OHcQV4Czz3c7eOHTWSyMlCOv+nRXd4giJZ5TOHw1zKGmKXOIvhvE6RfdF -uBBfrusk164H4iykm0Bbr/wo4d6wuebp3ZYLPw5zV0D08rsaR+3VJ9VxWuFpdm/r -2onYOohyuX9DRjAczasC+CRRQN4eHJlRfSQB8WfTKw3EloRJJDAg6SJyGiAJ++BF -hnqfNcEyKes2AWagFF9aTbEJMrzMhH+YB5F+S/PWvMUlFzcoocVKqc4pIrjKUNWO -6nbTxeAB ------END CERTIFICATE----- diff --git a/.ci/certs/testnode.key b/.ci/certs/testnode.key deleted file mode 100755 index 58227517f..000000000 --- a/.ci/certs/testnode.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAwX3KNbdfrBNYE8E1RnFymaiUz9hozBzNHHyROSin8AuQfqRw -A0U+hWcb3qYCoxAQ7QivtkZqLcnm9eV1zEioLjYck/tj7ir0+a9OS+z/dV0WF68M -RtrMCT7XLewPO8XRWhJPmEaahhpk2rsdozhCzIgJqhM6I+sP8RavixeEqJU1uMDi -9nixZBBRwV9qAsFOU0PgfuxPcy+OZxVpZHkRifmUE50TWf9a+DfQPRrY+Mfx9f8u -Ip1p3YpHcQG+UIhGCU/r20Fsx2Fo89d7bRybM/uk/K7wNYyvm7h4FYeCvJVdd1/Z -OeLKAahwfL2u/yiCT4PIL3er7rBCOwZn5dWrOwIDAQABAoIBAFcm4ICnculf4Sks -umFbUiISA81GjZV6V4zAMu1K+bGuk8vnJyjh9JJD6hK0NbXa07TgV7zDJKoxKd2S -GCgGhfIin2asMcuh/6vDIYIjYsErR3stdlsnzAVSD7v4ergSlwR6AO32xz0mAE1h -QK029yeHEstPU72/7/NIo5MD6dXAbut1MzgijZD8RQo1z21D6qmLcPTVTfkn7a3W -MY3y7XUIkA1TOyIRsH3k6F6NBWkvtXbwOUeLCJ14EvS8T9BqhIhPDZv8mQTRLDOD 
-tQRyC4Cnw+UhYmnMFJhj6N2jpTBv/AdoKcRC56uBJyPW+dxj6i4e7n3pQuxqRvpI -LLJJsskCgYEA4QQxzuJizLKV75rE+Qxg0Ej0Gid1aj3H5eeTZOUhm9KC8KDfPdpk -msKaNzJq/VDcqHPluGS1jYZVgZlal1nk5xKBcbQ4n297VPVd+sLtlf0bj4atlDUO -+iOVo0H7k5yWvj+TzVRlc5zjDLcnQh8i+22o3+65hIrb2zpzg/cCZJ8CgYEA3CJX -bjmWPQ0uZVIa8Wz8cJFtKT9uVl7Z3/f6HjN9I0b/9MmVlNxQVAilVwhDkzR/UawG -QeRFBJ6XWRwX0aoMq+O9VSNu/R2rtEMpIYt3LwbI3yw6GRoCdB5qeL820O+KX5Fl -/z+ZNgrHgA1yKPVf+8ke2ZtLEqPHMN+BMuq8t+UCgYEAy0MfvzQPbbuw55WWcyb0 -WZJdNzcHwKX4ajzrj4vP9VOPRtD7eINMt+QsrMnVjei6u0yeahhHTIXZvc2K4Qeq -V/YGinDzaUqqTU+synXFauUOPXO6XxQi6GC2rphPKsOcBFWoLSYc0vgYvgbA5uD7 -l8Yyc77RROKuwfWmHcJHHh8CgYBurGFSjGdJWHgr/oSHPqkIG0VLiJV7nQJjBPRd -/Lr8YnTK6BJpHf7Q0Ov3frMirjEYqakXtaExel5TMbmT8q+eN8h3pnHlleY+oclr -EQghv4J8GWs4NYhoQuZ6wH/ZuaTS+XHTS3FG51J3wcrUZtET8ICvHNE4lNjPbH8z -TysENQKBgHER1RtDFdz+O7mlWibrHk8JDgcVdZV/pBF+9cb7r/orkH9RLAHDlsAO -tuSVaQmm5eqgaAxMamBXSyw1lir07byemyuEDg0mJ1rNUGsAY8P+LWr579gvKMme -5gvrJr99JkBTV3z+TiL7dZa52eW00Ijqg2qcbHGpq3kXWWkbd8Tn ------END RSA PRIVATE KEY----- diff --git a/.ci/make.sh b/.ci/make.sh deleted file mode 100755 index 503cf5a27..000000000 --- a/.ci/make.sh +++ /dev/null @@ -1,179 +0,0 @@ -#!/usr/bin/env bash - -# ------------------------------------------------------- # -# -# Skeleton for common build entry script for all elastic -# clients. Needs to be adapted to individual client usage. -# -# Must be called: ./.ci/make.sh -# -# Version: 1.1.0 -# -# Targets: -# --------------------------- -# assemble : build client artefacts with version -# bump : bump client internals to version -# codegen : generate endpoints -# docsgen : generate documentation -# examplegen : generate the doc examples -# clean : clean workspace -# -# ------------------------------------------------------- # - -# ------------------------------------------------------- # -# Bootstrap -# ------------------------------------------------------- # - -script_path=$(dirname "$(realpath -s "$0")") -repo=$(realpath "$script_path/../") - -# shellcheck disable=SC1090 -CMD=$1 -TASK=$1 -TASK_ARGS=() -VERSION=$2 -STACK_VERSION=$VERSION -set -euo pipefail - -product="elastic/elasticsearch-py" -output_folder=".ci/output" -codegen_folder=".ci/output" -OUTPUT_DIR="$repo/${output_folder}" -REPO_BINDING="${OUTPUT_DIR}:/sln/${output_folder}" -WORKFLOW="${WORKFLOW-staging}" -mkdir -p "$OUTPUT_DIR" - -echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m" -echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m" -echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m" - -# ------------------------------------------------------- # -# Parse Command -# ------------------------------------------------------- # - -case $CMD in - clean) - echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m" - rm -rf "$output_folder" - echo -e "\033[32;1mdone.\033[0m" - exit 0 - ;; - assemble) - if [ -v $VERSION ]; then - echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m" - exit 1 - fi - echo -e "\033[36;1mTARGET: assemble artefact $VERSION\033[0m" - TASK=release - TASK_ARGS=("$VERSION" "$output_folder") - ;; - codegen) - if [ -v $VERSION ]; then - echo -e "\033[31;1mTARGET: codegen -> missing version parameter\033[0m" - exit 1 - fi - echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m" - TASK=codegen - # VERSION is BRANCH here for now - TASK_ARGS=("$VERSION" "$codegen_folder") - ;; - docsgen) - if [ -v $VERSION ]; then - echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m" - exit 1 - fi - echo -e "\033[36;1mTARGET: generate docs for 
$VERSION\033[0m" - TASK=codegen - # VERSION is BRANCH here for now - TASK_ARGS=("$VERSION" "$codegen_folder") - ;; - examplesgen) - echo -e "\033[36;1mTARGET: generate examples\033[0m" - TASK=codegen - # VERSION is BRANCH here for now - TASK_ARGS=("$VERSION" "$codegen_folder") - ;; - bump) - if [ -v $VERSION ]; then - echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m" - exit 1 - fi - echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m" - TASK=bump - # VERSION is BRANCH here for now - TASK_ARGS=("$VERSION") - ;; - *) - echo -e "\nUsage:\n\t $CMD is not supported right now\n" - exit 1 -esac - - -# ------------------------------------------------------- # -# Build Container -# ------------------------------------------------------- # - -echo -e "\033[34;1mINFO: building $product container\033[0m" - -docker build \ - --build-arg BUILDER_UID="$(id -u)" \ - --file $repo/.ci/Dockerfile \ - --tag ${product} \ - . - -# ------------------------------------------------------- # -# Run the Container -# ------------------------------------------------------- # - -echo -e "\033[34;1mINFO: running $product container\033[0m" - -if [[ "$CMD" == "assemble" ]]; then - - # Build dists into .ci/output - docker run \ - -u "$(id -u)" \ - --rm -v $repo/.ci/output:/code/elasticsearch-py/dist \ - $product \ - /bin/bash -c "python /code/elasticsearch-py/utils/build-dists.py $VERSION" - - # Verify that there are dists in .ci/output - if compgen -G ".ci/output/*" > /dev/null; then - - # Tarball everything up in .ci/output - if [[ "$WORKFLOW" == 'snapshot' ]]; then - cd $repo/.ci/output && tar -czvf elasticsearch-py-$VERSION-SNAPSHOT.tar.gz * && cd - - else - cd $repo/.ci/output && tar -czvf elasticsearch-py-$VERSION.tar.gz * && cd - - fi - - echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m" - exit 0 - else - echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m" - exit 1 - fi -fi - -if [[ "$CMD" == "bump" ]]; then - docker run \ - --rm -v $repo:/code/elasticsearch-py \ - $product \ - /bin/bash -c "python /code/elasticsearch-py/utils/bump-version.py $VERSION" - - exit 0 -fi - -if [[ "$CMD" == "codegen" ]]; then - echo "TODO" -fi - -if [[ "$CMD" == "docsgen" ]]; then - echo "TODO" -fi - -if [[ "$CMD" == "examplesgen" ]]; then - echo "TODO" -fi - -echo "Must be called with '.ci/make.sh [command]" -exit 1 diff --git a/.ci/test-matrix.yml b/.ci/test-matrix.yml deleted file mode 100755 index 523abe712..000000000 --- a/.ci/test-matrix.yml +++ /dev/null @@ -1,32 +0,0 @@ -STACK_VERSION: - - "7.17.0-SNAPSHOT" - - "8.0.0-SNAPSHOT" - -TEST_SUITE: - - platinum - -PYTHON_VERSION: - - "2.7" - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.8" - - "3.9" - -PYTHON_CONNECTION_CLASS: - - Urllib3HttpConnection - - RequestsHttpConnection - -exclude: - # TODO: Remove for 7.16 branch - - STACK_VERSION: "8.0.0-SNAPSHOT" - PYTHON_VERSION: "3.4" - - STACK_VERSION: "8.0.0-SNAPSHOT" - PYTHON_VERSION: "3.5" - - STACK_VERSION: "8.0.0-SNAPSHOT" - PYTHON_VERSION: "3.6" - - STACK_VERSION: "8.0.0-SNAPSHOT" - PYTHON_VERSION: "3.7" - - STACK_VERSION: "8.0.0-SNAPSHOT" - PYTHON_VERSION: "3.8" diff --git a/.github/workflows/unified-release.yml b/.github/workflows/unified-release.yml deleted file mode 100644 index 7f60cac47..000000000 --- a/.github/workflows/unified-release.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Unified Release - -on: - pull_request: - paths-ignore: - - 'README.md' - push: - paths-ignore: - - 'README.md' - branches: - - main - - master - - '[0-9]+.[0-9]+' - - '[0-9]+.x' - 
-jobs: - assemble: - name: Assemble - runs-on: ubuntu-latest - env: - STACK_VERSION: "7.17-SNAPSHOT" - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: "Assemble ${{ env.STACK_VERSION }}" - run: "./.ci/make.sh assemble ${{ env.STACK_VERSION }}" From b58e09f9fe3bbd363a128d42dec75985baf02cd2 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 09:35:03 +0400 Subject: [PATCH 2/8] Fix Read the Docs build --- .github/workflows/ci.yml | 15 --------------- .readthedocs.yml | 15 ++++++++++----- 2 files changed, 10 insertions(+), 20 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2bab0c9eb..e633247a5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,21 +19,6 @@ jobs: - name: Lint the code run: nox -s lint - docs: - runs-on: ubuntu-latest - steps: - - name: Checkout Repository - uses: actions/checkout@v2 - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - name: Install dependencies - run: | - python3.7 -m pip install nox - - name: Build the docs - run: nox -s docs - test-linux: strategy: fail-fast: false diff --git a/.readthedocs.yml b/.readthedocs.yml index e09a45ed3..f98f5b245 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,10 +1,15 @@ version: 2 -sphinx: - configuration: docs/sphinx/conf.py + +build: + os: ubuntu-22.04 + tools: + # To work around https://github.com/aio-libs/aiohttp/issues/7675, we need + # to set AIOHTTP_NO_EXTENSIONS to 1 but it has to be done in + # https://readthedocs.org/dashboard/elasticsearch-py/environmentvariables/ + # because of https://github.com/readthedocs/readthedocs.org/issues/6311 + python: "3" python: - version: 3.7 install: - - method: pip - path: . - requirements: dev-requirements.txt + - path: . 
From 8aa24087688f7f47f74ca97e8b88d95637589de7 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 10:16:01 +0400 Subject: [PATCH 3/8] Support Python 3.6 to 3.11 --- .buildkite/pipeline.yml | 1 - .github/workflows/ci.yml | 19 ++----- dev-requirements.txt | 24 +++----- elasticsearch/__init__.py | 14 ----- elasticsearch/client/utils.py | 6 +- elasticsearch/compat.py | 60 +++++--------------- elasticsearch/compat.pyi | 1 - elasticsearch/connection/base.py | 22 ------- elasticsearch/helpers/__init__.py | 22 ++----- elasticsearch/helpers/__init__.pyi | 16 ++---- noxfile.py | 2 +- setup.py | 8 +-- test_elasticsearch/run_tests.py | 4 -- test_elasticsearch/test_client/test_utils.py | 9 --- test_elasticsearch/test_connection.py | 7 --- test_elasticsearch/test_module.py | 22 ------- 16 files changed, 44 insertions(+), 193 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index eac73135f..4a13134b3 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -10,7 +10,6 @@ steps: matrix: setup: python: - - "2.7" - "3.6" - "3.7" - "3.8" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e633247a5..ba2c76b9f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,24 +23,14 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] experimental: [false] runs-on: ["ubuntu-latest"] include: - - python-version: 2.7 - experimental: false - runs-on: "ubuntu-20.04" - - python-version: 3.5 - experimental: false - runs-on: "ubuntu-20.04" - python-version: 3.6 experimental: false runs-on: "ubuntu-20.04" - - python-version: 3.11 - experimental: true - runs-on: "ubuntu-20.04" - runs-on: ${{ matrix.runs-on }} name: test-${{ matrix.python-version }} continue-on-error: ${{ matrix.experimental }} @@ -55,5 +45,8 @@ jobs: run: | python -m pip install -r dev-requirements.txt - name: Run Tests - run: | - python setup.py test + run: python setup.py test + env: + # Workaround for development versions of Python + # https://github.com/aio-libs/aiohttp/issues/7675 + AIOHTTP_NO_EXTENSIONS: 1 diff --git a/dev-requirements.txt b/dev-requirements.txt index 59b226aa0..5a4d18334 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -7,26 +7,18 @@ sphinx jinja2 python-dateutil -# Testing the 'search_mvt' API response -mapbox-vector-tile; python_version!="3.4.*" -# For mapbox-vector-tile, package broke Python 2 support without an annotation. -# See: protocolbuffers/protobuf#8984 -protobuf<3.18; python_version!="3.4.*" +mapbox-vector-tile -# No wheels for Python 3.10 yet! -numpy; python_version<"3.10" -pandas; python_version<"3.10" +numpy +pandas -# PyYAML 5.3 dropped support for Python 3.4 while -# not amending that requirement to the package. 
:( -pyyaml>=5.4; python_version>="3.6" -pyyaml<5.3; python_version<"3.6" +pyyaml>=5.4 isort -black; python_version>="3.6" +black twine # Requirements for testing [async] extra -aiohttp; python_version>="3.6" -pytest-asyncio; python_version>="3.6" -unasync; python_version>="3.6" +aiohttp +pytest-asyncio +unasync diff --git a/elasticsearch/__init__.py b/elasticsearch/__init__.py index ab805c62a..b76f48bf4 100644 --- a/elasticsearch/__init__.py +++ b/elasticsearch/__init__.py @@ -88,10 +88,6 @@ ] try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - from ._async.client import AsyncElasticsearch from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection from ._async.transport import AsyncTransport @@ -104,13 +100,3 @@ ] except (ImportError, SyntaxError): pass - -# Python earlier than 3.6 is deprecated and will be removed in 8.0.0 -if sys.version_info < (3, 6): - warnings.warn( - "Support for Python 3.5 and earlier is deprecated and will be removed " - "in v8.0.0 (current instance is Python %d.%d) See https://github.com/elastic" - "/elasticsearch-py/issues/1696 for details." % sys.version_info[:2], - category=DeprecationWarning, - stacklevel=2, - ) diff --git a/elasticsearch/client/utils.py b/elasticsearch/client/utils.py index 15c3a7b47..652464596 100644 --- a/elasticsearch/client/utils.py +++ b/elasticsearch/client/utils.py @@ -25,7 +25,7 @@ from functools import wraps from .._version import __versionstr__ -from ..compat import PY2, quote, string_types, to_bytes, to_str, unquote, urlparse +from ..compat import quote, string_types, to_bytes, to_str, unquote, urlparse # parts of URL to be omitted SKIP_IN_PATH = (None, "", b"", [], ()) @@ -105,9 +105,7 @@ def _escape(value): # encode strings to utf-8 if isinstance(value, string_types): - if PY2 and isinstance(value, unicode): # noqa: F821 - return value.encode("utf-8") - if not PY2 and isinstance(value, str): + if isinstance(value, str): return value.encode("utf-8") return str(value) diff --git a/elasticsearch/compat.py b/elasticsearch/compat.py index 912d2c72a..ea58f0d1e 100644 --- a/elasticsearch/compat.py +++ b/elasticsearch/compat.py @@ -15,60 +15,30 @@ # specific language governing permissions and limitations # under the License. 
-import sys +import asyncio -PY2 = sys.version_info[0] == 2 +string_types = str, bytes +from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse -if PY2: - string_types = (basestring,) # noqa: F821 - from itertools import imap as map - from urllib import quote, quote_plus, unquote, urlencode +map = map +from queue import Queue - from Queue import Queue - from urlparse import urlparse - def to_str(x, encoding="ascii"): - if not isinstance(x, str): - return x.encode(encoding) - return x +def to_str(x, encoding="ascii"): + if not isinstance(x, str): + return x.decode(encoding) + return x - to_bytes = to_str -else: - string_types = str, bytes - from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse +def to_bytes(x, encoding="ascii"): + if not isinstance(x, bytes): + return x.encode(encoding) + return x - map = map - from queue import Queue - def to_str(x, encoding="ascii"): - if not isinstance(x, str): - return x.decode(encoding) - return x +from collections.abc import Mapping - def to_bytes(x, encoding="ascii"): - if not isinstance(x, bytes): - return x.encode(encoding) - return x - - -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - - -try: - reraise_exceptions = (RecursionError,) -except NameError: - reraise_exceptions = () - -try: - import asyncio - - reraise_exceptions += (asyncio.CancelledError,) -except (ImportError, AttributeError): - pass +reraise_exceptions = (RecursionError, asyncio.CancelledError) try: from threading import Lock diff --git a/elasticsearch/compat.pyi b/elasticsearch/compat.pyi index 249ff0415..a67a97045 100644 --- a/elasticsearch/compat.pyi +++ b/elasticsearch/compat.pyi @@ -18,7 +18,6 @@ import sys from typing import Callable, Tuple, Type, Union -PY2: bool string_types: Tuple[type, ...] to_str: Callable[[Union[str, bytes]], str] diff --git a/elasticsearch/connection/base.py b/elasticsearch/connection/base.py index 1e9ec893e..ffc25210e 100644 --- a/elasticsearch/connection/base.py +++ b/elasticsearch/connection/base.py @@ -29,7 +29,6 @@ import json from .. import __versionstr__ -from ..compat import PY2 from ..exceptions import ( HTTP_EXCEPTIONS, ElasticsearchWarning, @@ -259,9 +258,6 @@ def log_request_success( except AttributeError: pass - if response is not None: - response = loggable_response_body(response) - logger.info( "%s %s [status:%s request:%.3fs]", method, full_url, status_code, duration ) @@ -302,9 +298,6 @@ def log_request_fail( except AttributeError: pass - if response is not None: - response = loggable_response_body(response) - logger.debug("> %s", body) self._log_trace(method, path, body, status_code, response, duration) @@ -342,18 +335,3 @@ def _get_api_key_header_val(self, api_key): s = "{0}:{1}".format(api_key[0], api_key[1]).encode("utf-8") return "ApiKey " + binascii.b2a_base64(s).rstrip(b"\r\n").decode("utf-8") return "ApiKey " + api_key - - -def loggable_response_body(response): - # If 'response' isn't unicode we need to try converting it to - # unicode otherwise it's likely binary so should be encoded - # properly. On Python 3.x this works out fine. 
- if PY2 and not isinstance(response, unicode): # noqa - try: - response = response.decode("utf-8") - except (AttributeError, UnicodeError): - # Encodes unprintable characters to '\xXX' hex - # like how is done in Python 3.x in bytes.__repr__ - response = u"b" + repr(response).decode("utf-8") - - return response diff --git a/elasticsearch/helpers/__init__.py b/elasticsearch/helpers/__init__.py index a8478be7f..11a75664a 100644 --- a/elasticsearch/helpers/__init__.py +++ b/elasticsearch/helpers/__init__.py @@ -17,6 +17,7 @@ import sys +from .._async.helpers import async_bulk, async_reindex, async_scan, async_streaming_bulk from .actions import ( _chunk_actions, _process_bulk_chunk, @@ -40,21 +41,8 @@ "reindex", "_chunk_actions", "_process_bulk_chunk", + "async_scan", + "async_bulk", + "async_reindex", + "async_streaming_bulk", ] - - -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - - from .._async.helpers import ( - async_bulk, - async_reindex, - async_scan, - async_streaming_bulk, - ) - - __all__ += ["async_scan", "async_bulk", "async_reindex", "async_streaming_bulk"] -except (ImportError, SyntaxError): - pass diff --git a/elasticsearch/helpers/__init__.pyi b/elasticsearch/helpers/__init__.pyi index 648056ab0..608b7efb6 100644 --- a/elasticsearch/helpers/__init__.pyi +++ b/elasticsearch/helpers/__init__.pyi @@ -17,6 +17,10 @@ import sys +from .._async.helpers import async_bulk as async_bulk +from .._async.helpers import async_reindex as async_reindex +from .._async.helpers import async_scan as async_scan +from .._async.helpers import async_streaming_bulk as async_streaming_bulk from .actions import _chunk_actions as _chunk_actions from .actions import _process_bulk_chunk as _process_bulk_chunk from .actions import bulk as bulk @@ -27,15 +31,3 @@ from .actions import scan as scan from .actions import streaming_bulk as streaming_bulk from .errors import BulkIndexError as BulkIndexError from .errors import ScanError as ScanError - -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - - from .._async.helpers import async_bulk as async_bulk - from .._async.helpers import async_reindex as async_reindex - from .._async.helpers import async_scan as async_scan - from .._async.helpers import async_streaming_bulk as async_streaming_bulk -except (ImportError, SyntaxError): - pass diff --git a/noxfile.py b/noxfile.py index 919c68e32..d8e785d01 100644 --- a/noxfile.py +++ b/noxfile.py @@ -26,7 +26,7 @@ ) -@nox.session(python=["2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9"]) +@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]) def test(session): session.install(".") session.install("-r", "dev-requirements.txt") diff --git a/setup.py b/setup.py index 89dd43e24..7e9d58b25 100644 --- a/setup.py +++ b/setup.py @@ -95,19 +95,17 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: 
Implementation :: PyPy", ], - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4", + python_requires=">=3.6", install_requires=install_requires, test_suite="test_elasticsearch.run_tests.run_all", tests_require=tests_require, diff --git a/test_elasticsearch/run_tests.py b/test_elasticsearch/run_tests.py index 3936c4375..dfddcc155 100755 --- a/test_elasticsearch/run_tests.py +++ b/test_elasticsearch/run_tests.py @@ -94,10 +94,6 @@ def run_all(argv=None): ] ignores = [] - # Python 3.6+ is required for async - if sys.version_info < (3, 6): - ignores.append("test_elasticsearch/test_async/") - # GitHub Actions, run non-server tests if "GITHUB_ACTION" in environ: ignores.extend( diff --git a/test_elasticsearch/test_client/test_utils.py b/test_elasticsearch/test_client/test_utils.py index 2b30c7029..d9a407da0 100644 --- a/test_elasticsearch/test_client/test_utils.py +++ b/test_elasticsearch/test_client/test_utils.py @@ -24,7 +24,6 @@ import pytest from elasticsearch.client.utils import _bulk_body, _escape, _make_path, query_params -from elasticsearch.compat import PY2 from ..test_cases import SkipTest, TestCase @@ -446,14 +445,6 @@ def test_handles_unicode(self): "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) ) - def test_handles_utf_encoded_string(self): - if not PY2: - raise SkipTest("Only relevant for py2") - id = "中文".encode("utf-8") - self.assertEqual( - "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) - ) - class TestEscape(TestCase): def test_handles_ascii(self): diff --git a/test_elasticsearch/test_connection.py b/test_elasticsearch/test_connection.py index 3ddd0cd45..217fac692 100644 --- a/test_elasticsearch/test_connection.py +++ b/test_elasticsearch/test_connection.py @@ -31,7 +31,6 @@ from urllib3._collections import HTTPHeaderDict from elasticsearch import Elasticsearch, __versionstr__ -from elasticsearch.compat import reraise_exceptions from elasticsearch.connection import ( Connection, RequestsHttpConnection, @@ -445,9 +444,6 @@ def test_surrogatepass_into_bytes(self): status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) def test_recursion_error_reraised(self): conn = Urllib3HttpConnection() @@ -875,9 +871,6 @@ def test_surrogatepass_into_bytes(self): status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) def test_recursion_error_reraised(self): conn = RequestsHttpConnection() diff --git a/test_elasticsearch/test_module.py b/test_elasticsearch/test_module.py index 0d77e8784..a82299a1f 100644 --- a/test_elasticsearch/test_module.py +++ b/test_elasticsearch/test_module.py @@ -24,30 +24,8 @@ import elasticsearch -@pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python 3.6+") def test_no_deprecation_python3_6_and_later(): with warnings.catch_warnings(record=True) as w: importlib.reload(elasticsearch) assert len(w) == 0 - - -@pytest.mark.skipif(sys.version_info >= (3, 6), reason="Requires Python <3.6") -def test_deprecated_python3_5_and_earlier(): - - try: # Python 3.4+ - import imp - - reload = imp.reload - except ImportError: # Python 2.7 - reload = reload - - with pytest.warns(DeprecationWarning) as w: - reload(elasticsearch) - - assert len(w) == 1 - assert str(w[0].message) == ( - "Support for Python 3.5 and earlier is 
deprecated and will be removed " - "in v8.0.0 (current instance is Python %d.%d) See https://github.com/" - "elastic/elasticsearch-py/issues/1696 for details." % (sys.version_info[:2]) - ) From a68cd593866cdc873394c9767bac6cfc2f8c7c1c Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 11:43:02 +0400 Subject: [PATCH 4/8] Fix references to .ci --- CONTRIBUTING.md | 2 +- elasticsearch/helpers/test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e57d3eb02..6d4257863 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,7 +13,7 @@ before writing too much code. ## Running Elasticsearch locally We've provided a script to start an Elasticsearch cluster of a certain version -found at `.ci/run-elasticsearch.sh`. +found at `.buildkite/run-elasticsearch.sh`. There are several environment variables that control integration tests: diff --git a/elasticsearch/helpers/test.py b/elasticsearch/helpers/test.py index 156c5def0..fac0b0c7f 100644 --- a/elasticsearch/helpers/test.py +++ b/elasticsearch/helpers/test.py @@ -30,7 +30,7 @@ else: ELASTICSEARCH_URL = "https://elastic:changeme@localhost:9200" -CA_CERTS = join(dirname(dirname(dirname(abspath(__file__)))), ".ci/certs/ca.pem") +CA_CERTS = join(dirname(dirname(dirname(abspath(__file__)))), ".buildkite/certs/ca.pem") def get_test_client(nowait=False, **kwargs): From 0c7306e34c1451bd683f90bd01479c20ba28214d Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 16:55:56 +0400 Subject: [PATCH 5/8] Use asyncio_mode = auto to keep Python 3.6 support --- setup.cfg | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.cfg b/setup.cfg index cbdd5d4ca..38b0bbf33 100644 --- a/setup.cfg +++ b/setup.cfg @@ -15,6 +15,8 @@ ignore = E203, E266, E501, W503 [tool:pytest] junit_family=legacy addopts = -vvv -p no:logging --cov-report=term-missing --cov=elasticsearch --cov-config=.coveragerc +# When dropping Python 3.6 support, switch to strict mode +asyncio_mode = auto [tool:isort] profile=black From e0ddf2dcd2d87fda3f0e9dc70aa98f9ece0c0441 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 11:44:01 +0400 Subject: [PATCH 6/8] Stop trying getting local client --- test_elasticsearch/test_server/__init__.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/test_elasticsearch/test_server/__init__.py b/test_elasticsearch/test_server/__init__.py index c9ac5ad56..e31a1c179 100644 --- a/test_elasticsearch/test_server/__init__.py +++ b/test_elasticsearch/test_server/__init__.py @@ -30,18 +30,11 @@ def get_client(**kwargs): if client is not None and not kwargs: return client - # try and locate manual override in the local environment try: - from test_elasticsearch.local import get_client as local_get_client - - new_client = local_get_client(**kwargs) - except ImportError: - # fallback to using vanilla client - try: - new_client = test.get_test_client(**kwargs) - except SkipTest: - client = False - raise + new_client = test.get_test_client(**kwargs) + except SkipTest: + client = False + raise if not kwargs: client = new_client From b59e558e97a4a5f38d14ffcbdb09cd9cbed6fed0 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 12:02:40 +0400 Subject: [PATCH 7/8] Bump black and fix lint --- elasticsearch/_async/helpers.py | 10 +++------- elasticsearch/_async/helpers.pyi | 8 ++++---- elasticsearch/_async/http_aiohttp.py | 1 - elasticsearch/_async/transport.py | 4 +--- elasticsearch/_async/transport.pyi | 2 +- 
elasticsearch/client/utils.pyi | 2 +- elasticsearch/compat.py | 7 +++---- elasticsearch/connection/base.py | 3 +-- elasticsearch/connection/base.pyi | 2 +- elasticsearch/connection/http_requests.py | 4 ++-- elasticsearch/connection/http_requests.pyi | 2 +- elasticsearch/connection/http_urllib3.py | 4 ++-- elasticsearch/connection/http_urllib3.pyi | 2 +- elasticsearch/connection_pool.py | 2 +- elasticsearch/connection_pool.pyi | 2 +- elasticsearch/helpers/__init__.py | 2 -- elasticsearch/helpers/actions.py | 20 ++++++++----------- elasticsearch/helpers/actions.pyi | 10 +++++----- elasticsearch/transport.py | 6 +++--- elasticsearch/transport.pyi | 2 +- noxfile.py | 10 ++++------ .../test_async/test_connection.py | 2 +- .../test_async/test_server/test_helpers.py | 2 -- .../test_server/test_rest_api_spec.py | 1 - test_elasticsearch/test_client/test_utils.py | 2 +- test_elasticsearch/test_connection.py | 4 ++-- test_elasticsearch/test_helpers.py | 4 ++-- test_elasticsearch/test_module.py | 3 --- .../test_server/test_rest_api_spec.py | 1 - 29 files changed, 50 insertions(+), 74 deletions(-) diff --git a/elasticsearch/_async/helpers.py b/elasticsearch/_async/helpers.py index 388dc459c..11dcec1d5 100644 --- a/elasticsearch/_async/helpers.py +++ b/elasticsearch/_async/helpers.py @@ -62,7 +62,7 @@ async def _process_bulk_chunk( raise_on_error=True, ignore_status=(), *args, - **kwargs + **kwargs, ): """ Send a bulk request to elasticsearch and process the output. @@ -132,9 +132,8 @@ async def async_streaming_bulk( yield_ok=True, ignore_status=(), *args, - **kwargs + **kwargs, ): - """ Streaming bulk consumes actions from the iterable passed in and yields results per action. For non-streaming usecases use @@ -176,7 +175,6 @@ async def map_actions(): async for bulk_data, bulk_actions in _chunk_actions( map_actions(), chunk_size, max_chunk_bytes, client.transport.serializer ): - for attempt in range(max_retries + 1): to_retry, to_retry_data = [], [] if attempt: @@ -198,7 +196,6 @@ async def map_actions(): **kwargs, ), ): - if not ok: action, info = info.popitem() # retry if retries enabled, we get 429, and we are not @@ -292,7 +289,7 @@ async def async_scan( request_timeout=None, clear_scroll=True, scroll_kwargs=None, - **kwargs + **kwargs, ): """ Simple abstraction on top of the @@ -430,7 +427,6 @@ async def async_reindex( scan_kwargs={}, bulk_kwargs={}, ): - """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. diff --git a/elasticsearch/_async/helpers.pyi b/elasticsearch/_async/helpers.pyi index fca332f94..58aa348fc 100644 --- a/elasticsearch/_async/helpers.pyi +++ b/elasticsearch/_async/helpers.pyi @@ -50,7 +50,7 @@ def _process_bulk_chunk( raise_on_error: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, - **kwargs: Any + **kwargs: Any, ) -> AsyncGenerator[Tuple[bool, Any], None]: ... def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> AsyncGenerator[T, None]: ... def azip( @@ -70,7 +70,7 @@ def async_streaming_bulk( yield_ok: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, - **kwargs: Any + **kwargs: Any, ) -> AsyncGenerator[Tuple[bool, Any], None]: ... async def async_bulk( client: AsyncElasticsearch, @@ -78,7 +78,7 @@ async def async_bulk( stats_only: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Tuple[int, Union[int, List[Any]]]: ... 
def async_scan( client: AsyncElasticsearch, @@ -90,7 +90,7 @@ def async_scan( request_timeout: Optional[Union[float, int]] = ..., clear_scroll: bool = ..., scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any + **kwargs: Any, ) -> AsyncGenerator[int, None]: ... async def async_reindex( client: AsyncElasticsearch, diff --git a/elasticsearch/_async/http_aiohttp.py b/elasticsearch/_async/http_aiohttp.py index 5b54e1d87..9e538502a 100644 --- a/elasticsearch/_async/http_aiohttp.py +++ b/elasticsearch/_async/http_aiohttp.py @@ -80,7 +80,6 @@ async def close(self): class AIOHttpConnection(AsyncConnection): - HTTP_CLIENT_META = ("ai", _client_meta_version(aiohttp.__version__)) def __init__( diff --git a/elasticsearch/_async/transport.py b/elasticsearch/_async/transport.py index 0bfe75d19..b3e333456 100644 --- a/elasticsearch/_async/transport.py +++ b/elasticsearch/_async/transport.py @@ -67,7 +67,7 @@ def __init__( retry_on_timeout=False, send_get_body_as="GET", meta_header=True, - **kwargs + **kwargs, ): """ :arg hosts: list of dictionaries, each containing keyword arguments to @@ -166,7 +166,6 @@ async def _async_init(self): # ... and we can start sniffing in the background. if self.sniffing_task is None and self.sniff_on_start: - # Create an asyncio.Event for future calls to block on # until the initial sniffing task completes. self._sniff_on_start_event = asyncio.Event() @@ -467,7 +466,6 @@ async def _do_verify_elasticsearch(self, headers, timeout): # Ensure that there's only one async exec within this section # at a time to not emit unnecessary index API calls. async with self._verify_elasticsearch_lock: - # Product check has already been completed while we were # waiting our turn, no need to do again. if self._verified_elasticsearch is not None: diff --git a/elasticsearch/_async/transport.pyi b/elasticsearch/_async/transport.pyi index 447de8316..aa6f2f718 100644 --- a/elasticsearch/_async/transport.pyi +++ b/elasticsearch/_async/transport.pyi @@ -64,7 +64,7 @@ class AsyncTransport(object): retry_on_timeout: bool = ..., send_get_body_as: str = ..., meta_header: bool = ..., - **kwargs: Any + **kwargs: Any, ) -> None: ... def add_connection(self, host: Any) -> None: ... def set_connections(self, hosts: Collection[Any]) -> None: ... diff --git a/elasticsearch/client/utils.pyi b/elasticsearch/client/utils.pyi index 12e69a4e3..1e2f4b7ad 100644 --- a/elasticsearch/client/utils.pyi +++ b/elasticsearch/client/utils.pyi @@ -52,7 +52,7 @@ def query_params( response_mimetypes: Optional[List[str]] = ..., body_params: Optional[List[str]] = ..., body_name: Optional[str] = ..., - body_required: Optional[bool] = ... + body_required: Optional[bool] = ..., ) -> Callable[[Callable[..., T]], Callable[..., T]]: ... def _bulk_body( serializer: Serializer, body: Union[str, bytes, Mapping[str, Any], Iterable[Any]] diff --git a/elasticsearch/compat.py b/elasticsearch/compat.py index ea58f0d1e..13993268f 100644 --- a/elasticsearch/compat.py +++ b/elasticsearch/compat.py @@ -16,12 +16,13 @@ # under the License. 
import asyncio +from collections.abc import Mapping +from queue import Queue +from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse string_types = str, bytes -from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse map = map -from queue import Queue def to_str(x, encoding="ascii"): @@ -36,8 +37,6 @@ def to_bytes(x, encoding="ascii"): return x -from collections.abc import Mapping - reraise_exceptions = (RecursionError, asyncio.CancelledError) try: diff --git a/elasticsearch/connection/base.py b/elasticsearch/connection/base.py index ffc25210e..8fe12b0cd 100644 --- a/elasticsearch/connection/base.py +++ b/elasticsearch/connection/base.py @@ -82,9 +82,8 @@ def __init__( api_key=None, opaque_id=None, meta_header=True, - **kwargs + **kwargs, ): - if cloud_id: try: _, cloud_id = cloud_id.split(":") diff --git a/elasticsearch/connection/base.pyi b/elasticsearch/connection/base.pyi index f0149bc0b..cafcc0665 100644 --- a/elasticsearch/connection/base.pyi +++ b/elasticsearch/connection/base.pyi @@ -57,7 +57,7 @@ class Connection(object): api_key: Optional[Union[Tuple[str, str], List[str], str]] = ..., opaque_id: Optional[str] = ..., meta_header: bool = ..., - **kwargs: Any + **kwargs: Any, ) -> None: ... def __repr__(self) -> str: ... def __eq__(self, other: object) -> bool: ... diff --git a/elasticsearch/connection/http_requests.py b/elasticsearch/connection/http_requests.py index 4645bb3c1..b56169247 100644 --- a/elasticsearch/connection/http_requests.py +++ b/elasticsearch/connection/http_requests.py @@ -80,7 +80,7 @@ def __init__( cloud_id=None, api_key=None, opaque_id=None, - **kwargs + **kwargs, ): if not REQUESTS_AVAILABLE: raise ImproperlyConfigured( @@ -101,7 +101,7 @@ def __init__( cloud_id=cloud_id, api_key=api_key, opaque_id=opaque_id, - **kwargs + **kwargs, ) if not self.http_compress: diff --git a/elasticsearch/connection/http_requests.pyi b/elasticsearch/connection/http_requests.pyi index fe3351a57..5f5581692 100644 --- a/elasticsearch/connection/http_requests.pyi +++ b/elasticsearch/connection/http_requests.pyi @@ -40,5 +40,5 @@ class RequestsHttpConnection(Connection): api_key: Optional[Any] = ..., opaque_id: Optional[str] = ..., meta_header: bool = ..., - **kwargs: Any + **kwargs: Any, ) -> None: ... diff --git a/elasticsearch/connection/http_urllib3.py b/elasticsearch/connection/http_urllib3.py index d30dcb457..946eece11 100644 --- a/elasticsearch/connection/http_urllib3.py +++ b/elasticsearch/connection/http_urllib3.py @@ -122,7 +122,7 @@ def __init__( cloud_id=None, api_key=None, opaque_id=None, - **kwargs + **kwargs, ): # Initialize headers before calling super().__init__(). self.headers = urllib3.make_headers(keep_alive=True) @@ -136,7 +136,7 @@ def __init__( cloud_id=cloud_id, api_key=api_key, opaque_id=opaque_id, - **kwargs + **kwargs, ) if http_auth is not None: if isinstance(http_auth, (tuple, list)): diff --git a/elasticsearch/connection/http_urllib3.pyi b/elasticsearch/connection/http_urllib3.pyi index 13ff5d955..035af1dd4 100644 --- a/elasticsearch/connection/http_urllib3.pyi +++ b/elasticsearch/connection/http_urllib3.pyi @@ -54,5 +54,5 @@ class Urllib3HttpConnection(Connection): api_key: Optional[Any] = ..., opaque_id: Optional[str] = ..., meta_header: bool = ..., - **kwargs: Any + **kwargs: Any, ) -> None: ... 
diff --git a/elasticsearch/connection_pool.py b/elasticsearch/connection_pool.py index 43dc7d691..1d762bd97 100644 --- a/elasticsearch/connection_pool.py +++ b/elasticsearch/connection_pool.py @@ -118,7 +118,7 @@ def __init__( timeout_cutoff=5, selector_class=RoundRobinSelector, randomize_hosts=True, - **kwargs + **kwargs, ): """ :arg connections: list of tuples containing the diff --git a/elasticsearch/connection_pool.pyi b/elasticsearch/connection_pool.pyi index a05810f8d..8e68f1aff 100644 --- a/elasticsearch/connection_pool.pyi +++ b/elasticsearch/connection_pool.pyi @@ -51,7 +51,7 @@ class ConnectionPool(object): timeout_cutoff: int = ..., selector_class: Type[ConnectionSelector] = ..., randomize_hosts: bool = ..., - **kwargs: Any + **kwargs: Any, ) -> None: ... def mark_dead(self, connection: Connection, now: Optional[float] = ...) -> None: ... def mark_live(self, connection: Connection) -> None: ... diff --git a/elasticsearch/helpers/__init__.py b/elasticsearch/helpers/__init__.py index 11a75664a..afbafd176 100644 --- a/elasticsearch/helpers/__init__.py +++ b/elasticsearch/helpers/__init__.py @@ -15,8 +15,6 @@ # specific language governing permissions and limitations # under the License. -import sys - from .._async.helpers import async_bulk, async_reindex, async_scan, async_streaming_bulk from .actions import ( _chunk_actions, diff --git a/elasticsearch/helpers/actions.py b/elasticsearch/helpers/actions.py index ec8e33228..ce4d15c0c 100644 --- a/elasticsearch/helpers/actions.py +++ b/elasticsearch/helpers/actions.py @@ -225,7 +225,7 @@ def _process_bulk_chunk( raise_on_error=True, ignore_status=(), *args, - **kwargs + **kwargs, ): """ Send a bulk request to elasticsearch and process the output. @@ -278,9 +278,8 @@ def streaming_bulk( yield_ok=True, ignore_status=(), *args, - **kwargs + **kwargs, ): - """ Streaming bulk consumes actions from the iterable passed in and yields results per action. For non-streaming usecases use @@ -319,7 +318,6 @@ def streaming_bulk( for bulk_data, bulk_actions in _chunk_actions( actions, chunk_size, max_chunk_bytes, client.transport.serializer ): - for attempt in range(max_retries + 1): to_retry, to_retry_data = [], [] if attempt: @@ -336,10 +334,9 @@ def streaming_bulk( raise_on_error, ignore_status, *args, - **kwargs + **kwargs, ), ): - if not ok: action, info = info.popitem() # retry if retries enabled, we get 429, and we are not @@ -431,7 +428,7 @@ def parallel_bulk( expand_action_callback=expand_action, ignore_status=(), *args, - **kwargs + **kwargs, ): """ Parallel version of the bulk helper run in multiple threads at once. @@ -477,7 +474,7 @@ def _setup_queues(self): bulk_chunk[0], ignore_status=ignore_status, *args, - **kwargs + **kwargs, ) ), _chunk_actions( @@ -502,7 +499,7 @@ def scan( request_timeout=None, clear_scroll=True, scroll_kwargs=None, - **kwargs + **kwargs, ): """ Simple abstraction on top of the @@ -624,7 +621,7 @@ def scan( scroll_id=scroll_id, ignore=(404,), params={"__elastic_client_meta": (("h", "s"),)}, - **transport_kwargs + **transport_kwargs, ) @@ -640,7 +637,6 @@ def reindex( scan_kwargs={}, bulk_kwargs={}, ): - """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. 
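Note for context: the hunks above only reformat the bulk helpers; their behaviour is unchanged. Roughly, streaming_bulk yields one (ok, result) tuple per action and scan wraps the scroll API. A minimal usage sketch, where the index name, documents and local URL are placeholders rather than anything taken from this patch:

from elasticsearch import Elasticsearch, helpers

client = Elasticsearch("http://localhost:9200")

# streaming_bulk reports success or failure for every action as it is sent.
actions = ({"_index": "demo", "_source": {"i": i}} for i in range(100))
for ok, result in helpers.streaming_bulk(client, actions, chunk_size=50):
    if not ok:
        print("failed:", result)

# scan drives the scroll API under the hood and yields raw hits.
for hit in helpers.scan(client, index="demo", query={"query": {"match_all": {}}}):
    print(hit["_source"])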
@@ -713,5 +709,5 @@ def _change_doc_index(hits, index, op_type): target_client, _change_doc_index(docs, target_index, op_type), chunk_size=chunk_size, - **kwargs + **kwargs, ) diff --git a/elasticsearch/helpers/actions.pyi b/elasticsearch/helpers/actions.pyi index f5c14985d..43c8f729c 100644 --- a/elasticsearch/helpers/actions.pyi +++ b/elasticsearch/helpers/actions.pyi @@ -47,7 +47,7 @@ def _process_bulk_chunk( raise_on_exception: bool = ..., raise_on_error: bool = ..., *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Generator[Tuple[bool, Any], None, None]: ... def streaming_bulk( client: Elasticsearch, @@ -63,7 +63,7 @@ def streaming_bulk( yield_ok: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Generator[Tuple[bool, Any], None, None]: ... def bulk( client: Elasticsearch, @@ -71,7 +71,7 @@ def bulk( stats_only: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Tuple[int, Union[int, List[Any]]]: ... def parallel_bulk( client: Elasticsearch, @@ -83,7 +83,7 @@ def parallel_bulk( expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Generator[Tuple[bool, Any], None, None]: ... def scan( client: Elasticsearch, @@ -95,7 +95,7 @@ def scan( request_timeout: Optional[Union[float, int]] = ..., clear_scroll: bool = ..., scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any + **kwargs: Any, ) -> Generator[Any, None, None]: ... def reindex( client: Elasticsearch, diff --git a/elasticsearch/transport.py b/elasticsearch/transport.py index 9329b539d..d088804c0 100644 --- a/elasticsearch/transport.py +++ b/elasticsearch/transport.py @@ -87,7 +87,7 @@ def __init__( retry_on_timeout=False, send_get_body_as="GET", meta_header=True, - **kwargs + **kwargs, ): """ :arg hosts: list of dictionaries, each containing keyword arguments to @@ -247,13 +247,14 @@ def set_connections(self, hosts): :arg hosts: same as `__init__` """ + # construct the connections def _create_connection(host): # if this is not the initial setup look at the existing connection # options and identify connections that haven't changed and can be # kept around. if hasattr(self, "connection_pool"): - for (connection, old_host) in self.connection_pool.connection_opts: + for connection, old_host in self.connection_pool.connection_opts: if old_host == host: return connection @@ -542,7 +543,6 @@ def _do_verify_elasticsearch(self, headers, timeout): # Ensure that there's only one thread within this section # at a time to not emit unnecessary index API calls. with self._verify_elasticsearch_lock: - # Product check has already been completed while we were # waiting our turn, no need to do again. if self._verified_elasticsearch is not None: diff --git a/elasticsearch/transport.pyi b/elasticsearch/transport.pyi index 5265b51e3..49c8b8ef2 100644 --- a/elasticsearch/transport.pyi +++ b/elasticsearch/transport.pyi @@ -68,7 +68,7 @@ class Transport(object): retry_on_timeout: bool = ..., send_get_body_as: str = ..., meta_header: bool = ..., - **kwargs: Any + **kwargs: Any, ) -> None: ... def add_connection(self, host: Any) -> None: ... def set_connections(self, hosts: Collection[Any]) -> None: ... 
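Note on the noxfile hunks that follow: the pinned black==21.12b0 (plus its companion click pin) is replaced with black~=23.0, and --target-version moves from py27 to py36, since recent black releases no longer offer a Python 2 target. The u"..." prefix removals in the surrounding test diffs are the formatting fallout of that change; a small illustrative before/after, not client code:

# Under the old py27 target the prefix survived:
greeting = u"datá"
# With black~=23.0 and --target-version=py36 it is stripped as redundant on Python 3:
greeting = "datá"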
diff --git a/noxfile.py b/noxfile.py index d8e785d01..ddff59e8d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -36,10 +36,10 @@ def test(session): @nox.session() def format(session): - session.install("black==21.12b0", "click==8.0.4", "isort") + session.install("black~=23.0", "isort") session.run("isort", "--profile=black", *SOURCE_FILES) - session.run("black", "--target-version=py27", *SOURCE_FILES) + session.run("black", "--target-version=py36", *SOURCE_FILES) session.run("python", "utils/license-headers.py", "fix", *SOURCE_FILES) lint(session) @@ -47,12 +47,10 @@ def format(session): @nox.session() def lint(session): - session.install( - "flake8", "black==21.12b0", "click==8.0.4", "mypy", "isort", "types-requests" - ) + session.install("black~=23.0", "isort", "flake8", "mypy", "types-requests") session.run("isort", "--check", "--profile=black", *SOURCE_FILES) - session.run("black", "--target-version=py27", "--check", *SOURCE_FILES) + session.run("black", "--target-version=py36", "--check", *SOURCE_FILES) session.run("flake8", *SOURCE_FILES) session.run("python", "utils/license-headers.py", "check", *SOURCE_FILES) diff --git a/test_elasticsearch/test_async/test_connection.py b/test_elasticsearch/test_async/test_connection.py index 43a205264..53d2f1395 100644 --- a/test_elasticsearch/test_async/test_connection.py +++ b/test_elasticsearch/test_async/test_connection.py @@ -409,7 +409,7 @@ async def test_surrogatepass_into_bytes(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) status, headers, data = await con.perform_request("GET", "/") - assert u"你好\uda6a" == data + assert "你好\uda6a" == data @pytest.mark.parametrize("exception_cls", reraise_exceptions) async def test_recursion_error_reraised(self, exception_cls): diff --git a/test_elasticsearch/test_async/test_server/test_helpers.py b/test_elasticsearch/test_async/test_server/test_helpers.py index 49f74b3f5..3b44075b6 100644 --- a/test_elasticsearch/test_async/test_server/test_helpers.py +++ b/test_elasticsearch/test_async/test_server/test_helpers.py @@ -526,7 +526,6 @@ async def test_initial_search_error(self, async_client, scan_teardown): ), ): with patch.object(async_client, "scroll", MockScroll()): - data = [ x async for x in helpers.async_scan( @@ -550,7 +549,6 @@ async def test_initial_search_error(self, async_client, scan_teardown): ), ): with patch.object(async_client, "scroll", MockScroll()) as mock_scroll: - with pytest.raises(ScanError): data = [ x diff --git a/test_elasticsearch/test_async/test_server/test_rest_api_spec.py b/test_elasticsearch/test_async/test_server/test_rest_api_spec.py index 44ece58c9..a0507d207 100644 --- a/test_elasticsearch/test_async/test_server/test_rest_api_spec.py +++ b/test_elasticsearch/test_async/test_server/test_rest_api_spec.py @@ -153,7 +153,6 @@ async def run_do(self, action): # some parameters had to be renamed to not clash with python builtins, # compensate for k in PARAMS_RENAMES: - # Don't do the 'doc_type' rename for APIs that actually want 'type' if k == "type" and method in APIS_USING_TYPE_INSTEAD_OF_DOC_TYPE: continue diff --git a/test_elasticsearch/test_client/test_utils.py b/test_elasticsearch/test_client/test_utils.py index d9a407da0..f72abd28a 100644 --- a/test_elasticsearch/test_client/test_utils.py +++ b/test_elasticsearch/test_client/test_utils.py @@ -25,7 +25,7 @@ from elasticsearch.client.utils import _bulk_body, _escape, _make_path, query_params -from ..test_cases import SkipTest, TestCase +from ..test_cases import 
TestCase class TestQueryParams(TestCase): diff --git a/test_elasticsearch/test_connection.py b/test_elasticsearch/test_connection.py index 217fac692..79d84e3bb 100644 --- a/test_elasticsearch/test_connection.py +++ b/test_elasticsearch/test_connection.py @@ -442,7 +442,7 @@ def test_surrogatepass_into_bytes(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) + self.assertEqual("你好\uda6a", data) def test_recursion_error_reraised(self): conn = Urllib3HttpConnection() @@ -869,7 +869,7 @@ def test_surrogatepass_into_bytes(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) + self.assertEqual("你好\uda6a", data) def test_recursion_error_reraised(self): conn = RequestsHttpConnection() diff --git a/test_elasticsearch/test_helpers.py b/test_elasticsearch/test_helpers.py index e6aee4fe7..ff527e2b8 100644 --- a/test_elasticsearch/test_helpers.py +++ b/test_elasticsearch/test_helpers.py @@ -77,7 +77,7 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): class TestChunkActions(TestCase): def setup_method(self, _): - self.actions = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] + self.actions = [({"index": {}}, {"some": "datá", "i": i}) for i in range(100)] def test_expand_action(self): self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) @@ -203,7 +203,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self): ) self.assertEqual(25, len(chunks)) for chunk_data, chunk_actions in chunks: - chunk = u"".join(chunk_actions) + chunk = "".join(chunk_actions) chunk = chunk if isinstance(chunk, str) else chunk.encode("utf-8") self.assertLessEqual(len(chunk), max_byte_size) diff --git a/test_elasticsearch/test_module.py b/test_elasticsearch/test_module.py index a82299a1f..c7cde63a9 100644 --- a/test_elasticsearch/test_module.py +++ b/test_elasticsearch/test_module.py @@ -16,11 +16,8 @@ # under the License. import importlib -import sys import warnings -import pytest - import elasticsearch diff --git a/test_elasticsearch/test_server/test_rest_api_spec.py b/test_elasticsearch/test_server/test_rest_api_spec.py index 20c5c6879..47480910d 100644 --- a/test_elasticsearch/test_server/test_rest_api_spec.py +++ b/test_elasticsearch/test_server/test_rest_api_spec.py @@ -258,7 +258,6 @@ def run_do(self, action): # some parameters had to be renamed to not clash with python builtins, # compensate for k in PARAMS_RENAMES: - # Don't do the 'doc_type' rename for APIs that actually want 'type' if k == "type" and method in APIS_USING_TYPE_INSTEAD_OF_DOC_TYPE: continue From 6e37cea3638bd5c61b05e95366b2808a849c92c1 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 30 Oct 2023 14:39:33 +0400 Subject: [PATCH 8/8] Show installed packages --- noxfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/noxfile.py b/noxfile.py index ddff59e8d..99436740c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -28,8 +28,8 @@ @nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]) def test(session): - session.install(".") - session.install("-r", "dev-requirements.txt") + session.install(".", silent=False) + session.install("-r", "dev-requirements.txt", silent=False) session.run("python", "setup.py", "test")
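Note: silent=False makes nox stream pip's output into the build log, which is what the commit title means by showing installed packages. A slightly more explicit variant would dump the resolved environment after installation; the pip freeze line below is a hypothetical addition for illustration, not part of this patch:

import nox

@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"])
def test(session):
    session.install(".", silent=False)
    session.install("-r", "dev-requirements.txt", silent=False)
    session.run("python", "-m", "pip", "freeze")  # hypothetical: list everything that was installed
    session.run("python", "setup.py", "test")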