From 1afe8b2636034a0ceb7ca293e60be4ee99a6f0a1 Mon Sep 17 00:00:00 2001
From: Quentin Pradet
Date: Mon, 8 Apr 2024 13:51:35 +0400
Subject: [PATCH 1/2] Remove APIs that do not exist on serverless

---
 docs/sphinx/api.rst                            | 102 ---
 .../_async/client/__init__.py                  |  36 -
 .../_async/client/autoscaling.py               | 175 ----
 elasticsearch_serverless/_async/client/ccr.py  | 749 -----------------
 .../_async/client/dangling_indices.py          | 162 ----
 .../_async/client/features.py                  |  88 --
 .../_async/client/fleet.py                     | 631 --------------
 elasticsearch_serverless/_async/client/ilm.py  | 543 ------------
 .../_async/client/migration.py                 | 127 ---
 .../_async/client/monitoring.py                |  87 --
 .../_async/client/nodes.py                     | 483 -----------
 .../_async/client/rollup.py                    | 440 ----------
 .../_async/client/searchable_snapshots.py      | 265 ------
 .../_async/client/shutdown.py                  | 229 ------
 elasticsearch_serverless/_async/client/slm.py  | 377 ---------
 .../_async/client/snapshot.py                  | 773 ------------------
 elasticsearch_serverless/_async/client/ssl.py  |  57 --
 .../_async/client/text_structure.py            | 158 ----
 .../_async/client/watcher.py                   | 607 --------------
 .../_async/client/xpack.py                     | 111 ---
 .../_sync/client/__init__.py                   |  36 -
 .../_sync/client/autoscaling.py                | 175 ----
 elasticsearch_serverless/_sync/client/ccr.py   | 749 -----------------
 .../_sync/client/dangling_indices.py           | 162 ----
 .../_sync/client/features.py                   |  88 --
 .../_sync/client/fleet.py                      | 631 --------------
 elasticsearch_serverless/_sync/client/ilm.py   | 543 ------------
 .../_sync/client/migration.py                  | 127 ---
 .../_sync/client/monitoring.py                 |  87 --
 .../_sync/client/nodes.py                      | 483 -----------
 .../_sync/client/rollup.py                     | 440 ----------
 .../_sync/client/searchable_snapshots.py       | 265 ------
 .../_sync/client/shutdown.py                   | 229 ------
 elasticsearch_serverless/_sync/client/slm.py   | 377 ---------
 .../_sync/client/snapshot.py                   | 773 ------------------
 elasticsearch_serverless/_sync/client/ssl.py   |  57 --
 .../_sync/client/text_structure.py             | 158 ----
 .../_sync/client/watcher.py                    | 607 --------------
 .../_sync/client/xpack.py                      | 111 ---
 elasticsearch_serverless/client.py             |  44 -
 40 files changed, 12342 deletions(-)
 delete mode 100644 elasticsearch_serverless/_async/client/autoscaling.py
 delete mode 100644 elasticsearch_serverless/_async/client/ccr.py
 delete mode 100644 elasticsearch_serverless/_async/client/dangling_indices.py
 delete mode 100644 elasticsearch_serverless/_async/client/features.py
 delete mode 100644 elasticsearch_serverless/_async/client/fleet.py
 delete mode 100644 elasticsearch_serverless/_async/client/ilm.py
 delete mode 100644 elasticsearch_serverless/_async/client/migration.py
 delete mode 100644 elasticsearch_serverless/_async/client/monitoring.py
 delete mode 100644 elasticsearch_serverless/_async/client/nodes.py
 delete mode 100644 elasticsearch_serverless/_async/client/rollup.py
 delete mode 100644 elasticsearch_serverless/_async/client/searchable_snapshots.py
 delete mode 100644 elasticsearch_serverless/_async/client/shutdown.py
 delete mode 100644 elasticsearch_serverless/_async/client/slm.py
 delete mode 100644 elasticsearch_serverless/_async/client/snapshot.py
 delete mode 100644 elasticsearch_serverless/_async/client/ssl.py
 delete mode 100644 elasticsearch_serverless/_async/client/text_structure.py
 delete mode 100644 elasticsearch_serverless/_async/client/watcher.py
 delete mode 100644 elasticsearch_serverless/_async/client/xpack.py
 delete mode 100644 elasticsearch_serverless/_sync/client/autoscaling.py
 delete mode 100644 elasticsearch_serverless/_sync/client/ccr.py
 delete mode 100644 elasticsearch_serverless/_sync/client/dangling_indices.py
 delete mode 100644 elasticsearch_serverless/_sync/client/features.py
 delete mode 100644 elasticsearch_serverless/_sync/client/fleet.py
 delete mode 100644 elasticsearch_serverless/_sync/client/ilm.py
 delete mode 100644 elasticsearch_serverless/_sync/client/migration.py
 delete mode 100644 elasticsearch_serverless/_sync/client/monitoring.py
 delete mode 100644 elasticsearch_serverless/_sync/client/nodes.py
 delete mode 100644 elasticsearch_serverless/_sync/client/rollup.py
 delete mode 100644 elasticsearch_serverless/_sync/client/searchable_snapshots.py
 delete mode 100644 elasticsearch_serverless/_sync/client/shutdown.py
 delete mode 100644 elasticsearch_serverless/_sync/client/slm.py
 delete mode 100644 elasticsearch_serverless/_sync/client/snapshot.py
 delete mode 100644 elasticsearch_serverless/_sync/client/ssl.py
 delete mode 100644 elasticsearch_serverless/_sync/client/text_structure.py
 delete mode 100644 elasticsearch_serverless/_sync/client/watcher.py
 delete mode 100644 elasticsearch_serverless/_sync/client/xpack.py

diff --git a/docs/sphinx/api.rst b/docs/sphinx/api.rst
index e989557..77895da 100644
--- a/docs/sphinx/api.rst
+++ b/docs/sphinx/api.rst
@@ -30,36 +30,18 @@ Async Search
 .. autoclass:: AsyncSearchClient
    :members:
 
-Autoscaling
------------
-
-.. autoclass:: AutoscalingClient
-   :members:
-
 Cat
 ---
 
 .. autoclass:: CatClient
    :members:
 
-Cross-Cluster Replication (CCR)
--------------------------------
-
-.. autoclass:: CcrClient
-   :members:
-
 Cluster
 -------
 
 .. autoclass:: ClusterClient
    :members:
 
-Dangling Indices
-----------------
-
-.. autoclass:: DanglingIndicesClient
-   :members:
-
 Enrich Policies
 ---------------
 
@@ -72,30 +54,12 @@ Event Query Language (EQL)
 .. autoclass:: EqlClient
    :members:
 
-Snapshottable Features
-----------------------
-
-.. autoclass:: FeaturesClient
-   :members:
-
-Fleet
------
-
-.. autoclass:: FleetClient
-   :members:
-
 Graph Explore
 -------------
 
 .. autoclass:: GraphClient
    :members:
 
-Index Lifecycle Management (ILM)
---------------------------------
-
-.. autoclass:: IlmClient
-   :members:
-
 Indices
 -------
 
@@ -120,12 +84,6 @@ Logstash
 .. autoclass:: LogstashClient
    :members:
 
-Migration
----------
-
-.. autoclass:: MigrationClient
-   :members:
-
 Machine Learning (ML)
 ---------------------
 
@@ -138,86 +96,26 @@ Monitoring
 .. autoclass:: MonitoringClient
    :members:
 
-Nodes
------
-
-.. autoclass:: NodesClient
-   :members:
-
-Rollup Indices
---------------
-
-.. autoclass:: RollupClient
-   :members:
-
-Searchable Snapshots
---------------------
-
-.. autoclass:: SearchableSnapshotsClient
-   :members:
-
 Security
 --------
 
 .. autoclass:: SecurityClient
    :members:
 
-Shutdown
---------
-
-.. autoclass:: ShutdownClient
-   :members:
-
-Snapshot Lifecycle Management (SLM)
------------------------------------
-
-.. autoclass:: SlmClient
-   :members:
-
-Snapshots
----------
-
-.. autoclass:: SnapshotClient
-   :members:
-
 SQL
 ---
 
 .. autoclass:: SqlClient
    :members:
 
-TLS/SSL
--------
-
-.. autoclass:: SslClient
-   :members:
-
 Tasks
 -----
 
 .. autoclass:: TasksClient
    :members:
 
-Text Structure
---------------
-
-.. autoclass:: TextStructureClient
-   :members:
-
 Transforms
 ----------
 
 .. autoclass:: TransformClient
    :members:
-
-Watcher
--------
-
-.. autoclass:: WatcherClient
-   :members:
-
-X-Pack
-------
-
-..
autoclass:: XPackClient - :members: diff --git a/elasticsearch_serverless/_async/client/__init__.py b/elasticsearch_serverless/_async/client/__init__.py index f592ec8..aec5b40 100644 --- a/elasticsearch_serverless/_async/client/__init__.py +++ b/elasticsearch_serverless/_async/client/__init__.py @@ -35,38 +35,22 @@ from ...serializer import DEFAULT_SERIALIZERS from ._base import BaseClient, resolve_auth_headers from .async_search import AsyncSearchClient -from .autoscaling import AutoscalingClient from .cat import CatClient -from .ccr import CcrClient from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient from .enrich import EnrichClient from .eql import EqlClient -from .features import FeaturesClient -from .fleet import FleetClient from .graph import GraphClient -from .ilm import IlmClient from .indices import IndicesClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient -from .migration import MigrationClient from .ml import MlClient -from .monitoring import MonitoringClient -from .nodes import NodesClient from .query_ruleset import QueryRulesetClient -from .rollup import RollupClient from .search_application import SearchApplicationClient -from .searchable_snapshots import SearchableSnapshotsClient from .security import SecurityClient -from .shutdown import ShutdownClient -from .slm import SlmClient -from .snapshot import SnapshotClient from .sql import SqlClient -from .ssl import SslClient from .synonyms import SynonymsClient from .tasks import TasksClient -from .text_structure import TextStructureClient from .transform import TransformClient from .utils import ( _TYPE_HOST, @@ -78,8 +62,6 @@ is_requests_http_auth, is_requests_node_class, ) -from .watcher import WatcherClient -from .xpack import XPackClient logger = logging.getLogger("elasticsearch") @@ -294,42 +276,24 @@ def __init__( # namespaced clients for compatibility with API names self.async_search = AsyncSearchClient(self) - self.autoscaling = AutoscalingClient(self) self.cat = CatClient(self) self.cluster = ClusterClient(self) - self.fleet = FleetClient(self) - self.features = FeaturesClient(self) self.indices = IndicesClient(self) self.ingest = IngestClient(self) - self.nodes = NodesClient(self) - self.snapshot = SnapshotClient(self) self.tasks = TasksClient(self) - self.xpack = XPackClient(self) - self.ccr = CcrClient(self) - self.dangling_indices = DanglingIndicesClient(self) self.enrich = EnrichClient(self) self.eql = EqlClient(self) self.graph = GraphClient(self) - self.ilm = IlmClient(self) self.license = LicenseClient(self) self.logstash = LogstashClient(self) - self.migration = MigrationClient(self) self.ml = MlClient(self) - self.monitoring = MonitoringClient(self) self.query_ruleset = QueryRulesetClient(self) - self.rollup = RollupClient(self) self.search_application = SearchApplicationClient(self) - self.searchable_snapshots = SearchableSnapshotsClient(self) self.security = SecurityClient(self) - self.slm = SlmClient(self) - self.shutdown = ShutdownClient(self) self.sql = SqlClient(self) - self.ssl = SslClient(self) self.synonyms = SynonymsClient(self) - self.text_structure = TextStructureClient(self) self.transform = TransformClient(self) - self.watcher = WatcherClient(self) def __repr__(self) -> str: try: diff --git a/elasticsearch_serverless/_async/client/autoscaling.py b/elasticsearch_serverless/_async/client/autoscaling.py deleted file mode 100644 index b45bc9c..0000000 --- 
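The `__init__.py` hunk above is what actually narrows the client surface: each removed import and `self.<name> = ...Client(self)` assignment takes a namespace such as `ilm`, `watcher` or `snapshot` off the serverless client. A minimal sketch of the effect on calling code follows; it assumes a configured serverless project, and the endpoint URL and API key are placeholders rather than values from this patch.

    from elasticsearch_serverless import Elasticsearch

    # Placeholder endpoint and credential -- substitute real project values.
    client = Elasticsearch(
        "https://my-project.es.region.example.io:443",
        api_key="<api-key>",
    )

    # Namespaces kept by this patch are still attached in __init__ and work as before.
    client.indices.exists(index="my-index")

    # Namespaces removed by this patch are never assigned, so the failure is an
    # AttributeError raised locally, before any HTTP request is made.
    try:
        client.ilm.get_lifecycle(name="my-policy")
    except AttributeError:
        print("ILM is not part of the serverless API surface")
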
a/elasticsearch_serverless/_async/client/autoscaling.py +++ /dev/null @@ -1,175 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class AutoscalingClient(NamespacedClient): - @_rewrite_parameters() - async def delete_autoscaling_policy( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. - Direct use is not supported. - - ``_ - - :param name: the name of the autoscaling policy - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_autoscaling/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_autoscaling_capacity( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets the current autoscaling capacity based on the configured autoscaling policy. - Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. - - ``_ - """ - __path = "/_autoscaling/capacity" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_autoscaling_policy( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves an autoscaling policy. 
Designed for indirect use by ECE/ESS and ECK. - Direct use is not supported. - - ``_ - - :param name: the name of the autoscaling policy - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_autoscaling/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_name="policy", - ) - async def put_autoscaling_policy( - self, - *, - name: str, - policy: t.Mapping[str, t.Any], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and ECK. - Direct use is not supported. - - ``_ - - :param name: the name of the autoscaling policy - :param policy: - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - if policy is None: - raise ValueError("Empty value passed for parameter 'policy'") - __path = f"/_autoscaling/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __body = policy - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_async/client/ccr.py b/elasticsearch_serverless/_async/client/ccr.py deleted file mode 100644 index 01e7d3d..0000000 --- a/elasticsearch_serverless/_async/client/ccr.py +++ /dev/null @@ -1,749 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class CcrClient(NamespacedClient): - @_rewrite_parameters() - async def delete_auto_follow_pattern( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes auto-follow patterns. - - ``_ - - :param name: The name of the auto follow pattern. - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ccr/auto_follow/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def follow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - leader_index: t.Optional[str] = None, - max_outstanding_read_requests: t.Optional[int] = None, - max_outstanding_write_requests: t.Optional[int] = None, - max_read_request_operation_count: t.Optional[int] = None, - max_read_request_size: t.Optional[str] = None, - max_retry_delay: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - max_write_buffer_count: t.Optional[int] = None, - max_write_buffer_size: t.Optional[str] = None, - max_write_request_operation_count: t.Optional[int] = None, - max_write_request_size: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - read_poll_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - remote_cluster: t.Optional[str] = None, - wait_for_active_shards: t.Optional[ - t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new follower index configured to follow the referenced leader index. - - ``_ - - :param index: The name of the follower index - :param leader_index: - :param max_outstanding_read_requests: - :param max_outstanding_write_requests: - :param max_read_request_operation_count: - :param max_read_request_size: - :param max_retry_delay: - :param max_write_buffer_count: - :param max_write_buffer_size: - :param max_write_request_operation_count: - :param max_write_request_size: - :param read_poll_timeout: - :param remote_cluster: - :param wait_for_active_shards: Sets the number of shard copies that must be active - before returning. Defaults to 0. 
Set to `all` for all shard copies, otherwise - set to any non-negative value less than or equal to the total number of copies - for the shard (number of replicas + 1) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/follow" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if leader_index is not None: - __body["leader_index"] = leader_index - if max_outstanding_read_requests is not None: - __body["max_outstanding_read_requests"] = max_outstanding_read_requests - if max_outstanding_write_requests is not None: - __body["max_outstanding_write_requests"] = max_outstanding_write_requests - if max_read_request_operation_count is not None: - __body["max_read_request_operation_count"] = ( - max_read_request_operation_count - ) - if max_read_request_size is not None: - __body["max_read_request_size"] = max_read_request_size - if max_retry_delay is not None: - __body["max_retry_delay"] = max_retry_delay - if max_write_buffer_count is not None: - __body["max_write_buffer_count"] = max_write_buffer_count - if max_write_buffer_size is not None: - __body["max_write_buffer_size"] = max_write_buffer_size - if max_write_request_operation_count is not None: - __body["max_write_request_operation_count"] = ( - max_write_request_operation_count - ) - if max_write_request_size is not None: - __body["max_write_request_size"] = max_write_request_size - if pretty is not None: - __query["pretty"] = pretty - if read_poll_timeout is not None: - __body["read_poll_timeout"] = read_poll_timeout - if remote_cluster is not None: - __body["remote_cluster"] = remote_cluster - if wait_for_active_shards is not None: - __query["wait_for_active_shards"] = wait_for_active_shards - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def follow_info( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about all follower indices, including parameters and status - for each follower index - - ``_ - - :param index: A comma-separated list of index patterns; use `_all` to perform - the operation on all indices - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/info" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def follow_stats( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: 
t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves follower stats. return shard-level stats about the following tasks - associated with each shard for the specified indices. - - ``_ - - :param index: A comma-separated list of index patterns; use `_all` to perform - the operation on all indices - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def forget_follower( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - follower_cluster: t.Optional[str] = None, - follower_index: t.Optional[str] = None, - follower_index_uuid: t.Optional[str] = None, - human: t.Optional[bool] = None, - leader_remote_cluster: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes the follower retention leases from the leader. - - ``_ - - :param index: the name of the leader index for which specified follower retention - leases should be removed - :param follower_cluster: - :param follower_index: - :param follower_index_uuid: - :param leader_remote_cluster: - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/forget_follower" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if follower_cluster is not None: - __body["follower_cluster"] = follower_cluster - if follower_index is not None: - __body["follower_index"] = follower_index - if follower_index_uuid is not None: - __body["follower_index_uuid"] = follower_index_uuid - if human is not None: - __query["human"] = human - if leader_remote_cluster is not None: - __body["leader_remote_cluster"] = leader_remote_cluster - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def get_auto_follow_pattern( - self, - *, - name: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets configured auto-follow patterns. Returns the specified auto-follow pattern - collection. - - ``_ - - :param name: Specifies the auto-follow pattern collection that you want to retrieve. - If you do not specify a name, the API returns information for all collections. 
- """ - if name not in SKIP_IN_PATH: - __path = f"/_ccr/auto_follow/{_quote(name)}" - else: - __path = "/_ccr/auto_follow" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def pause_auto_follow_pattern( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Pauses an auto-follow pattern - - ``_ - - :param name: The name of the auto follow pattern that should pause discovering - new indices to follow. - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ccr/auto_follow/{_quote(name)}/pause" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def pause_follow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Pauses a follower index. The follower index will not fetch any additional operations - from the leader index. - - ``_ - - :param index: The name of the follower index that should pause following its - leader index. 
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/pause_follow" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def put_auto_follow_pattern( - self, - *, - name: str, - remote_cluster: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - follow_index_pattern: t.Optional[str] = None, - human: t.Optional[bool] = None, - leader_index_exclusion_patterns: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - leader_index_patterns: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - max_outstanding_read_requests: t.Optional[int] = None, - max_outstanding_write_requests: t.Optional[int] = None, - max_read_request_operation_count: t.Optional[int] = None, - max_read_request_size: t.Optional[t.Union[int, str]] = None, - max_retry_delay: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - max_write_buffer_count: t.Optional[int] = None, - max_write_buffer_size: t.Optional[t.Union[int, str]] = None, - max_write_request_operation_count: t.Optional[int] = None, - max_write_request_size: t.Optional[t.Union[int, str]] = None, - pretty: t.Optional[bool] = None, - read_poll_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - settings: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new named collection of auto-follow patterns against a specified remote - cluster. Newly created indices on the remote cluster matching any of the specified - patterns will be automatically configured as follower indices. - - ``_ - - :param name: The name of the collection of auto-follow patterns. - :param remote_cluster: The remote cluster containing the leader indices to match - against. - :param follow_index_pattern: The name of follower index. The template {{leader_index}} - can be used to derive the name of the follower index from the name of the - leader index. When following a data stream, use {{leader_index}}; CCR does - not support changes to the names of a follower data stream’s backing indices. - :param leader_index_exclusion_patterns: An array of simple index patterns that - can be used to exclude indices from being auto-followed. Indices in the remote - cluster whose names are matching one or more leader_index_patterns and one - or more leader_index_exclusion_patterns won’t be followed. - :param leader_index_patterns: An array of simple index patterns to match against - indices in the remote cluster specified by the remote_cluster field. - :param max_outstanding_read_requests: The maximum number of outstanding reads - requests from the remote cluster. - :param max_outstanding_write_requests: The maximum number of outstanding reads - requests from the remote cluster. - :param max_read_request_operation_count: The maximum number of operations to - pull per read from the remote cluster. 
- :param max_read_request_size: The maximum size in bytes of per read of a batch - of operations pulled from the remote cluster. - :param max_retry_delay: The maximum time to wait before retrying an operation - that failed exceptionally. An exponential backoff strategy is employed when - retrying. - :param max_write_buffer_count: The maximum number of operations that can be queued - for writing. When this limit is reached, reads from the remote cluster will - be deferred until the number of queued operations goes below the limit. - :param max_write_buffer_size: The maximum total bytes of operations that can - be queued for writing. When this limit is reached, reads from the remote - cluster will be deferred until the total bytes of queued operations goes - below the limit. - :param max_write_request_operation_count: The maximum number of operations per - bulk write request executed on the follower. - :param max_write_request_size: The maximum total bytes of operations per bulk - write request executed on the follower. - :param read_poll_timeout: The maximum time to wait for new operations on the - remote cluster when the follower index is synchronized with the leader index. - When the timeout has elapsed, the poll for operations will return to the - follower so that it can update some statistics. Then the follower will immediately - attempt to read from the leader again. - :param settings: Settings to override from the leader index. Note that certain - settings can not be overrode (e.g., index.number_of_shards). - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - if remote_cluster is None: - raise ValueError("Empty value passed for parameter 'remote_cluster'") - __path = f"/_ccr/auto_follow/{_quote(name)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if remote_cluster is not None: - __body["remote_cluster"] = remote_cluster - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if follow_index_pattern is not None: - __body["follow_index_pattern"] = follow_index_pattern - if human is not None: - __query["human"] = human - if leader_index_exclusion_patterns is not None: - __body["leader_index_exclusion_patterns"] = leader_index_exclusion_patterns - if leader_index_patterns is not None: - __body["leader_index_patterns"] = leader_index_patterns - if max_outstanding_read_requests is not None: - __body["max_outstanding_read_requests"] = max_outstanding_read_requests - if max_outstanding_write_requests is not None: - __body["max_outstanding_write_requests"] = max_outstanding_write_requests - if max_read_request_operation_count is not None: - __body["max_read_request_operation_count"] = ( - max_read_request_operation_count - ) - if max_read_request_size is not None: - __body["max_read_request_size"] = max_read_request_size - if max_retry_delay is not None: - __body["max_retry_delay"] = max_retry_delay - if max_write_buffer_count is not None: - __body["max_write_buffer_count"] = max_write_buffer_count - if max_write_buffer_size is not None: - __body["max_write_buffer_size"] = max_write_buffer_size - if max_write_request_operation_count is not None: - __body["max_write_request_operation_count"] = ( - max_write_request_operation_count - ) - if max_write_request_size is not None: - __body["max_write_request_size"] = max_write_request_size - if pretty is not None: - __query["pretty"] = pretty - if read_poll_timeout is not None: - 
__body["read_poll_timeout"] = read_poll_timeout - if settings is not None: - __body["settings"] = settings - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def resume_auto_follow_pattern( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Resumes an auto-follow pattern that has been paused - - ``_ - - :param name: The name of the auto follow pattern to resume discovering new indices - to follow. - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ccr/auto_follow/{_quote(name)}/resume" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def resume_follow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - max_outstanding_read_requests: t.Optional[int] = None, - max_outstanding_write_requests: t.Optional[int] = None, - max_read_request_operation_count: t.Optional[int] = None, - max_read_request_size: t.Optional[str] = None, - max_retry_delay: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - max_write_buffer_count: t.Optional[int] = None, - max_write_buffer_size: t.Optional[str] = None, - max_write_request_operation_count: t.Optional[int] = None, - max_write_request_size: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - read_poll_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Resumes a follower index that has been paused - - ``_ - - :param index: The name of the follow index to resume following. 
- :param max_outstanding_read_requests: - :param max_outstanding_write_requests: - :param max_read_request_operation_count: - :param max_read_request_size: - :param max_retry_delay: - :param max_write_buffer_count: - :param max_write_buffer_size: - :param max_write_request_operation_count: - :param max_write_request_size: - :param read_poll_timeout: - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/resume_follow" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if max_outstanding_read_requests is not None: - __body["max_outstanding_read_requests"] = max_outstanding_read_requests - if max_outstanding_write_requests is not None: - __body["max_outstanding_write_requests"] = max_outstanding_write_requests - if max_read_request_operation_count is not None: - __body["max_read_request_operation_count"] = ( - max_read_request_operation_count - ) - if max_read_request_size is not None: - __body["max_read_request_size"] = max_read_request_size - if max_retry_delay is not None: - __body["max_retry_delay"] = max_retry_delay - if max_write_buffer_count is not None: - __body["max_write_buffer_count"] = max_write_buffer_count - if max_write_buffer_size is not None: - __body["max_write_buffer_size"] = max_write_buffer_size - if max_write_request_operation_count is not None: - __body["max_write_request_operation_count"] = ( - max_write_request_operation_count - ) - if max_write_request_size is not None: - __body["max_write_request_size"] = max_write_request_size - if pretty is not None: - __query["pretty"] = pretty - if read_poll_timeout is not None: - __body["read_poll_timeout"] = read_poll_timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def stats( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets all stats related to cross-cluster replication. - - ``_ - """ - __path = "/_ccr/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def unfollow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Stops the following task associated with a follower index and removes index metadata - and settings associated with cross-cluster replication. 
- - ``_ - - :param index: The name of the follower index that should be turned into a regular - index. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/unfollow" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/dangling_indices.py b/elasticsearch_serverless/_async/client/dangling_indices.py deleted file mode 100644 index 0b1f2c9..0000000 --- a/elasticsearch_serverless/_async/client/dangling_indices.py +++ /dev/null @@ -1,162 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class DanglingIndicesClient(NamespacedClient): - @_rewrite_parameters() - async def delete_dangling_index( - self, - *, - index_uuid: str, - accept_data_loss: bool, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes the specified dangling index - - ``_ - - :param index_uuid: The UUID of the dangling index - :param accept_data_loss: Must be set to true in order to delete the dangling - index - :param master_timeout: Specify timeout for connection to master - :param timeout: Explicit operation timeout - """ - if index_uuid in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index_uuid'") - if accept_data_loss is None: - raise ValueError("Empty value passed for parameter 'accept_data_loss'") - __path = f"/_dangling/{_quote(index_uuid)}" - __query: t.Dict[str, t.Any] = {} - if accept_data_loss is not None: - __query["accept_data_loss"] = accept_data_loss - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] 
= timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def import_dangling_index( - self, - *, - index_uuid: str, - accept_data_loss: bool, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Imports the specified dangling index - - ``_ - - :param index_uuid: The UUID of the dangling index - :param accept_data_loss: Must be set to true in order to import the dangling - index - :param master_timeout: Specify timeout for connection to master - :param timeout: Explicit operation timeout - """ - if index_uuid in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index_uuid'") - if accept_data_loss is None: - raise ValueError("Empty value passed for parameter 'accept_data_loss'") - __path = f"/_dangling/{_quote(index_uuid)}" - __query: t.Dict[str, t.Any] = {} - if accept_data_loss is not None: - __query["accept_data_loss"] = accept_data_loss - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def list_dangling_indices( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns all dangling indices. - - ``_ - """ - __path = "/_dangling" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/features.py b/elasticsearch_serverless/_async/client/features.py deleted file mode 100644 index 5c19b7b..0000000 --- a/elasticsearch_serverless/_async/client/features.py +++ /dev/null @@ -1,88 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class FeaturesClient(NamespacedClient): - @_rewrite_parameters() - async def get_features( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets a list of features which can be included in snapshots using the feature_states - field when creating a snapshot - - ``_ - """ - __path = "/_features" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def reset_features( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Resets the internal state of features, usually by deleting system indices - - ``_ - """ - __path = "/_features/_reset" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/fleet.py b/elasticsearch_serverless/_async/client/fleet.py deleted file mode 100644 index e296a41..0000000 --- a/elasticsearch_serverless/_async/client/fleet.py +++ /dev/null @@ -1,631 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
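The `features.py` deletion just above and the `fleet.py` deletion that begins here are the ones most likely to affect code shared between the stack client and the serverless client, since Fleet Server integrations call these endpoints directly. Below is a small, hedged sketch of a portability guard under that assumption; the helper name and the index used are illustrative and not part of either client.

    from elasticsearch_serverless import Elasticsearch

    client = Elasticsearch(
        "https://my-project.es.region.example.io:443",  # placeholder endpoint
        api_key="<api-key>",                            # placeholder credential
    )

    def has_fleet_apis(es) -> bool:
        # After this patch the serverless client simply has no `fleet` attribute,
        # so feature detection reduces to a hasattr() check that also works with
        # the stack client (elasticsearch-py), where the namespace still exists.
        return hasattr(es, "fleet")

    if has_fleet_apis(client):
        # Only reachable with a client that still ships FleetClient.
        client.fleet.global_checkpoints(index="my-index")
    else:
        print("Fleet APIs are unavailable; falling back to standard search APIs")
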
- -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class FleetClient(NamespacedClient): - @_rewrite_parameters() - async def global_checkpoints( - self, - *, - index: str, - checkpoints: t.Optional[t.Union[t.List[int], t.Tuple[int, ...]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - wait_for_advance: t.Optional[bool] = None, - wait_for_index: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the current global checkpoints for an index. This API is design for internal - use by the fleet server project. - - ``_ - - :param index: A single index or index alias that resolves to a single index. - :param checkpoints: A comma separated list of previous global checkpoints. When - used in combination with `wait_for_advance`, the API will only return once - the global checkpoints advances past the checkpoints. Providing an empty - list will cause Elasticsearch to immediately return the current global checkpoints. - :param timeout: Period to wait for a global checkpoints to advance past `checkpoints`. - :param wait_for_advance: A boolean value which controls whether to wait (until - the timeout) for the global checkpoints to advance past the provided `checkpoints`. - :param wait_for_index: A boolean value which controls whether to wait (until - the timeout) for the target index to exist and all primary shards be active. - Can only be true when `wait_for_advance` is true. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_fleet/global_checkpoints" - __query: t.Dict[str, t.Any] = {} - if checkpoints is not None: - __query["checkpoints"] = checkpoints - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if wait_for_advance is not None: - __query["wait_for_advance"] = wait_for_advance - if wait_for_index is not None: - __query["wait_for_index"] = wait_for_index - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_name="searches", - ) - async def msearch( - self, - *, - searches: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - index: t.Optional[str] = None, - allow_no_indices: t.Optional[bool] = None, - allow_partial_search_results: t.Optional[bool] = None, - ccs_minimize_roundtrips: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - expand_wildcards: t.Optional[ - t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], - ], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_throttled: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - max_concurrent_searches: t.Optional[int] = None, - max_concurrent_shard_requests: t.Optional[int] = None, - pre_filter_shard_size: t.Optional[int] = None, - pretty: t.Optional[bool] = None, - rest_total_hits_as_int: t.Optional[bool] = None, - search_type: t.Optional[ - t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] - ] = None, - typed_keys: t.Optional[bool] = None, - wait_for_checkpoints: t.Optional[ - t.Union[t.List[int], t.Tuple[int, ...]] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Multi Search API where the search will only be executed after specified checkpoints - are available due to a refresh. This API is designed for internal use by the - fleet server project. - - :param searches: - :param index: A single target to search. If the target is an index alias, it - must resolve to a single index. - :param allow_no_indices: If false, the request returns an error if any wildcard - expression, index alias, or _all value targets only missing or closed indices. - This behavior applies even if the request targets other open indices. For - example, a request targeting foo*,bar* returns an error if an index starts - with foo but no index starts with bar. - :param allow_partial_search_results: If true, returns partial results if there - are shard request timeouts or [shard failures](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-replication.html#shard-failures). - If false, returns an error with no partial results. Defaults to the configured - cluster setting `search.default_allow_partial_results` which is true by default. - :param ccs_minimize_roundtrips: If true, network roundtrips between the coordinating - node and remote clusters are minimized for cross-cluster search requests. - :param expand_wildcards: Type of index that wildcard expressions can match. If - the request can target data streams, this argument determines whether wildcard - expressions match hidden data streams. - :param ignore_throttled: If true, concrete, expanded or aliased indices are ignored - when frozen. - :param ignore_unavailable: If true, missing or closed indices are not included - in the response. - :param max_concurrent_searches: Maximum number of concurrent searches the multi - search API can execute. - :param max_concurrent_shard_requests: Maximum number of concurrent shard requests - that each sub-search request executes per node. - :param pre_filter_shard_size: Defines a threshold that enforces a pre-filter - roundtrip to prefilter search shards based on query rewriting if the number - of shards the search request expands to exceeds the threshold. 
This filter - roundtrip can limit the number of shards significantly if for instance a - shard can not match any documents based on its rewrite method i.e., if date - filters are mandatory to match but the shard bounds and the query are disjoint. - :param rest_total_hits_as_int: If true, hits.total are returned as an integer - in the response. Defaults to false, which returns an object. - :param search_type: Indicates whether global term and document frequencies should - be used when scoring returned documents. - :param typed_keys: Specifies whether aggregation and suggester names should be - prefixed by their respective types in the response. - :param wait_for_checkpoints: A comma separated list of checkpoints. When configured, - the search API will only be executed on a shard after the relevant checkpoint - has become visible for search. Defaults to an empty list which will cause - Elasticsearch to immediately execute the search. - """ - if searches is None: - raise ValueError("Empty value passed for parameter 'searches'") - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_fleet/_fleet_msearch" - else: - __path = "/_fleet/_fleet_msearch" - __query: t.Dict[str, t.Any] = {} - if allow_no_indices is not None: - __query["allow_no_indices"] = allow_no_indices - if allow_partial_search_results is not None: - __query["allow_partial_search_results"] = allow_partial_search_results - if ccs_minimize_roundtrips is not None: - __query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips - if error_trace is not None: - __query["error_trace"] = error_trace - if expand_wildcards is not None: - __query["expand_wildcards"] = expand_wildcards - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_throttled is not None: - __query["ignore_throttled"] = ignore_throttled - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if max_concurrent_searches is not None: - __query["max_concurrent_searches"] = max_concurrent_searches - if max_concurrent_shard_requests is not None: - __query["max_concurrent_shard_requests"] = max_concurrent_shard_requests - if pre_filter_shard_size is not None: - __query["pre_filter_shard_size"] = pre_filter_shard_size - if pretty is not None: - __query["pretty"] = pretty - if rest_total_hits_as_int is not None: - __query["rest_total_hits_as_int"] = rest_total_hits_as_int - if search_type is not None: - __query["search_type"] = search_type - if typed_keys is not None: - __query["typed_keys"] = typed_keys - if wait_for_checkpoints is not None: - __query["wait_for_checkpoints"] = wait_for_checkpoints - __body = searches - __headers = { - "accept": "application/json", - "content-type": "application/x-ndjson", - } - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - parameter_aliases={ - "_source": "source", - "_source_excludes": "source_excludes", - "_source_includes": "source_includes", - "from": "from_", - }, - ) - async def search( - self, - *, - index: str, - aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - allow_no_indices: t.Optional[bool] = None, - allow_partial_search_results: t.Optional[bool] = None, - analyze_wildcard: t.Optional[bool] = None, - analyzer: t.Optional[str] = None, - batched_reduce_size: t.Optional[int] = None, - 
ccs_minimize_roundtrips: t.Optional[bool] = None, - collapse: t.Optional[t.Mapping[str, t.Any]] = None, - default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, - df: t.Optional[str] = None, - docvalue_fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - error_trace: t.Optional[bool] = None, - expand_wildcards: t.Optional[ - t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], - ], - ] - ] = None, - explain: t.Optional[bool] = None, - ext: t.Optional[t.Mapping[str, t.Any]] = None, - fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - from_: t.Optional[int] = None, - highlight: t.Optional[t.Mapping[str, t.Any]] = None, - human: t.Optional[bool] = None, - ignore_throttled: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - indices_boost: t.Optional[ - t.Union[t.List[t.Mapping[str, float]], t.Tuple[t.Mapping[str, float], ...]] - ] = None, - lenient: t.Optional[bool] = None, - max_concurrent_shard_requests: t.Optional[int] = None, - min_compatible_shard_node: t.Optional[str] = None, - min_score: t.Optional[float] = None, - pit: t.Optional[t.Mapping[str, t.Any]] = None, - post_filter: t.Optional[t.Mapping[str, t.Any]] = None, - pre_filter_shard_size: t.Optional[int] = None, - preference: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - profile: t.Optional[bool] = None, - q: t.Optional[str] = None, - query: t.Optional[t.Mapping[str, t.Any]] = None, - request_cache: t.Optional[bool] = None, - rescore: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - ] - ] = None, - rest_total_hits_as_int: t.Optional[bool] = None, - routing: t.Optional[str] = None, - runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - scroll: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] - ] = None, - search_type: t.Optional[ - t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] - ] = None, - seq_no_primary_term: t.Optional[bool] = None, - size: t.Optional[int] = None, - slice: t.Optional[t.Mapping[str, t.Any]] = None, - sort: t.Optional[ - t.Union[ - t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], - ] - ] = None, - source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - suggest: t.Optional[t.Mapping[str, t.Any]] = None, - suggest_field: t.Optional[str] = None, - suggest_mode: t.Optional[ - t.Union["t.Literal['always', 'missing', 'popular']", str] - ] = None, - suggest_size: t.Optional[int] = None, - suggest_text: t.Optional[str] = None, - terminate_after: t.Optional[int] = None, - timeout: t.Optional[str] = None, - track_scores: t.Optional[bool] = None, - track_total_hits: t.Optional[t.Union[bool, int]] = None, - typed_keys: t.Optional[bool] = None, - version: t.Optional[bool] = None, - wait_for_checkpoints: t.Optional[ - t.Union[t.List[int], t.Tuple[int, ...]] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Search API where the search will only be executed after specified checkpoints - are available due to a refresh. This API is designed for internal use by the - fleet server project. - - :param index: A single target to search. If the target is an index alias, it - must resolve to a single index. - :param aggregations: - :param aggs: - :param allow_no_indices: - :param allow_partial_search_results: If true, returns partial results if there - are shard request timeouts or [shard failures](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-replication.html#shard-failures). - If false, returns an error with no partial results. Defaults to the configured - cluster setting `search.default_allow_partial_results` which is true by default. - :param analyze_wildcard: - :param analyzer: - :param batched_reduce_size: - :param ccs_minimize_roundtrips: - :param collapse: - :param default_operator: - :param df: - :param docvalue_fields: Array of wildcard (*) patterns. The request returns doc - values for field names matching these patterns in the hits.fields property - of the response. - :param expand_wildcards: - :param explain: If true, returns detailed information about score computation - as part of a hit. - :param ext: Configuration of search extensions defined by Elasticsearch plugins. - :param fields: Array of wildcard (*) patterns. The request returns values for - field names matching these patterns in the hits.fields property of the response. 
- :param from_: Starting document offset. By default, you cannot page through more - than 10,000 hits using the from and size parameters. To page through more - hits, use the search_after parameter. - :param highlight: - :param ignore_throttled: - :param ignore_unavailable: - :param indices_boost: Boosts the _score of documents from specified indices. - :param lenient: - :param max_concurrent_shard_requests: - :param min_compatible_shard_node: - :param min_score: Minimum _score for matching documents. Documents with a lower - _score are not included in the search results. - :param pit: Limits the search to a point in time (PIT). If you provide a PIT, - you cannot specify an in the request path. - :param post_filter: - :param pre_filter_shard_size: - :param preference: - :param profile: - :param q: - :param query: Defines the search definition using the Query DSL. - :param request_cache: - :param rescore: - :param rest_total_hits_as_int: - :param routing: - :param runtime_mappings: Defines one or more runtime fields in the search request. - These fields take precedence over mapped fields with the same name. - :param script_fields: Retrieve a script evaluation (based on different fields) - for each hit. - :param scroll: - :param search_after: - :param search_type: - :param seq_no_primary_term: If true, returns sequence number and primary term - of the last modification of each hit. See Optimistic concurrency control. - :param size: The number of hits to return. By default, you cannot page through - more than 10,000 hits using the from and size parameters. To page through - more hits, use the search_after parameter. - :param slice: - :param sort: - :param source: Indicates which source fields are returned for matching documents. - These fields are returned in the hits._source property of the search response. - :param source_excludes: - :param source_includes: - :param stats: Stats groups to associate with the search. Each group maintains - a statistics aggregation for its associated searches. You can retrieve these - stats using the indices stats API. - :param stored_fields: List of stored fields to return as part of a hit. If no - fields are specified, no stored fields are included in the response. If this - field is specified, the _source parameter defaults to false. You can pass - _source: true to return both source fields and stored fields in the search - response. - :param suggest: - :param suggest_field: Specifies which field to use for suggestions. - :param suggest_mode: - :param suggest_size: - :param suggest_text: The source text for which the suggestions should be returned. - :param terminate_after: Maximum number of documents to collect for each shard. - If a query reaches this limit, Elasticsearch terminates the query early. - Elasticsearch collects documents before sorting. Defaults to 0, which does - not terminate query execution early. - :param timeout: Specifies the period of time to wait for a response from each - shard. If no response is received before the timeout expires, the request - fails and returns an error. Defaults to no timeout. - :param track_scores: If true, calculate and return document scores, even if the - scores are not used for sorting. - :param track_total_hits: Number of hits matching the query to count accurately. - If true, the exact number of hits is returned at the cost of some performance. - If false, the response does not include the total number of hits matching - the query. Defaults to 10,000 hits. 
- :param typed_keys: - :param version: If true, returns document version as part of a hit. - :param wait_for_checkpoints: A comma separated list of checkpoints. When configured, - the search API will only be executed on a shard after the relevant checkpoint - has become visible for search. Defaults to an empty list which will cause - Elasticsearch to immediately execute the search. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_fleet/_fleet_search" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - # The 'sort' parameter with a colon can't be encoded to the body. - if sort is not None and ( - (isinstance(sort, str) and ":" in sort) - or ( - isinstance(sort, (list, tuple)) - and all(isinstance(_x, str) for _x in sort) - and any(":" in _x for _x in sort) - ) - ): - __query["sort"] = sort - sort = None - if aggregations is not None: - __body["aggregations"] = aggregations - if aggs is not None: - __body["aggs"] = aggs - if allow_no_indices is not None: - __query["allow_no_indices"] = allow_no_indices - if allow_partial_search_results is not None: - __query["allow_partial_search_results"] = allow_partial_search_results - if analyze_wildcard is not None: - __query["analyze_wildcard"] = analyze_wildcard - if analyzer is not None: - __query["analyzer"] = analyzer - if batched_reduce_size is not None: - __query["batched_reduce_size"] = batched_reduce_size - if ccs_minimize_roundtrips is not None: - __query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips - if collapse is not None: - __body["collapse"] = collapse - if default_operator is not None: - __query["default_operator"] = default_operator - if df is not None: - __query["df"] = df - if docvalue_fields is not None: - __body["docvalue_fields"] = docvalue_fields - if error_trace is not None: - __query["error_trace"] = error_trace - if expand_wildcards is not None: - __query["expand_wildcards"] = expand_wildcards - if explain is not None: - __body["explain"] = explain - if ext is not None: - __body["ext"] = ext - if fields is not None: - __body["fields"] = fields - if filter_path is not None: - __query["filter_path"] = filter_path - if from_ is not None: - __body["from"] = from_ - if highlight is not None: - __body["highlight"] = highlight - if human is not None: - __query["human"] = human - if ignore_throttled is not None: - __query["ignore_throttled"] = ignore_throttled - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if indices_boost is not None: - __body["indices_boost"] = indices_boost - if lenient is not None: - __query["lenient"] = lenient - if max_concurrent_shard_requests is not None: - __query["max_concurrent_shard_requests"] = max_concurrent_shard_requests - if min_compatible_shard_node is not None: - __query["min_compatible_shard_node"] = min_compatible_shard_node - if min_score is not None: - __body["min_score"] = min_score - if pit is not None: - __body["pit"] = pit - if post_filter is not None: - __body["post_filter"] = post_filter - if pre_filter_shard_size is not None: - __query["pre_filter_shard_size"] = pre_filter_shard_size - if preference is not None: - __query["preference"] = preference - if pretty is not None: - __query["pretty"] = pretty - if profile is not None: - __body["profile"] = profile - if q is not None: - __query["q"] = q - if query is not None: - __body["query"] = query - if request_cache is not None: - __query["request_cache"] = request_cache - if rescore is not 
None: - __body["rescore"] = rescore - if rest_total_hits_as_int is not None: - __query["rest_total_hits_as_int"] = rest_total_hits_as_int - if routing is not None: - __query["routing"] = routing - if runtime_mappings is not None: - __body["runtime_mappings"] = runtime_mappings - if script_fields is not None: - __body["script_fields"] = script_fields - if scroll is not None: - __query["scroll"] = scroll - if search_after is not None: - __body["search_after"] = search_after - if search_type is not None: - __query["search_type"] = search_type - if seq_no_primary_term is not None: - __body["seq_no_primary_term"] = seq_no_primary_term - if size is not None: - __body["size"] = size - if slice is not None: - __body["slice"] = slice - if sort is not None: - __body["sort"] = sort - if source is not None: - __body["_source"] = source - if source_excludes is not None: - __query["_source_excludes"] = source_excludes - if source_includes is not None: - __query["_source_includes"] = source_includes - if stats is not None: - __body["stats"] = stats - if stored_fields is not None: - __body["stored_fields"] = stored_fields - if suggest is not None: - __body["suggest"] = suggest - if suggest_field is not None: - __query["suggest_field"] = suggest_field - if suggest_mode is not None: - __query["suggest_mode"] = suggest_mode - if suggest_size is not None: - __query["suggest_size"] = suggest_size - if suggest_text is not None: - __query["suggest_text"] = suggest_text - if terminate_after is not None: - __body["terminate_after"] = terminate_after - if timeout is not None: - __body["timeout"] = timeout - if track_scores is not None: - __body["track_scores"] = track_scores - if track_total_hits is not None: - __body["track_total_hits"] = track_total_hits - if typed_keys is not None: - __query["typed_keys"] = typed_keys - if version is not None: - __body["version"] = version - if wait_for_checkpoints is not None: - __query["wait_for_checkpoints"] = wait_for_checkpoints - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_async/client/ilm.py b/elasticsearch_serverless/_async/client/ilm.py deleted file mode 100644 index 83ffa2a..0000000 --- a/elasticsearch_serverless/_async/client/ilm.py +++ /dev/null @@ -1,543 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
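With the fleet.py deletion above, the FleetClient namespace (client.fleet) disappears from both the async and sync serverless clients. As a minimal sketch of the kind of call the generated module supported prior to this change (the endpoint, API key, and index name below are placeholders, not values taken from this patch):

    from elasticsearch_serverless import Elasticsearch

    client = Elasticsearch("https://example.es.cloud:443", api_key="<api-key>")

    # global_checkpoints waits (up to the timeout) for the global checkpoints of
    # the target index to advance past the supplied values; the target must
    # resolve to a single index.
    client.fleet.global_checkpoints(
        index="logs-example",
        checkpoints=[42],
        wait_for_advance=True,
        timeout="30s",
    )

These _fleet endpoints are designed for internal use by Fleet Server, which is why they are not exposed on serverless.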
- -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class IlmClient(NamespacedClient): - @_rewrite_parameters() - async def delete_lifecycle( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes the specified lifecycle policy definition. A currently used policy cannot - be deleted. - - ``_ - - :param name: Identifier for the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ilm/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def explain_lifecycle( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - only_errors: t.Optional[bool] = None, - only_managed: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about the index's current lifecycle state, such as the - currently executing phase, action, and step. - - ``_ - - :param index: Comma-separated list of data streams, indices, and aliases to target. - Supports wildcards (`*`). To target all data streams and indices, use `*` - or `_all`. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param only_errors: Filters the returned indices to only indices that are managed - by ILM and are in an error state, either due to an encountering an error - while executing the policy, or attempting to use a policy that does not exist. - :param only_managed: Filters the returned indices to only indices that are managed - by ILM. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. 
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ilm/explain" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if only_errors is not None: - __query["only_errors"] = only_errors - if only_managed is not None: - __query["only_managed"] = only_managed - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_lifecycle( - self, - *, - name: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the specified policy definition. Includes the policy version and last - modified date. - - ``_ - - :param name: Identifier for the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if name not in SKIP_IN_PATH: - __path = f"/_ilm/policy/{_quote(name)}" - else: - __path = "/_ilm/policy" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_status( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the current index lifecycle management (ILM) status. 
- - ``_ - """ - __path = "/_ilm/status" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def migrate_to_data_tiers( - self, - *, - dry_run: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - legacy_template_to_delete: t.Optional[str] = None, - node_attribute: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Migrates the indices and ILM policies away from custom node attribute allocation - routing to data tiers routing - - ``_ - - :param dry_run: If true, simulates the migration from node attributes based allocation - filters to data tiers, but does not perform the migration. This provides - a way to retrieve the indices and ILM policies that need to be migrated. - :param legacy_template_to_delete: - :param node_attribute: - """ - __path = "/_ilm/migrate_to_data_tiers" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if dry_run is not None: - __query["dry_run"] = dry_run - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if legacy_template_to_delete is not None: - __body["legacy_template_to_delete"] = legacy_template_to_delete - if node_attribute is not None: - __body["node_attribute"] = node_attribute - if pretty is not None: - __query["pretty"] = pretty - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def move_to_step( - self, - *, - index: str, - current_step: t.Optional[t.Mapping[str, t.Any]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - next_step: t.Optional[t.Mapping[str, t.Any]] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Manually moves an index into the specified step and executes that step. 
- - ``_ - - :param index: The name of the index whose lifecycle step is to change - :param current_step: - :param next_step: - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/_ilm/move/{_quote(index)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if current_step is not None: - __body["current_step"] = current_step - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if next_step is not None: - __body["next_step"] = next_step - if pretty is not None: - __query["pretty"] = pretty - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def put_lifecycle( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - policy: t.Optional[t.Mapping[str, t.Any]] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a lifecycle policy - - ``_ - - :param name: Identifier for the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param policy: - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. 
- """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ilm/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if policy is not None: - __body["policy"] = policy - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def remove_policy( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes the assigned lifecycle policy and stops managing the specified index - - ``_ - - :param index: The name of the index to remove policy on - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ilm/remove" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def retry( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retries executing the policy for an index that is in the ERROR step. 
- - ``_ - - :param index: The name of the indices (comma-separated) whose failed lifecycle - step is to be retry - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ilm/retry" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def start( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Start the index lifecycle management (ILM) plugin. - - ``_ - - :param master_timeout: - :param timeout: - """ - __path = "/_ilm/start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def stop( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Halts all lifecycle management operations and stops the index lifecycle management - (ILM) plugin - - ``_ - - :param master_timeout: - :param timeout: - """ - __path = "/_ilm/stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/migration.py b/elasticsearch_serverless/_async/client/migration.py deleted file mode 100644 index 8ba52c0..0000000 --- a/elasticsearch_serverless/_async/client/migration.py +++ /dev/null @@ -1,127 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
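The ilm.py deletion above removes the IlmClient namespace (client.ilm); on serverless, data retention is handled by data stream lifecycle rather than ILM policies. A minimal sketch of a call the deleted module used to generate, using a hypothetical policy name and body with the synchronous client prior to this change:

    from elasticsearch_serverless import Elasticsearch

    client = Elasticsearch("https://example.es.cloud:443", api_key="<api-key>")

    # put_lifecycle takes the policy identifier in the path and the policy
    # definition in the request body, as in the deleted module above.
    client.ilm.put_lifecycle(
        name="logs-policy",
        policy={
            "phases": {
                "hot": {"actions": {"rollover": {"max_age": "7d"}}},
                "delete": {"min_age": "30d", "actions": {"delete": {}}},
            }
        },
    )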
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class MigrationClient(NamespacedClient): - @_rewrite_parameters() - async def deprecations( - self, - *, - index: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about different cluster, node, and index level settings - that use deprecated features that will be removed or changed in the next major - version. - - ``_ - - :param index: Comma-separate list of data streams or indices to check. Wildcard - (*) expressions are supported. - """ - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_migration/deprecations" - else: - __path = "/_migration/deprecations" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_feature_upgrade_status( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Find out whether system features need to be upgraded or not - - ``_ - """ - __path = "/_migration/system_features" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def post_feature_upgrade( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Begin upgrades for system features - - ``_ - """ - __path = "/_migration/system_features" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path 
is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/monitoring.py b/elasticsearch_serverless/_async/client/monitoring.py deleted file mode 100644 index 75fe1bc..0000000 --- a/elasticsearch_serverless/_async/client/monitoring.py +++ /dev/null @@ -1,87 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class MonitoringClient(NamespacedClient): - @_rewrite_parameters( - body_name="operations", - ) - async def bulk( - self, - *, - interval: t.Union["t.Literal[-1]", "t.Literal[0]", str], - operations: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - system_api_version: str, - system_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Used by the monitoring features to send monitoring data. 
- - ``_ - - :param interval: Collection interval (e.g., '10s' or '10000ms') of the payload - :param operations: - :param system_api_version: - :param system_id: Identifier of the monitored system - """ - if interval is None: - raise ValueError("Empty value passed for parameter 'interval'") - if operations is None: - raise ValueError("Empty value passed for parameter 'operations'") - if system_api_version is None: - raise ValueError("Empty value passed for parameter 'system_api_version'") - if system_id is None: - raise ValueError("Empty value passed for parameter 'system_id'") - __path = "/_monitoring/bulk" - __query: t.Dict[str, t.Any] = {} - if interval is not None: - __query["interval"] = interval - if system_api_version is not None: - __query["system_api_version"] = system_api_version - if system_id is not None: - __query["system_id"] = system_id - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __body = operations - __headers = { - "accept": "application/json", - "content-type": "application/x-ndjson", - } - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_async/client/nodes.py b/elasticsearch_serverless/_async/client/nodes.py deleted file mode 100644 index 51f7cf7..0000000 --- a/elasticsearch_serverless/_async/client/nodes.py +++ /dev/null @@ -1,483 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse, TextApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class NodesClient(NamespacedClient): - @_rewrite_parameters() - async def clear_repositories_metering_archive( - self, - *, - node_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - max_archive_version: int, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes the archived repositories metering information present in the cluster. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. All the nodes selective options are explained [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster.html#cluster-nodes). 
- :param max_archive_version: Specifies the maximum [archive_version](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-repositories-metering-api.html#get-repositories-metering-api-response-body) - to be cleared from the archive. - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - if max_archive_version in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'max_archive_version'") - __path = f"/_nodes/{_quote(node_id)}/_repositories_metering/{_quote(max_archive_version)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_repositories_metering_info( - self, - *, - node_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns cluster repositories metering information. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. All the nodes selective options are explained [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster.html#cluster-nodes). - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - __path = f"/_nodes/{_quote(node_id)}/_repositories_metering" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def hot_threads( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_idle_threads: t.Optional[bool] = None, - interval: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - snapshots: t.Optional[int] = None, - sort: t.Optional[ - t.Union["t.Literal['block', 'cpu', 'gpu', 'mem', 'wait']", str] - ] = None, - threads: t.Optional[int] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - type: t.Optional[ - t.Union["t.Literal['block', 'cpu', 'gpu', 'mem', 'wait']", str] - ] = None, - ) -> TextApiResponse: - """ - Returns information about hot threads on each node in the cluster. - - ``_ - - :param node_id: List of node IDs or names used to limit returned information. - :param ignore_idle_threads: If true, known idle threads (e.g. 
waiting in a socket - select, or to get a task from an empty queue) are filtered out. - :param interval: The interval to do the second sampling of threads. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param snapshots: Number of samples of thread stacktrace. - :param sort: The sort order for 'cpu' type (default: total) - :param threads: Specifies the number of hot threads to provide information for. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - :param type: The type to sample. - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/hot_threads" - else: - __path = "/_nodes/hot_threads" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_idle_threads is not None: - __query["ignore_idle_threads"] = ignore_idle_threads - if interval is not None: - __query["interval"] = interval - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if snapshots is not None: - __query["snapshots"] = snapshots - if sort is not None: - __query["sort"] = sort - if threads is not None: - __query["threads"] = threads - if timeout is not None: - __query["timeout"] = timeout - if type is not None: - __query["type"] = type - __headers = {"accept": "text/plain"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def info( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - flat_settings: t.Optional[bool] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about nodes in the cluster. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. - :param metric: Limits the information returned to the specific metrics. Supports - a comma-separated list, such as http,ingest. - :param flat_settings: If true, returns settings in flat format. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. 
- """ - if node_id not in SKIP_IN_PATH and metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/{_quote(metric)}" - elif node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}" - elif metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(metric)}" - else: - __path = "/_nodes" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if flat_settings is not None: - __query["flat_settings"] = flat_settings - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def reload_secure_settings( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - secure_settings_password: t.Optional[str] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Reloads secure settings. - - ``_ - - :param node_id: A comma-separated list of node IDs to span the reload/reinit - call. Should stay empty because reloading usually involves all cluster nodes. - :param secure_settings_password: - :param timeout: Explicit operation timeout - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/reload_secure_settings" - else: - __path = "/_nodes/reload_secure_settings" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if secure_settings_password is not None: - __body["secure_settings_password"] = secure_settings_password - if timeout is not None: - __query["timeout"] = timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def stats( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - index_metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - completion_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - fielddata_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - groups: t.Optional[bool] = None, - human: 
t.Optional[bool] = None, - include_segment_file_sizes: t.Optional[bool] = None, - include_unloaded_segments: t.Optional[bool] = None, - level: t.Optional[ - t.Union["t.Literal['cluster', 'indices', 'shards']", str] - ] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - types: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns statistical information about nodes in the cluster. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. - :param metric: Limit the information returned to the specified metrics - :param index_metric: Limit the information returned for indices metric to the - specific index metrics. It can be used only if indices (or all) metric is - specified. - :param completion_fields: Comma-separated list or wildcard expressions of fields - to include in fielddata and suggest statistics. - :param fielddata_fields: Comma-separated list or wildcard expressions of fields - to include in fielddata statistics. - :param fields: Comma-separated list or wildcard expressions of fields to include - in the statistics. - :param groups: Comma-separated list of search groups to include in the search - statistics. - :param include_segment_file_sizes: If true, the call reports the aggregated disk - usage of each one of the Lucene index files (only applies if segment stats - are requested). - :param include_unloaded_segments: If set to true segment stats will include stats - for segments that are not currently loaded into memory - :param level: Indicates whether statistics are aggregated at the cluster, index, - or shard level. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - :param types: A comma-separated list of document types for the indexing index - metric. 
- """ - if ( - node_id not in SKIP_IN_PATH - and metric not in SKIP_IN_PATH - and index_metric not in SKIP_IN_PATH - ): - __path = f"/_nodes/{_quote(node_id)}/stats/{_quote(metric)}/{_quote(index_metric)}" - elif node_id not in SKIP_IN_PATH and metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/stats/{_quote(metric)}" - elif metric not in SKIP_IN_PATH and index_metric not in SKIP_IN_PATH: - __path = f"/_nodes/stats/{_quote(metric)}/{_quote(index_metric)}" - elif node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/stats" - elif metric not in SKIP_IN_PATH: - __path = f"/_nodes/stats/{_quote(metric)}" - else: - __path = "/_nodes/stats" - __query: t.Dict[str, t.Any] = {} - if completion_fields is not None: - __query["completion_fields"] = completion_fields - if error_trace is not None: - __query["error_trace"] = error_trace - if fielddata_fields is not None: - __query["fielddata_fields"] = fielddata_fields - if fields is not None: - __query["fields"] = fields - if filter_path is not None: - __query["filter_path"] = filter_path - if groups is not None: - __query["groups"] = groups - if human is not None: - __query["human"] = human - if include_segment_file_sizes is not None: - __query["include_segment_file_sizes"] = include_segment_file_sizes - if include_unloaded_segments is not None: - __query["include_unloaded_segments"] = include_unloaded_segments - if level is not None: - __query["level"] = level - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if types is not None: - __query["types"] = types - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def usage( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns low-level information about REST actions usage on nodes. 
- - ``_ - - :param node_id: A comma-separated list of node IDs or names to limit the returned - information; use `_local` to return information from the node you're connecting - to, leave empty to get information from all nodes - :param metric: Limit the information returned to the specified metrics - :param timeout: Explicit operation timeout - """ - if node_id not in SKIP_IN_PATH and metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/usage/{_quote(metric)}" - elif node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/usage" - elif metric not in SKIP_IN_PATH: - __path = f"/_nodes/usage/{_quote(metric)}" - else: - __path = "/_nodes/usage" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/rollup.py b/elasticsearch_serverless/_async/client/rollup.py deleted file mode 100644 index 9a314db..0000000 --- a/elasticsearch_serverless/_async/client/rollup.py +++ /dev/null @@ -1,440 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class RollupClient(NamespacedClient): - @_rewrite_parameters() - async def delete_job( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes an existing rollup job. 
- - ``_ - - :param id: The ID of the job to delete - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_rollup/job/{_quote(id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_jobs( - self, - *, - id: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the configuration, stats, and status of rollup jobs. - - ``_ - - :param id: The ID of the job(s) to fetch. Accepts glob patterns, or left blank - for all jobs - """ - if id not in SKIP_IN_PATH: - __path = f"/_rollup/job/{_quote(id)}" - else: - __path = "/_rollup/job" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_rollup_caps( - self, - *, - id: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the capabilities of any rollup jobs that have been configured for a specific - index or index pattern. - - ``_ - - :param id: The ID of the index to check rollup capabilities on, or left blank - for all jobs - """ - if id not in SKIP_IN_PATH: - __path = f"/_rollup/data/{_quote(id)}" - else: - __path = "/_rollup/data" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_rollup_index_caps( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the rollup capabilities of all jobs inside of a rollup index (e.g. the - index where rollup data is stored). - - ``_ - - :param index: The rollup index or index pattern to obtain rollup capabilities - from. 
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_rollup/data" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ignore_deprecated_options={"headers"}, - ) - async def put_job( - self, - *, - id: str, - cron: str, - groups: t.Mapping[str, t.Any], - index_pattern: str, - page_size: int, - rollup_index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - headers: t.Optional[ - t.Mapping[str, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - human: t.Optional[bool] = None, - metrics: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a rollup job. - - ``_ - - :param id: Identifier for the rollup job. This can be any alphanumeric string - and uniquely identifies the data that is associated with the rollup job. - The ID is persistent; it is stored with the rolled up data. If you create - a job, let it run for a while, then delete the job, the data that the job - rolled up is still be associated with this job ID. You cannot create a new - job with the same ID since that could lead to problems with mismatched job - configurations. - :param cron: A cron string which defines the intervals when the rollup job should - be executed. When the interval triggers, the indexer attempts to rollup the - data in the index pattern. The cron pattern is unrelated to the time interval - of the data being rolled up. For example, you may wish to create hourly rollups - of your document but to only run the indexer on a daily basis at midnight, - as defined by the cron. The cron pattern is defined just like a Watcher cron - schedule. - :param groups: Defines the grouping fields and aggregations that are defined - for this rollup job. These fields will then be available later for aggregating - into buckets. These aggs and fields can be used in any combination. Think - of the groups configuration as defining a set of tools that can later be - used in aggregations to partition the data. Unlike raw data, we have to think - ahead to which fields and aggregations might be used. Rollups provide enough - flexibility that you simply need to determine which fields are needed, not - in what order they are needed. - :param index_pattern: The index or index pattern to roll up. Supports wildcard-style - patterns (`logstash-*`). The job attempts to rollup the entire index or index-pattern. - :param page_size: The number of bucket results that are processed on each iteration - of the rollup indexer. A larger value tends to execute faster, but requires - more memory during processing. This value has no effect on how the data is - rolled up; it is merely used for tweaking the speed or memory cost of the - indexer. - :param rollup_index: The index that contains the rollup results. 
The index can - be shared with other rollup jobs. The data is stored so that it doesn’t interfere - with unrelated jobs. - :param headers: - :param metrics: Defines the metrics to collect for each grouping tuple. By default, - only the doc_counts are collected for each group. To make rollup useful, - you will often add metrics like averages, mins, maxes, etc. Metrics are defined - on a per-field basis and for each field you configure which metric should - be collected. - :param timeout: Time to wait for the request to complete. - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - if cron is None: - raise ValueError("Empty value passed for parameter 'cron'") - if groups is None: - raise ValueError("Empty value passed for parameter 'groups'") - if index_pattern is None: - raise ValueError("Empty value passed for parameter 'index_pattern'") - if page_size is None: - raise ValueError("Empty value passed for parameter 'page_size'") - if rollup_index is None: - raise ValueError("Empty value passed for parameter 'rollup_index'") - __path = f"/_rollup/job/{_quote(id)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if cron is not None: - __body["cron"] = cron - if groups is not None: - __body["groups"] = groups - if index_pattern is not None: - __body["index_pattern"] = index_pattern - if page_size is not None: - __body["page_size"] = page_size - if rollup_index is not None: - __body["rollup_index"] = rollup_index - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if headers is not None: - __body["headers"] = headers - if human is not None: - __query["human"] = human - if metrics is not None: - __body["metrics"] = metrics - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __body["timeout"] = timeout - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def rollup_search( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - query: t.Optional[t.Mapping[str, t.Any]] = None, - rest_total_hits_as_int: t.Optional[bool] = None, - size: t.Optional[int] = None, - typed_keys: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Enables searching rolled-up data using the standard query DSL. 
- - ``_ - - :param index: The indices or index-pattern(s) (containing rollup or regular data) - that should be searched - :param aggregations: - :param aggs: - :param query: - :param rest_total_hits_as_int: Indicates whether hits.total should be rendered - as an integer or an object in the rest search response - :param size: Must be zero if set, as rollups work on pre-aggregated data - :param typed_keys: Specify whether aggregation and suggester names should be - prefixed by their respective types in the response - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_rollup_search" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if aggregations is not None: - __body["aggregations"] = aggregations - if aggs is not None: - __body["aggs"] = aggs - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if query is not None: - __body["query"] = query - if rest_total_hits_as_int is not None: - __query["rest_total_hits_as_int"] = rest_total_hits_as_int - if size is not None: - __body["size"] = size - if typed_keys is not None: - __query["typed_keys"] = typed_keys - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def start_job( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Starts an existing, stopped rollup job. - - ``_ - - :param id: The ID of the job to start - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_rollup/job/{_quote(id)}/_start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def stop_job( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Stops an existing, started rollup job. - - ``_ - - :param id: The ID of the job to stop - :param timeout: Block for (at maximum) the specified duration while waiting for - the job to stop. Defaults to 30s. - :param wait_for_completion: True if the API should block until the job has fully - stopped, false if should be executed async. Defaults to false. 
- """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_rollup/job/{_quote(id)}/_stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/searchable_snapshots.py b/elasticsearch_serverless/_async/client/searchable_snapshots.py deleted file mode 100644 index e8ba046..0000000 --- a/elasticsearch_serverless/_async/client/searchable_snapshots.py +++ /dev/null @@ -1,265 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class SearchableSnapshotsClient(NamespacedClient): - @_rewrite_parameters() - async def cache_stats( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieve node-level cache statistics about searchable snapshots. 
- - ``_ - - :param node_id: A comma-separated list of node IDs or names to limit the returned - information; use `_local` to return information from the node you're connecting - to, leave empty to get information from all nodes - :param master_timeout: - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_searchable_snapshots/{_quote(node_id)}/cache/stats" - else: - __path = "/_searchable_snapshots/cache/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def clear_cache( - self, - *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - allow_no_indices: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - expand_wildcards: t.Optional[ - t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], - ], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Clear the cache of searchable snapshots. - - ``_ - - :param index: A comma-separated list of index names - :param allow_no_indices: Whether to ignore if a wildcard indices expression resolves - into no concrete indices. (This includes `_all` string or when no indices - have been specified) - :param expand_wildcards: Whether to expand wildcard expression to concrete indices - that are open, closed or both. 
- :param ignore_unavailable: Whether specified concrete indices should be ignored - when unavailable (missing or closed) - """ - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_searchable_snapshots/cache/clear" - else: - __path = "/_searchable_snapshots/cache/clear" - __query: t.Dict[str, t.Any] = {} - if allow_no_indices is not None: - __query["allow_no_indices"] = allow_no_indices - if error_trace is not None: - __query["error_trace"] = error_trace - if expand_wildcards is not None: - __query["expand_wildcards"] = expand_wildcards - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def mount( - self, - *, - repository: str, - snapshot: str, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_index_settings: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - index_settings: t.Optional[t.Mapping[str, t.Any]] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - renamed_index: t.Optional[str] = None, - storage: t.Optional[str] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Mount a snapshot as a searchable index. - - ``_ - - :param repository: The name of the repository containing the snapshot of the - index to mount - :param snapshot: The name of the snapshot of the index to mount - :param index: - :param ignore_index_settings: - :param index_settings: - :param master_timeout: Explicit operation timeout for connection to master node - :param renamed_index: - :param storage: Selects the kind of local storage used to accelerate searches. 
- Experimental, and defaults to `full_copy` - :param wait_for_completion: Should this request wait until the operation has - completed before returning - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - if index is None: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_mount" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if index is not None: - __body["index"] = index - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_index_settings is not None: - __body["ignore_index_settings"] = ignore_index_settings - if index_settings is not None: - __body["index_settings"] = index_settings - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if renamed_index is not None: - __body["renamed_index"] = renamed_index - if storage is not None: - __query["storage"] = storage - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def stats( - self, - *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - level: t.Optional[ - t.Union["t.Literal['cluster', 'indices', 'shards']", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieve shard-level statistics about searchable snapshots. - - ``_ - - :param index: A comma-separated list of index names - :param level: Return stats aggregated at cluster, index or shard level - """ - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_searchable_snapshots/stats" - else: - __path = "/_searchable_snapshots/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if level is not None: - __query["level"] = level - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/shutdown.py b/elasticsearch_serverless/_async/client/shutdown.py deleted file mode 100644 index a19cdc3..0000000 --- a/elasticsearch_serverless/_async/client/shutdown.py +++ /dev/null @@ -1,229 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class ShutdownClient(NamespacedClient): - @_rewrite_parameters() - async def delete_node( - self, - *, - node_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes a node from the shutdown list. Designed for indirect use by ECE/ESS and - ECK. Direct use is not supported. - - ``_ - - :param node_id: The node id of node to be removed from the shutdown state - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - __path = f"/_nodes/{_quote(node_id)}/shutdown" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_node( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieve status of a node or nodes that are currently marked as shutting down. - Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. - - ``_ - - :param node_id: Which node for which to retrieve the shutdown status - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. 
If no response is received before - the timeout expires, the request fails and returns an error. - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/shutdown" - else: - __path = "/_nodes/shutdown" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def put_node( - self, - *, - node_id: str, - reason: str, - type: t.Union["t.Literal['remove', 'replace', 'restart']", str], - allocation_delay: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - pretty: t.Optional[bool] = None, - target_node_name: t.Optional[str] = None, - timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. Direct - use is not supported. - - ``_ - - :param node_id: The node id of node to be shut down - :param reason: A human-readable reason that the node is being shut down. This - field provides information for other cluster operators; it does not affect - the shut down process. - :param type: Valid values are restart, remove, or replace. Use restart when you - need to temporarily shut down a node to perform an upgrade, make configuration - changes, or perform other maintenance. Because the node is expected to rejoin - the cluster, data is not migrated off of the node. Use remove when you need - to permanently remove a node from the cluster. The node is not marked ready - for shutdown until data is migrated off of the node Use replace to do a 1:1 - replacement of a node with another node. Certain allocation decisions will - be ignored (such as disk watermarks) in the interest of true replacement - of the source node with the target node. During a replace-type shutdown, - rollover and index creation may result in unassigned shards, and shrink may - fail until the replacement is complete. - :param allocation_delay: Only valid if type is restart. Controls how long Elasticsearch - will wait for the node to restart and join the cluster before reassigning - its shards to other nodes. This works the same as delaying allocation with - the index.unassigned.node_left.delayed_timeout setting. If you specify both - a restart allocation delay and an index-level allocation delay, the longer - of the two is used. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param target_node_name: Only valid if type is replace. Specifies the name of - the node that is replacing the node being shut down. 
Shards from the shut - down node are only allowed to be allocated to the target node, and no other - data will be allocated to the target node. During relocation of data certain - allocation rules are ignored, such as disk watermarks or user attribute filtering - rules. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - if reason is None: - raise ValueError("Empty value passed for parameter 'reason'") - if type is None: - raise ValueError("Empty value passed for parameter 'type'") - __path = f"/_nodes/{_quote(node_id)}/shutdown" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if reason is not None: - __body["reason"] = reason - if type is not None: - __body["type"] = type - if allocation_delay is not None: - __body["allocation_delay"] = allocation_delay - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if target_node_name is not None: - __body["target_node_name"] = target_node_name - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_async/client/slm.py b/elasticsearch_serverless/_async/client/slm.py deleted file mode 100644 index 35b0c16..0000000 --- a/elasticsearch_serverless/_async/client/slm.py +++ /dev/null @@ -1,377 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class SlmClient(NamespacedClient): - @_rewrite_parameters() - async def delete_lifecycle( - self, - *, - policy_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes an existing snapshot lifecycle policy. 
- - ``_ - - :param policy_id: The id of the snapshot lifecycle policy to remove - """ - if policy_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'policy_id'") - __path = f"/_slm/policy/{_quote(policy_id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def execute_lifecycle( - self, - *, - policy_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Immediately creates a snapshot according to the lifecycle policy, without waiting - for the scheduled time. - - ``_ - - :param policy_id: The id of the snapshot lifecycle policy to be executed - """ - if policy_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'policy_id'") - __path = f"/_slm/policy/{_quote(policy_id)}/_execute" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def execute_retention( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes any snapshots that are expired according to the policy's retention rules. - - ``_ - """ - __path = "/_slm/_execute_retention" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_lifecycle( - self, - *, - policy_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves one or more snapshot lifecycle policy definitions and information about - the latest snapshot attempts. 
- - ``_ - - :param policy_id: Comma-separated list of snapshot lifecycle policies to retrieve - """ - if policy_id not in SKIP_IN_PATH: - __path = f"/_slm/policy/{_quote(policy_id)}" - else: - __path = "/_slm/policy" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_stats( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns global and policy-level statistics about actions taken by snapshot lifecycle - management. - - ``_ - """ - __path = "/_slm/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_status( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the status of snapshot lifecycle management (SLM). - - ``_ - """ - __path = "/_slm/status" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def put_lifecycle( - self, - *, - policy_id: str, - config: t.Optional[t.Mapping[str, t.Any]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - name: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - repository: t.Optional[str] = None, - retention: t.Optional[t.Mapping[str, t.Any]] = None, - schedule: t.Optional[str] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates or updates a snapshot lifecycle policy. - - ``_ - - :param policy_id: ID for the snapshot lifecycle policy you want to create or - update. - :param config: Configuration for each snapshot created by the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. 
- :param name: Name automatically assigned to each snapshot created by the policy. - Date math is supported. To prevent conflicting snapshot names, a UUID is - automatically appended to each snapshot name. - :param repository: Repository used to store snapshots created by this policy. - This repository must exist prior to the policy’s creation. You can create - a repository using the snapshot repository API. - :param retention: Retention rules used to retain and delete snapshots created - by the policy. - :param schedule: Periodic or absolute schedule at which the policy creates snapshots. - SLM applies schedule changes immediately. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if policy_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'policy_id'") - __path = f"/_slm/policy/{_quote(policy_id)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if config is not None: - __body["config"] = config - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if name is not None: - __body["name"] = name - if pretty is not None: - __query["pretty"] = pretty - if repository is not None: - __body["repository"] = repository - if retention is not None: - __body["retention"] = retention - if schedule is not None: - __body["schedule"] = schedule - if timeout is not None: - __query["timeout"] = timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def start( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Turns on snapshot lifecycle management (SLM). - - ``_ - """ - __path = "/_slm/start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def stop( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Turns off snapshot lifecycle management (SLM). 
- - ``_ - """ - __path = "/_slm/stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/snapshot.py b/elasticsearch_serverless/_async/client/snapshot.py deleted file mode 100644 index 9bcf81f..0000000 --- a/elasticsearch_serverless/_async/client/snapshot.py +++ /dev/null @@ -1,773 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class SnapshotClient(NamespacedClient): - @_rewrite_parameters() - async def cleanup_repository( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes stale data from repository. - - ``_ - - :param name: Snapshot repository to clean up. - :param master_timeout: Period to wait for a connection to the master node. - :param timeout: Period to wait for a response. 
- """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_snapshot/{_quote(name)}/_cleanup" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def clone( - self, - *, - repository: str, - snapshot: str, - target_snapshot: str, - indices: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Clones indices from one snapshot into another snapshot in the same repository. - - ``_ - - :param repository: A repository name - :param snapshot: The name of the snapshot to clone from - :param target_snapshot: The name of the cloned snapshot to create - :param indices: - :param master_timeout: Explicit operation timeout for connection to master node - :param timeout: - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - if target_snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'target_snapshot'") - if indices is None: - raise ValueError("Empty value passed for parameter 'indices'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_clone/{_quote(target_snapshot)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if indices is not None: - __body["indices"] = indices - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def create( - self, - *, - repository: str, - snapshot: str, - error_trace: t.Optional[bool] = None, - feature_states: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - include_global_state: t.Optional[bool] = None, - indices: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - metadata: 
t.Optional[t.Mapping[str, t.Any]] = None, - partial: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a snapshot in a repository. - - ``_ - - :param repository: Repository for the snapshot. - :param snapshot: Name of the snapshot. Must be unique in the repository. - :param feature_states: Feature states to include in the snapshot. Each feature - state includes one or more system indices containing related data. You can - view a list of eligible features using the get features API. If `include_global_state` - is `true`, all current feature states are included by default. If `include_global_state` - is `false`, no feature states are included by default. - :param ignore_unavailable: If `true`, the request ignores data streams and indices - in `indices` that are missing or closed. If `false`, the request returns - an error for any data stream or index that is missing or closed. - :param include_global_state: If `true`, the current cluster state is included - in the snapshot. The cluster state includes persistent cluster settings, - composable index templates, legacy index templates, ingest pipelines, and - ILM policies. It also includes data stored in system indices, such as Watches - and task records (configurable via `feature_states`). - :param indices: Data streams and indices to include in the snapshot. Supports - multi-target syntax. Includes all data streams and indices by default. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param metadata: Optional metadata for the snapshot. May have any contents. Must - be less than 1024 bytes. This map is not automatically generated by Elasticsearch. - :param partial: If `true`, allows restoring a partial snapshot of indices with - unavailable shards. Only shards that were successfully included in the snapshot - will be restored. All missing shards will be recreated as empty. If `false`, - the entire restore operation will fail if one or more indices included in - the snapshot do not have all primary shards available. - :param wait_for_completion: If `true`, the request returns a response when the - snapshot is complete. If `false`, the request returns a response when the - snapshot initializes. 
- """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if feature_states is not None: - __body["feature_states"] = feature_states - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __body["ignore_unavailable"] = ignore_unavailable - if include_global_state is not None: - __body["include_global_state"] = include_global_state - if indices is not None: - __body["indices"] = indices - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if metadata is not None: - __body["metadata"] = metadata - if partial is not None: - __body["partial"] = partial - if pretty is not None: - __query["pretty"] = pretty - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def create_repository( - self, - *, - name: str, - settings: t.Mapping[str, t.Any], - type: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - repository: t.Optional[t.Mapping[str, t.Any]] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - verify: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a repository. 
- - ``_ - - :param name: A repository name - :param settings: - :param type: - :param master_timeout: Explicit operation timeout for connection to master node - :param repository: - :param timeout: Explicit operation timeout - :param verify: Whether to verify the repository after creation - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - if settings is None: - raise ValueError("Empty value passed for parameter 'settings'") - if type is None: - raise ValueError("Empty value passed for parameter 'type'") - __path = f"/_snapshot/{_quote(name)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if settings is not None: - __body["settings"] = settings - if type is not None: - __body["type"] = type - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if repository is not None: - __body["repository"] = repository - if timeout is not None: - __query["timeout"] = timeout - if verify is not None: - __query["verify"] = verify - __headers = {"accept": "application/json", "content-type": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def delete( - self, - *, - repository: str, - snapshot: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes one or more snapshots. - - ``_ - - :param repository: A repository name - :param snapshot: A comma-separated list of snapshot names - :param master_timeout: Explicit operation timeout for connection to master node - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def delete_repository( - self, - *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes a repository. 
- - ``_ - - :param name: Name of the snapshot repository to unregister. Wildcard (`*`) patterns - are supported. - :param master_timeout: Explicit operation timeout for connection to master node - :param timeout: Explicit operation timeout - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_snapshot/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get( - self, - *, - repository: str, - snapshot: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - after: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - from_sort_value: t.Optional[str] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - include_repository: t.Optional[bool] = None, - index_details: t.Optional[bool] = None, - index_names: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - offset: t.Optional[int] = None, - order: t.Optional[t.Union["t.Literal['asc', 'desc']", str]] = None, - pretty: t.Optional[bool] = None, - size: t.Optional[int] = None, - slm_policy_filter: t.Optional[str] = None, - sort: t.Optional[ - t.Union[ - "t.Literal['duration', 'failed_shard_count', 'index_count', 'name', 'repository', 'shard_count', 'start_time']", - str, - ] - ] = None, - verbose: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about a snapshot. - - ``_ - - :param repository: Comma-separated list of snapshot repository names used to - limit the request. Wildcard (*) expressions are supported. - :param snapshot: Comma-separated list of snapshot names to retrieve. Also accepts - wildcards (*). - To get information about all snapshots in a registered repository, - use a wildcard (*) or _all. - To get information about any snapshots that - are currently running, use _current. - :param after: Offset identifier to start pagination from as returned by the next - field in the response body. - :param from_sort_value: Value of the current sort column at which to start retrieval. - Can either be a string snapshot- or repository name when sorting by snapshot - or repository name, a millisecond time value or a number when sorting by - index- or shard count. - :param ignore_unavailable: If false, the request returns an error for any snapshots - that are unavailable. - :param include_repository: If true, returns the repository name in each snapshot. - :param index_details: If true, returns additional information about each index - in the snapshot comprising the number of shards in the index, the total size - of the index in bytes, and the maximum number of segments per shard in the - index. Defaults to false, meaning that this information is omitted. - :param index_names: If true, returns the name of each index in each snapshot. 
- :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param offset: Numeric offset to start pagination from based on the snapshots - matching this request. Using a non-zero value for this parameter is mutually - exclusive with using the after parameter. Defaults to 0. - :param order: Sort order. Valid values are asc for ascending and desc for descending - order. Defaults to asc, meaning ascending order. - :param size: Maximum number of snapshots to return. Defaults to 0 which means - return all that match the request without limit. - :param slm_policy_filter: Filter snapshots by a comma-separated list of SLM policy - names that snapshots belong to. Also accepts wildcards (*) and combinations - of wildcards followed by exclude patterns starting with -. To include snapshots - not created by an SLM policy you can use the special pattern _none that will - match all snapshots without an SLM policy. - :param sort: Allows setting a sort order for the result. Defaults to start_time, - i.e. sorting by snapshot start time stamp. - :param verbose: If true, returns additional information about each snapshot such - as the version of Elasticsearch which took the snapshot, the start and end - times of the snapshot, and the number of shards snapshotted. - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}" - __query: t.Dict[str, t.Any] = {} - if after is not None: - __query["after"] = after - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if from_sort_value is not None: - __query["from_sort_value"] = from_sort_value - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if include_repository is not None: - __query["include_repository"] = include_repository - if index_details is not None: - __query["index_details"] = index_details - if index_names is not None: - __query["index_names"] = index_names - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if offset is not None: - __query["offset"] = offset - if order is not None: - __query["order"] = order - if pretty is not None: - __query["pretty"] = pretty - if size is not None: - __query["size"] = size - if slm_policy_filter is not None: - __query["slm_policy_filter"] = slm_policy_filter - if sort is not None: - __query["sort"] = sort - if verbose is not None: - __query["verbose"] = verbose - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def get_repository( - self, - *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - local: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about a repository. 
- - ``_ - - :param name: A comma-separated list of repository names - :param local: Return local information, do not retrieve the state from master - node (default: false) - :param master_timeout: Explicit operation timeout for connection to master node - """ - if name not in SKIP_IN_PATH: - __path = f"/_snapshot/{_quote(name)}" - else: - __path = "/_snapshot" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if local is not None: - __query["local"] = local - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def restore( - self, - *, - repository: str, - snapshot: str, - error_trace: t.Optional[bool] = None, - feature_states: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_index_settings: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - ignore_unavailable: t.Optional[bool] = None, - include_aliases: t.Optional[bool] = None, - include_global_state: t.Optional[bool] = None, - index_settings: t.Optional[t.Mapping[str, t.Any]] = None, - indices: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - partial: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - rename_pattern: t.Optional[str] = None, - rename_replacement: t.Optional[str] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Restores a snapshot. 
- - ``_ - - :param repository: A repository name - :param snapshot: A snapshot name - :param feature_states: - :param ignore_index_settings: - :param ignore_unavailable: - :param include_aliases: - :param include_global_state: - :param index_settings: - :param indices: - :param master_timeout: Explicit operation timeout for connection to master node - :param partial: - :param rename_pattern: - :param rename_replacement: - :param wait_for_completion: Should this request wait until the operation has - completed before returning - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_restore" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if feature_states is not None: - __body["feature_states"] = feature_states - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_index_settings is not None: - __body["ignore_index_settings"] = ignore_index_settings - if ignore_unavailable is not None: - __body["ignore_unavailable"] = ignore_unavailable - if include_aliases is not None: - __body["include_aliases"] = include_aliases - if include_global_state is not None: - __body["include_global_state"] = include_global_state - if index_settings is not None: - __body["index_settings"] = index_settings - if indices is not None: - __body["indices"] = indices - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if partial is not None: - __body["partial"] = partial - if pretty is not None: - __query["pretty"] = pretty - if rename_pattern is not None: - __body["rename_pattern"] = rename_pattern - if rename_replacement is not None: - __body["rename_replacement"] = rename_replacement - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def status( - self, - *, - repository: t.Optional[str] = None, - snapshot: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about the status of a snapshot. 
- - ``_ - - :param repository: A repository name - :param snapshot: A comma-separated list of snapshot names - :param ignore_unavailable: Whether to ignore unavailable snapshots, defaults - to false which means a SnapshotMissingException is thrown - :param master_timeout: Explicit operation timeout for connection to master node - """ - if repository not in SKIP_IN_PATH and snapshot not in SKIP_IN_PATH: - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_status" - elif repository not in SKIP_IN_PATH: - __path = f"/_snapshot/{_quote(repository)}/_status" - else: - __path = "/_snapshot/_status" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def verify_repository( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Verifies a repository. - - ``_ - - :param name: A repository name - :param master_timeout: Explicit operation timeout for connection to master node - :param timeout: Explicit operation timeout - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_snapshot/{_quote(name)}/_verify" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/ssl.py b/elasticsearch_serverless/_async/client/ssl.py deleted file mode 100644 index 4205dee..0000000 --- a/elasticsearch_serverless/_async/client/ssl.py +++ /dev/null @@ -1,57 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class SslClient(NamespacedClient): - @_rewrite_parameters() - async def certificates( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about the X.509 certificates used to encrypt communications - in the cluster. - - ``_ - """ - __path = "/_ssl/certificates" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/text_structure.py b/elasticsearch_serverless/_async/client/text_structure.py deleted file mode 100644 index 107ad05..0000000 --- a/elasticsearch_serverless/_async/client/text_structure.py +++ /dev/null @@ -1,158 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class TextStructureClient(NamespacedClient): - @_rewrite_parameters( - body_name="text_files", - ) - async def find_structure( - self, - *, - text_files: t.Union[t.List[t.Any], t.Tuple[t.Any, ...]], - charset: t.Optional[str] = None, - column_names: t.Optional[str] = None, - delimiter: t.Optional[str] = None, - explain: t.Optional[bool] = None, - format: t.Optional[str] = None, - grok_pattern: t.Optional[str] = None, - has_header_row: t.Optional[bool] = None, - line_merge_size_limit: t.Optional[int] = None, - lines_to_sample: t.Optional[int] = None, - quote: t.Optional[str] = None, - should_trim_fields: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - timestamp_field: t.Optional[str] = None, - timestamp_format: t.Optional[str] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Finds the structure of a text file. The text file must contain data that is suitable - to be ingested into Elasticsearch. - - ``_ - - :param text_files: - :param charset: The text’s character set. It must be a character set that is - supported by the JVM that Elasticsearch uses. For example, UTF-8, UTF-16LE, - windows-1252, or EUC-JP. If this parameter is not specified, the structure - finder chooses an appropriate character set. - :param column_names: If you have set format to delimited, you can specify the - column names in a comma-separated list. If this parameter is not specified, - the structure finder uses the column names from the header row of the text. - If the text does not have a header role, columns are named "column1", "column2", - "column3", etc. - :param delimiter: If you have set format to delimited, you can specify the character - used to delimit the values in each row. Only a single character is supported; - the delimiter cannot have multiple characters. By default, the API considers - the following possibilities: comma, tab, semi-colon, and pipe (|). In this - default scenario, all rows must have the same number of fields for the delimited - format to be detected. If you specify a delimiter, up to 10% of the rows - can have a different number of columns than the first row. - :param explain: If this parameter is set to true, the response includes a field - named explanation, which is an array of strings that indicate how the structure - finder produced its result. - :param format: The high level structure of the text. Valid values are ndjson, - xml, delimited, and semi_structured_text. By default, the API chooses the - format. In this default scenario, all rows must have the same number of fields - for a delimited format to be detected. If the format is set to delimited - and the delimiter is not set, however, the API tolerates up to 5% of rows - that have a different number of columns than the first row. - :param grok_pattern: If you have set format to semi_structured_text, you can - specify a Grok pattern that is used to extract fields from every message - in the text. The name of the timestamp field in the Grok pattern must match - what is specified in the timestamp_field parameter. If that parameter is - not specified, the name of the timestamp field in the Grok pattern must match - "timestamp". If grok_pattern is not specified, the structure finder creates - a Grok pattern. 
- :param has_header_row: If you have set format to delimited, you can use this - parameter to indicate whether the column names are in the first row of the - text. If this parameter is not specified, the structure finder guesses based - on the similarity of the first row of the text to other rows. - :param line_merge_size_limit: The maximum number of characters in a message when - lines are merged to form messages while analyzing semi-structured text. If - you have extremely long messages you may need to increase this, but be aware - that this may lead to very long processing times if the way to group lines - into messages is misdetected. - :param lines_to_sample: The number of lines to include in the structural analysis, - starting from the beginning of the text. The minimum is 2; If the value of - this parameter is greater than the number of lines in the text, the analysis - proceeds (as long as there are at least two lines in the text) for all of - the lines. - :param quote: If you have set format to delimited, you can specify the character - used to quote the values in each row if they contain newlines or the delimiter - character. Only a single character is supported. If this parameter is not - specified, the default value is a double quote ("). If your delimited text - format does not use quoting, a workaround is to set this argument to a character - that does not appear anywhere in the sample. - :param should_trim_fields: If you have set format to delimited, you can specify - whether values between delimiters should have whitespace trimmed from them. - If this parameter is not specified and the delimiter is pipe (|), the default - value is true. Otherwise, the default value is false. - :param timeout: Sets the maximum amount of time that the structure analysis make - take. If the analysis is still running when the timeout expires then it will - be aborted. - :param timestamp_field: Optional parameter to specify the timestamp field in - the file - :param timestamp_format: The Java time format of the timestamp field in the text. 
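As a rough illustration of the removed endpoint, assuming `es` is an async client built against a stateful cluster: the sample lines go in the request body (sent as NDJSON) and the tuning options become query parameters. The rows and the response field shown are placeholders:

    sample = [
        {"name": "Leviathan Wakes", "release_date": "2011-06-02"},
        {"name": "Caliban's War", "release_date": "2012-06-26"},
    ]
    resp = await es.text_structure.find_structure(
        text_files=sample,
        lines_to_sample=1000,   # analyse at most the first 1000 lines
        timeout="25s",
    )
    print(resp["mappings"])     # assumed response field with the inferred mapping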
- """ - if text_files is None: - raise ValueError("Empty value passed for parameter 'text_files'") - __path = "/_text_structure/find_structure" - __query: t.Dict[str, t.Any] = {} - if charset is not None: - __query["charset"] = charset - if column_names is not None: - __query["column_names"] = column_names - if delimiter is not None: - __query["delimiter"] = delimiter - if explain is not None: - __query["explain"] = explain - if format is not None: - __query["format"] = format - if grok_pattern is not None: - __query["grok_pattern"] = grok_pattern - if has_header_row is not None: - __query["has_header_row"] = has_header_row - if line_merge_size_limit is not None: - __query["line_merge_size_limit"] = line_merge_size_limit - if lines_to_sample is not None: - __query["lines_to_sample"] = lines_to_sample - if quote is not None: - __query["quote"] = quote - if should_trim_fields is not None: - __query["should_trim_fields"] = should_trim_fields - if timeout is not None: - __query["timeout"] = timeout - if timestamp_field is not None: - __query["timestamp_field"] = timestamp_field - if timestamp_format is not None: - __query["timestamp_format"] = timestamp_format - __body = text_files - __headers = { - "accept": "application/json", - "content-type": "application/x-ndjson", - } - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_async/client/watcher.py b/elasticsearch_serverless/_async/client/watcher.py deleted file mode 100644 index 7c63f5f..0000000 --- a/elasticsearch_serverless/_async/client/watcher.py +++ /dev/null @@ -1,607 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class WatcherClient(NamespacedClient): - @_rewrite_parameters() - async def ack_watch( - self, - *, - watch_id: str, - action_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Acknowledges a watch, manually throttling the execution of the watch's actions. 
- - ``_ - - :param watch_id: Watch ID - :param action_id: A comma-separated list of the action ids to be acked - """ - if watch_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'watch_id'") - if watch_id not in SKIP_IN_PATH and action_id not in SKIP_IN_PATH: - __path = f"/_watcher/watch/{_quote(watch_id)}/_ack/{_quote(action_id)}" - elif watch_id not in SKIP_IN_PATH: - __path = f"/_watcher/watch/{_quote(watch_id)}/_ack" - else: - raise ValueError("Couldn't find a path for the given parameters") - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def activate_watch( - self, - *, - watch_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Activates a currently inactive watch. - - ``_ - - :param watch_id: Watch ID - """ - if watch_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'watch_id'") - __path = f"/_watcher/watch/{_quote(watch_id)}/_activate" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def deactivate_watch( - self, - *, - watch_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deactivates a currently active watch. - - ``_ - - :param watch_id: Watch ID - """ - if watch_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'watch_id'") - __path = f"/_watcher/watch/{_quote(watch_id)}/_deactivate" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def delete_watch( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes a watch from Watcher. 
- - ``_ - - :param id: Watch ID - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_watcher/watch/{_quote(id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def execute_watch( - self, - *, - id: t.Optional[str] = None, - action_modes: t.Optional[ - t.Mapping[ - str, - t.Union[ - "t.Literal['execute', 'force_execute', 'force_simulate', 'simulate', 'skip']", - str, - ], - ] - ] = None, - alternative_input: t.Optional[t.Mapping[str, t.Any]] = None, - debug: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_condition: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - record_execution: t.Optional[bool] = None, - simulated_actions: t.Optional[t.Mapping[str, t.Any]] = None, - trigger_data: t.Optional[t.Mapping[str, t.Any]] = None, - watch: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Forces the execution of a stored watch. - - ``_ - - :param id: Identifier for the watch. - :param action_modes: Determines how to handle the watch actions as part of the - watch execution. - :param alternative_input: When present, the watch uses this object as a payload - instead of executing its own input. - :param debug: Defines whether the watch runs in debug mode. - :param ignore_condition: When set to `true`, the watch execution uses the always - condition. This can also be specified as an HTTP parameter. - :param record_execution: When set to `true`, the watch record representing the - watch execution result is persisted to the `.watcher-history` index for the - current time. In addition, the status of the watch is updated, possibly throttling - subsequent executions. This can also be specified as an HTTP parameter. - :param simulated_actions: - :param trigger_data: This structure is parsed as the data of the trigger event - that will be used during the watch execution - :param watch: When present, this watch is used instead of the one specified in - the request. This watch is not persisted to the index and record_execution - cannot be set. 
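For context, a hedged sketch of a manual dry run against a cluster that still ships Watcher, with `es` again standing in for a configured async client and the watch ID purely hypothetical:

    resp = await es.watcher.execute_watch(
        id="my_watch",                    # hypothetical stored watch ID
        trigger_data={"triggered_time": "now", "scheduled_time": "now"},
        ignore_condition=True,            # run the actions regardless of the condition
        record_execution=False,           # do not persist a watch record
    )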
- """ - if id not in SKIP_IN_PATH: - __path = f"/_watcher/watch/{_quote(id)}/_execute" - else: - __path = "/_watcher/watch/_execute" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if action_modes is not None: - __body["action_modes"] = action_modes - if alternative_input is not None: - __body["alternative_input"] = alternative_input - if debug is not None: - __query["debug"] = debug - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_condition is not None: - __body["ignore_condition"] = ignore_condition - if pretty is not None: - __query["pretty"] = pretty - if record_execution is not None: - __body["record_execution"] = record_execution - if simulated_actions is not None: - __body["simulated_actions"] = simulated_actions - if trigger_data is not None: - __body["trigger_data"] = trigger_data - if watch is not None: - __body["watch"] = watch - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def get_watch( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves a watch by its ID. - - ``_ - - :param id: Watch ID - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_watcher/watch/{_quote(id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - async def put_watch( - self, - *, - id: str, - actions: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - active: t.Optional[bool] = None, - condition: t.Optional[t.Mapping[str, t.Any]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - if_primary_term: t.Optional[int] = None, - if_seq_no: t.Optional[int] = None, - input: t.Optional[t.Mapping[str, t.Any]] = None, - metadata: t.Optional[t.Mapping[str, t.Any]] = None, - pretty: t.Optional[bool] = None, - throttle_period: t.Optional[str] = None, - transform: t.Optional[t.Mapping[str, t.Any]] = None, - trigger: t.Optional[t.Mapping[str, t.Any]] = None, - version: t.Optional[int] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new watch, or updates an existing one. 
- - ``_ - - :param id: Watch ID - :param actions: - :param active: Specify whether the watch is in/active by default - :param condition: - :param if_primary_term: only update the watch if the last operation that has - changed the watch has the specified primary term - :param if_seq_no: only update the watch if the last operation that has changed - the watch has the specified sequence number - :param input: - :param metadata: - :param throttle_period: - :param transform: - :param trigger: - :param version: Explicit version number for concurrency control - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_watcher/watch/{_quote(id)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if actions is not None: - __body["actions"] = actions - if active is not None: - __query["active"] = active - if condition is not None: - __body["condition"] = condition - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if if_primary_term is not None: - __query["if_primary_term"] = if_primary_term - if if_seq_no is not None: - __query["if_seq_no"] = if_seq_no - if input is not None: - __body["input"] = input - if metadata is not None: - __body["metadata"] = metadata - if pretty is not None: - __query["pretty"] = pretty - if throttle_period is not None: - __body["throttle_period"] = throttle_period - if transform is not None: - __body["transform"] = transform - if trigger is not None: - __body["trigger"] = trigger - if version is not None: - __query["version"] = version - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - parameter_aliases={"from": "from_"}, - ) - async def query_watches( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - from_: t.Optional[int] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - query: t.Optional[t.Mapping[str, t.Any]] = None, - search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] - ] = None, - size: t.Optional[int] = None, - sort: t.Optional[ - t.Union[ - t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], - ] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves stored watches. - - ``_ - - :param from_: The offset from the first result to fetch. Needs to be non-negative. - :param query: Optional, query filter watches to be returned. - :param search_after: Optional search After to do pagination using last hit’s - sort values. - :param size: The number of hits to return. Needs to be non-negative. - :param sort: Optional sort definition. - """ - __path = "/_watcher/_query/watches" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - # The 'sort' parameter with a colon can't be encoded to the body. 
- if sort is not None and ( - (isinstance(sort, str) and ":" in sort) - or ( - isinstance(sort, (list, tuple)) - and all(isinstance(_x, str) for _x in sort) - and any(":" in _x for _x in sort) - ) - ): - __query["sort"] = sort - sort = None - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if from_ is not None: - __body["from"] = from_ - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if query is not None: - __body["query"] = query - if search_after is not None: - __body["search_after"] = search_after - if size is not None: - __body["size"] = size - if sort is not None: - __body["sort"] = sort - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - async def start( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Starts Watcher if it is not already running. - - ``_ - """ - __path = "/_watcher/_start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def stats( - self, - *, - metric: t.Optional[ - t.Union[ - t.Union[ - "t.Literal['_all', 'current_watches', 'pending_watches', 'queued_watches']", - str, - ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['_all', 'current_watches', 'pending_watches', 'queued_watches']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['_all', 'current_watches', 'pending_watches', 'queued_watches']", - str, - ], - ..., - ], - ], - ] - ] = None, - emit_stacktraces: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the current Watcher metrics. - - ``_ - - :param metric: Defines which additional metrics are included in the response. - :param emit_stacktraces: Defines whether stack traces are generated for each - watch that is running. 
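A correspondingly small sketch for the stats endpoint, offered only as an assumption-laden illustration of the removed API:

    resp = await es.watcher.stats(
        metric=["current_watches", "queued_watches"],   # subset of the documented metrics
        emit_stacktraces=False,
    )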
- """ - if metric not in SKIP_IN_PATH: - __path = f"/_watcher/stats/{_quote(metric)}" - else: - __path = "/_watcher/stats" - __query: t.Dict[str, t.Any] = {} - if emit_stacktraces is not None: - __query["emit_stacktraces"] = emit_stacktraces - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def stop( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Stops Watcher if it is running. - - ``_ - """ - __path = "/_watcher/_stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_async/client/xpack.py b/elasticsearch_serverless/_async/client/xpack.py deleted file mode 100644 index 0b5d4c3..0000000 --- a/elasticsearch_serverless/_async/client/xpack.py +++ /dev/null @@ -1,111 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class XPackClient(NamespacedClient): - def __getattr__(self, attr_name: str) -> t.Any: - return getattr(self.client, attr_name) - - # AUTO-GENERATED-API-DEFINITIONS # - - @_rewrite_parameters() - async def info( - self, - *, - accept_enterprise: t.Optional[bool] = None, - categories: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about the installed X-Pack features. - - ``_ - - :param accept_enterprise: If this param is used it must be set to true - :param categories: A comma-separated list of the information categories to include - in the response. For example, `build,license,features`. 
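And a last hedged example for the X-Pack info endpoint, assuming `es` points at a stateful deployment where `/_xpack` is still served:

    resp = await es.xpack.info(categories=["build", "license", "features"])
    print(resp["license"]["status"])   # assumed response layout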
- """ - __path = "/_xpack" - __query: t.Dict[str, t.Any] = {} - if accept_enterprise is not None: - __query["accept_enterprise"] = accept_enterprise - if categories is not None: - __query["categories"] = categories - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - async def usage( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves usage information about the installed X-Pack features. - - ``_ - - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - """ - __path = "/_xpack/usage" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return await self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/__init__.py b/elasticsearch_serverless/_sync/client/__init__.py index f16211e..453df38 100644 --- a/elasticsearch_serverless/_sync/client/__init__.py +++ b/elasticsearch_serverless/_sync/client/__init__.py @@ -35,38 +35,22 @@ from ...serializer import DEFAULT_SERIALIZERS from ._base import BaseClient, resolve_auth_headers from .async_search import AsyncSearchClient -from .autoscaling import AutoscalingClient from .cat import CatClient -from .ccr import CcrClient from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient from .enrich import EnrichClient from .eql import EqlClient -from .features import FeaturesClient -from .fleet import FleetClient from .graph import GraphClient -from .ilm import IlmClient from .indices import IndicesClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient -from .migration import MigrationClient from .ml import MlClient -from .monitoring import MonitoringClient -from .nodes import NodesClient from .query_ruleset import QueryRulesetClient -from .rollup import RollupClient from .search_application import SearchApplicationClient -from .searchable_snapshots import SearchableSnapshotsClient from .security import SecurityClient -from .shutdown import ShutdownClient -from .slm import SlmClient -from .snapshot import SnapshotClient from .sql import SqlClient -from .ssl import SslClient from .synonyms import SynonymsClient from .tasks import TasksClient -from .text_structure import TextStructureClient from .transform import TransformClient from .utils import ( _TYPE_HOST, @@ -78,8 +62,6 @@ is_requests_http_auth, is_requests_node_class, ) -from 
.watcher import WatcherClient -from .xpack import XPackClient logger = logging.getLogger("elasticsearch") @@ -294,42 +276,24 @@ def __init__( # namespaced clients for compatibility with API names self.async_search = AsyncSearchClient(self) - self.autoscaling = AutoscalingClient(self) self.cat = CatClient(self) self.cluster = ClusterClient(self) - self.fleet = FleetClient(self) - self.features = FeaturesClient(self) self.indices = IndicesClient(self) self.ingest = IngestClient(self) - self.nodes = NodesClient(self) - self.snapshot = SnapshotClient(self) self.tasks = TasksClient(self) - self.xpack = XPackClient(self) - self.ccr = CcrClient(self) - self.dangling_indices = DanglingIndicesClient(self) self.enrich = EnrichClient(self) self.eql = EqlClient(self) self.graph = GraphClient(self) - self.ilm = IlmClient(self) self.license = LicenseClient(self) self.logstash = LogstashClient(self) - self.migration = MigrationClient(self) self.ml = MlClient(self) - self.monitoring = MonitoringClient(self) self.query_ruleset = QueryRulesetClient(self) - self.rollup = RollupClient(self) self.search_application = SearchApplicationClient(self) - self.searchable_snapshots = SearchableSnapshotsClient(self) self.security = SecurityClient(self) - self.slm = SlmClient(self) - self.shutdown = ShutdownClient(self) self.sql = SqlClient(self) - self.ssl = SslClient(self) self.synonyms = SynonymsClient(self) - self.text_structure = TextStructureClient(self) self.transform = TransformClient(self) - self.watcher = WatcherClient(self) def __repr__(self) -> str: try: diff --git a/elasticsearch_serverless/_sync/client/autoscaling.py b/elasticsearch_serverless/_sync/client/autoscaling.py deleted file mode 100644 index be21e6f..0000000 --- a/elasticsearch_serverless/_sync/client/autoscaling.py +++ /dev/null @@ -1,175 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class AutoscalingClient(NamespacedClient): - @_rewrite_parameters() - def delete_autoscaling_policy( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. - Direct use is not supported. 
- - ``_ - - :param name: the name of the autoscaling policy - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_autoscaling/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_autoscaling_capacity( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets the current autoscaling capacity based on the configured autoscaling policy. - Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. - - ``_ - """ - __path = "/_autoscaling/capacity" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_autoscaling_policy( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. - Direct use is not supported. - - ``_ - - :param name: the name of the autoscaling policy - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_autoscaling/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_name="policy", - ) - def put_autoscaling_policy( - self, - *, - name: str, - policy: t.Mapping[str, t.Any], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and ECK. - Direct use is not supported. 
- - ``_ - - :param name: the name of the autoscaling policy - :param policy: - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - if policy is None: - raise ValueError("Empty value passed for parameter 'policy'") - __path = f"/_autoscaling/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __body = policy - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_sync/client/ccr.py b/elasticsearch_serverless/_sync/client/ccr.py deleted file mode 100644 index bb19eb7..0000000 --- a/elasticsearch_serverless/_sync/client/ccr.py +++ /dev/null @@ -1,749 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class CcrClient(NamespacedClient): - @_rewrite_parameters() - def delete_auto_follow_pattern( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes auto-follow patterns. - - ``_ - - :param name: The name of the auto follow pattern. 
- """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ccr/auto_follow/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def follow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - leader_index: t.Optional[str] = None, - max_outstanding_read_requests: t.Optional[int] = None, - max_outstanding_write_requests: t.Optional[int] = None, - max_read_request_operation_count: t.Optional[int] = None, - max_read_request_size: t.Optional[str] = None, - max_retry_delay: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - max_write_buffer_count: t.Optional[int] = None, - max_write_buffer_size: t.Optional[str] = None, - max_write_request_operation_count: t.Optional[int] = None, - max_write_request_size: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - read_poll_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - remote_cluster: t.Optional[str] = None, - wait_for_active_shards: t.Optional[ - t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new follower index configured to follow the referenced leader index. - - ``_ - - :param index: The name of the follower index - :param leader_index: - :param max_outstanding_read_requests: - :param max_outstanding_write_requests: - :param max_read_request_operation_count: - :param max_read_request_size: - :param max_retry_delay: - :param max_write_buffer_count: - :param max_write_buffer_size: - :param max_write_request_operation_count: - :param max_write_request_size: - :param read_poll_timeout: - :param remote_cluster: - :param wait_for_active_shards: Sets the number of shard copies that must be active - before returning. Defaults to 0. 
Set to `all` for all shard copies, otherwise - set to any non-negative value less than or equal to the total number of copies - for the shard (number of replicas + 1) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/follow" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if leader_index is not None: - __body["leader_index"] = leader_index - if max_outstanding_read_requests is not None: - __body["max_outstanding_read_requests"] = max_outstanding_read_requests - if max_outstanding_write_requests is not None: - __body["max_outstanding_write_requests"] = max_outstanding_write_requests - if max_read_request_operation_count is not None: - __body["max_read_request_operation_count"] = ( - max_read_request_operation_count - ) - if max_read_request_size is not None: - __body["max_read_request_size"] = max_read_request_size - if max_retry_delay is not None: - __body["max_retry_delay"] = max_retry_delay - if max_write_buffer_count is not None: - __body["max_write_buffer_count"] = max_write_buffer_count - if max_write_buffer_size is not None: - __body["max_write_buffer_size"] = max_write_buffer_size - if max_write_request_operation_count is not None: - __body["max_write_request_operation_count"] = ( - max_write_request_operation_count - ) - if max_write_request_size is not None: - __body["max_write_request_size"] = max_write_request_size - if pretty is not None: - __query["pretty"] = pretty - if read_poll_timeout is not None: - __body["read_poll_timeout"] = read_poll_timeout - if remote_cluster is not None: - __body["remote_cluster"] = remote_cluster - if wait_for_active_shards is not None: - __query["wait_for_active_shards"] = wait_for_active_shards - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def follow_info( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about all follower indices, including parameters and status - for each follower index - - ``_ - - :param index: A comma-separated list of index patterns; use `_all` to perform - the operation on all indices - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/info" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def follow_stats( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, 
t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves follower stats. return shard-level stats about the following tasks - associated with each shard for the specified indices. - - ``_ - - :param index: A comma-separated list of index patterns; use `_all` to perform - the operation on all indices - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def forget_follower( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - follower_cluster: t.Optional[str] = None, - follower_index: t.Optional[str] = None, - follower_index_uuid: t.Optional[str] = None, - human: t.Optional[bool] = None, - leader_remote_cluster: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes the follower retention leases from the leader. - - ``_ - - :param index: the name of the leader index for which specified follower retention - leases should be removed - :param follower_cluster: - :param follower_index: - :param follower_index_uuid: - :param leader_remote_cluster: - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/forget_follower" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if follower_cluster is not None: - __body["follower_cluster"] = follower_cluster - if follower_index is not None: - __body["follower_index"] = follower_index - if follower_index_uuid is not None: - __body["follower_index_uuid"] = follower_index_uuid - if human is not None: - __query["human"] = human - if leader_remote_cluster is not None: - __body["leader_remote_cluster"] = leader_remote_cluster - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def get_auto_follow_pattern( - self, - *, - name: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets configured auto-follow patterns. Returns the specified auto-follow pattern - collection. - - ``_ - - :param name: Specifies the auto-follow pattern collection that you want to retrieve. - If you do not specify a name, the API returns information for all collections. 
- """ - if name not in SKIP_IN_PATH: - __path = f"/_ccr/auto_follow/{_quote(name)}" - else: - __path = "/_ccr/auto_follow" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def pause_auto_follow_pattern( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Pauses an auto-follow pattern - - ``_ - - :param name: The name of the auto follow pattern that should pause discovering - new indices to follow. - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ccr/auto_follow/{_quote(name)}/pause" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def pause_follow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Pauses a follower index. The follower index will not fetch any additional operations - from the leader index. - - ``_ - - :param index: The name of the follower index that should pause following its - leader index. 
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/pause_follow" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def put_auto_follow_pattern( - self, - *, - name: str, - remote_cluster: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - follow_index_pattern: t.Optional[str] = None, - human: t.Optional[bool] = None, - leader_index_exclusion_patterns: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - leader_index_patterns: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - max_outstanding_read_requests: t.Optional[int] = None, - max_outstanding_write_requests: t.Optional[int] = None, - max_read_request_operation_count: t.Optional[int] = None, - max_read_request_size: t.Optional[t.Union[int, str]] = None, - max_retry_delay: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - max_write_buffer_count: t.Optional[int] = None, - max_write_buffer_size: t.Optional[t.Union[int, str]] = None, - max_write_request_operation_count: t.Optional[int] = None, - max_write_request_size: t.Optional[t.Union[int, str]] = None, - pretty: t.Optional[bool] = None, - read_poll_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - settings: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new named collection of auto-follow patterns against a specified remote - cluster. Newly created indices on the remote cluster matching any of the specified - patterns will be automatically configured as follower indices. - - ``_ - - :param name: The name of the collection of auto-follow patterns. - :param remote_cluster: The remote cluster containing the leader indices to match - against. - :param follow_index_pattern: The name of follower index. The template {{leader_index}} - can be used to derive the name of the follower index from the name of the - leader index. When following a data stream, use {{leader_index}}; CCR does - not support changes to the names of a follower data stream’s backing indices. - :param leader_index_exclusion_patterns: An array of simple index patterns that - can be used to exclude indices from being auto-followed. Indices in the remote - cluster whose names are matching one or more leader_index_patterns and one - or more leader_index_exclusion_patterns won’t be followed. - :param leader_index_patterns: An array of simple index patterns to match against - indices in the remote cluster specified by the remote_cluster field. - :param max_outstanding_read_requests: The maximum number of outstanding reads - requests from the remote cluster. - :param max_outstanding_write_requests: The maximum number of outstanding reads - requests from the remote cluster. - :param max_read_request_operation_count: The maximum number of operations to - pull per read from the remote cluster. 
- :param max_read_request_size: The maximum size in bytes of per read of a batch - of operations pulled from the remote cluster. - :param max_retry_delay: The maximum time to wait before retrying an operation - that failed exceptionally. An exponential backoff strategy is employed when - retrying. - :param max_write_buffer_count: The maximum number of operations that can be queued - for writing. When this limit is reached, reads from the remote cluster will - be deferred until the number of queued operations goes below the limit. - :param max_write_buffer_size: The maximum total bytes of operations that can - be queued for writing. When this limit is reached, reads from the remote - cluster will be deferred until the total bytes of queued operations goes - below the limit. - :param max_write_request_operation_count: The maximum number of operations per - bulk write request executed on the follower. - :param max_write_request_size: The maximum total bytes of operations per bulk - write request executed on the follower. - :param read_poll_timeout: The maximum time to wait for new operations on the - remote cluster when the follower index is synchronized with the leader index. - When the timeout has elapsed, the poll for operations will return to the - follower so that it can update some statistics. Then the follower will immediately - attempt to read from the leader again. - :param settings: Settings to override from the leader index. Note that certain - settings can not be overrode (e.g., index.number_of_shards). - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - if remote_cluster is None: - raise ValueError("Empty value passed for parameter 'remote_cluster'") - __path = f"/_ccr/auto_follow/{_quote(name)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if remote_cluster is not None: - __body["remote_cluster"] = remote_cluster - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if follow_index_pattern is not None: - __body["follow_index_pattern"] = follow_index_pattern - if human is not None: - __query["human"] = human - if leader_index_exclusion_patterns is not None: - __body["leader_index_exclusion_patterns"] = leader_index_exclusion_patterns - if leader_index_patterns is not None: - __body["leader_index_patterns"] = leader_index_patterns - if max_outstanding_read_requests is not None: - __body["max_outstanding_read_requests"] = max_outstanding_read_requests - if max_outstanding_write_requests is not None: - __body["max_outstanding_write_requests"] = max_outstanding_write_requests - if max_read_request_operation_count is not None: - __body["max_read_request_operation_count"] = ( - max_read_request_operation_count - ) - if max_read_request_size is not None: - __body["max_read_request_size"] = max_read_request_size - if max_retry_delay is not None: - __body["max_retry_delay"] = max_retry_delay - if max_write_buffer_count is not None: - __body["max_write_buffer_count"] = max_write_buffer_count - if max_write_buffer_size is not None: - __body["max_write_buffer_size"] = max_write_buffer_size - if max_write_request_operation_count is not None: - __body["max_write_request_operation_count"] = ( - max_write_request_operation_count - ) - if max_write_request_size is not None: - __body["max_write_request_size"] = max_write_request_size - if pretty is not None: - __query["pretty"] = pretty - if read_poll_timeout is not None: - 
__body["read_poll_timeout"] = read_poll_timeout - if settings is not None: - __body["settings"] = settings - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def resume_auto_follow_pattern( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Resumes an auto-follow pattern that has been paused - - ``_ - - :param name: The name of the auto follow pattern to resume discovering new indices - to follow. - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ccr/auto_follow/{_quote(name)}/resume" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def resume_follow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - max_outstanding_read_requests: t.Optional[int] = None, - max_outstanding_write_requests: t.Optional[int] = None, - max_read_request_operation_count: t.Optional[int] = None, - max_read_request_size: t.Optional[str] = None, - max_retry_delay: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - max_write_buffer_count: t.Optional[int] = None, - max_write_buffer_size: t.Optional[str] = None, - max_write_request_operation_count: t.Optional[int] = None, - max_write_request_size: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - read_poll_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Resumes a follower index that has been paused - - ``_ - - :param index: The name of the follow index to resume following. 
- :param max_outstanding_read_requests: - :param max_outstanding_write_requests: - :param max_read_request_operation_count: - :param max_read_request_size: - :param max_retry_delay: - :param max_write_buffer_count: - :param max_write_buffer_size: - :param max_write_request_operation_count: - :param max_write_request_size: - :param read_poll_timeout: - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/resume_follow" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if max_outstanding_read_requests is not None: - __body["max_outstanding_read_requests"] = max_outstanding_read_requests - if max_outstanding_write_requests is not None: - __body["max_outstanding_write_requests"] = max_outstanding_write_requests - if max_read_request_operation_count is not None: - __body["max_read_request_operation_count"] = ( - max_read_request_operation_count - ) - if max_read_request_size is not None: - __body["max_read_request_size"] = max_read_request_size - if max_retry_delay is not None: - __body["max_retry_delay"] = max_retry_delay - if max_write_buffer_count is not None: - __body["max_write_buffer_count"] = max_write_buffer_count - if max_write_buffer_size is not None: - __body["max_write_buffer_size"] = max_write_buffer_size - if max_write_request_operation_count is not None: - __body["max_write_request_operation_count"] = ( - max_write_request_operation_count - ) - if max_write_request_size is not None: - __body["max_write_request_size"] = max_write_request_size - if pretty is not None: - __query["pretty"] = pretty - if read_poll_timeout is not None: - __body["read_poll_timeout"] = read_poll_timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def stats( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets all stats related to cross-cluster replication. - - ``_ - """ - __path = "/_ccr/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def unfollow( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Stops the following task associated with a follower index and removes index metadata - and settings associated with cross-cluster replication. 
- - ``_ - - :param index: The name of the follower index that should be turned into a regular - index. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ccr/unfollow" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/dangling_indices.py b/elasticsearch_serverless/_sync/client/dangling_indices.py deleted file mode 100644 index b742998..0000000 --- a/elasticsearch_serverless/_sync/client/dangling_indices.py +++ /dev/null @@ -1,162 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class DanglingIndicesClient(NamespacedClient): - @_rewrite_parameters() - def delete_dangling_index( - self, - *, - index_uuid: str, - accept_data_loss: bool, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes the specified dangling index - - ``_ - - :param index_uuid: The UUID of the dangling index - :param accept_data_loss: Must be set to true in order to delete the dangling - index - :param master_timeout: Specify timeout for connection to master - :param timeout: Explicit operation timeout - """ - if index_uuid in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index_uuid'") - if accept_data_loss is None: - raise ValueError("Empty value passed for parameter 'accept_data_loss'") - __path = f"/_dangling/{_quote(index_uuid)}" - __query: t.Dict[str, t.Any] = {} - if accept_data_loss is not None: - __query["accept_data_loss"] = accept_data_loss - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - 
__headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def import_dangling_index( - self, - *, - index_uuid: str, - accept_data_loss: bool, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Imports the specified dangling index - - ``_ - - :param index_uuid: The UUID of the dangling index - :param accept_data_loss: Must be set to true in order to import the dangling - index - :param master_timeout: Specify timeout for connection to master - :param timeout: Explicit operation timeout - """ - if index_uuid in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index_uuid'") - if accept_data_loss is None: - raise ValueError("Empty value passed for parameter 'accept_data_loss'") - __path = f"/_dangling/{_quote(index_uuid)}" - __query: t.Dict[str, t.Any] = {} - if accept_data_loss is not None: - __query["accept_data_loss"] = accept_data_loss - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def list_dangling_indices( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns all dangling indices. - - ``_ - """ - __path = "/_dangling" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/features.py b/elasticsearch_serverless/_sync/client/features.py deleted file mode 100644 index 43dca35..0000000 --- a/elasticsearch_serverless/_sync/client/features.py +++ /dev/null @@ -1,88 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class FeaturesClient(NamespacedClient): - @_rewrite_parameters() - def get_features( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Gets a list of features which can be included in snapshots using the feature_states - field when creating a snapshot - - ``_ - """ - __path = "/_features" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def reset_features( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Resets the internal state of features, usually by deleting system indices - - ``_ - """ - __path = "/_features/_reset" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/fleet.py b/elasticsearch_serverless/_sync/client/fleet.py deleted file mode 100644 index 20357ef..0000000 --- a/elasticsearch_serverless/_sync/client/fleet.py +++ /dev/null @@ -1,631 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class FleetClient(NamespacedClient): - @_rewrite_parameters() - def global_checkpoints( - self, - *, - index: str, - checkpoints: t.Optional[t.Union[t.List[int], t.Tuple[int, ...]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - wait_for_advance: t.Optional[bool] = None, - wait_for_index: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the current global checkpoints for an index. This API is design for internal - use by the fleet server project. - - ``_ - - :param index: A single index or index alias that resolves to a single index. - :param checkpoints: A comma separated list of previous global checkpoints. When - used in combination with `wait_for_advance`, the API will only return once - the global checkpoints advances past the checkpoints. Providing an empty - list will cause Elasticsearch to immediately return the current global checkpoints. - :param timeout: Period to wait for a global checkpoints to advance past `checkpoints`. - :param wait_for_advance: A boolean value which controls whether to wait (until - the timeout) for the global checkpoints to advance past the provided `checkpoints`. - :param wait_for_index: A boolean value which controls whether to wait (until - the timeout) for the target index to exist and all primary shards be active. - Can only be true when `wait_for_advance` is true. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_fleet/global_checkpoints" - __query: t.Dict[str, t.Any] = {} - if checkpoints is not None: - __query["checkpoints"] = checkpoints - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if wait_for_advance is not None: - __query["wait_for_advance"] = wait_for_advance - if wait_for_index is not None: - __query["wait_for_index"] = wait_for_index - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_name="searches", - ) - def msearch( - self, - *, - searches: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - index: t.Optional[str] = None, - allow_no_indices: t.Optional[bool] = None, - allow_partial_search_results: t.Optional[bool] = None, - ccs_minimize_roundtrips: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - expand_wildcards: t.Optional[ - t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], - ], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_throttled: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - max_concurrent_searches: t.Optional[int] = None, - max_concurrent_shard_requests: t.Optional[int] = None, - pre_filter_shard_size: t.Optional[int] = None, - pretty: t.Optional[bool] = None, - rest_total_hits_as_int: t.Optional[bool] = None, - search_type: t.Optional[ - t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] - ] = None, - typed_keys: t.Optional[bool] = None, - wait_for_checkpoints: t.Optional[ - t.Union[t.List[int], t.Tuple[int, ...]] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Multi Search API where the search will only be executed after specified checkpoints - are available due to a refresh. This API is designed for internal use by the - fleet server project. - - :param searches: - :param index: A single target to search. If the target is an index alias, it - must resolve to a single index. - :param allow_no_indices: If false, the request returns an error if any wildcard - expression, index alias, or _all value targets only missing or closed indices. - This behavior applies even if the request targets other open indices. For - example, a request targeting foo*,bar* returns an error if an index starts - with foo but no index starts with bar. - :param allow_partial_search_results: If true, returns partial results if there - are shard request timeouts or [shard failures](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-replication.html#shard-failures). - If false, returns an error with no partial results. Defaults to the configured - cluster setting `search.default_allow_partial_results` which is true by default. - :param ccs_minimize_roundtrips: If true, network roundtrips between the coordinating - node and remote clusters are minimized for cross-cluster search requests. - :param expand_wildcards: Type of index that wildcard expressions can match. If - the request can target data streams, this argument determines whether wildcard - expressions match hidden data streams. - :param ignore_throttled: If true, concrete, expanded or aliased indices are ignored - when frozen. - :param ignore_unavailable: If true, missing or closed indices are not included - in the response. - :param max_concurrent_searches: Maximum number of concurrent searches the multi - search API can execute. - :param max_concurrent_shard_requests: Maximum number of concurrent shard requests - that each sub-search request executes per node. - :param pre_filter_shard_size: Defines a threshold that enforces a pre-filter - roundtrip to prefilter search shards based on query rewriting if the number - of shards the search request expands to exceeds the threshold. 
This filter - roundtrip can limit the number of shards significantly if for instance a - shard can not match any documents based on its rewrite method i.e., if date - filters are mandatory to match but the shard bounds and the query are disjoint. - :param rest_total_hits_as_int: If true, hits.total are returned as an integer - in the response. Defaults to false, which returns an object. - :param search_type: Indicates whether global term and document frequencies should - be used when scoring returned documents. - :param typed_keys: Specifies whether aggregation and suggester names should be - prefixed by their respective types in the response. - :param wait_for_checkpoints: A comma separated list of checkpoints. When configured, - the search API will only be executed on a shard after the relevant checkpoint - has become visible for search. Defaults to an empty list which will cause - Elasticsearch to immediately execute the search. - """ - if searches is None: - raise ValueError("Empty value passed for parameter 'searches'") - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_fleet/_fleet_msearch" - else: - __path = "/_fleet/_fleet_msearch" - __query: t.Dict[str, t.Any] = {} - if allow_no_indices is not None: - __query["allow_no_indices"] = allow_no_indices - if allow_partial_search_results is not None: - __query["allow_partial_search_results"] = allow_partial_search_results - if ccs_minimize_roundtrips is not None: - __query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips - if error_trace is not None: - __query["error_trace"] = error_trace - if expand_wildcards is not None: - __query["expand_wildcards"] = expand_wildcards - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_throttled is not None: - __query["ignore_throttled"] = ignore_throttled - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if max_concurrent_searches is not None: - __query["max_concurrent_searches"] = max_concurrent_searches - if max_concurrent_shard_requests is not None: - __query["max_concurrent_shard_requests"] = max_concurrent_shard_requests - if pre_filter_shard_size is not None: - __query["pre_filter_shard_size"] = pre_filter_shard_size - if pretty is not None: - __query["pretty"] = pretty - if rest_total_hits_as_int is not None: - __query["rest_total_hits_as_int"] = rest_total_hits_as_int - if search_type is not None: - __query["search_type"] = search_type - if typed_keys is not None: - __query["typed_keys"] = typed_keys - if wait_for_checkpoints is not None: - __query["wait_for_checkpoints"] = wait_for_checkpoints - __body = searches - __headers = { - "accept": "application/json", - "content-type": "application/x-ndjson", - } - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - parameter_aliases={ - "_source": "source", - "_source_excludes": "source_excludes", - "_source_includes": "source_includes", - "from": "from_", - }, - ) - def search( - self, - *, - index: str, - aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - allow_no_indices: t.Optional[bool] = None, - allow_partial_search_results: t.Optional[bool] = None, - analyze_wildcard: t.Optional[bool] = None, - analyzer: t.Optional[str] = None, - batched_reduce_size: t.Optional[int] = None, - 
ccs_minimize_roundtrips: t.Optional[bool] = None, - collapse: t.Optional[t.Mapping[str, t.Any]] = None, - default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, - df: t.Optional[str] = None, - docvalue_fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - error_trace: t.Optional[bool] = None, - expand_wildcards: t.Optional[ - t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], - ], - ] - ] = None, - explain: t.Optional[bool] = None, - ext: t.Optional[t.Mapping[str, t.Any]] = None, - fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - from_: t.Optional[int] = None, - highlight: t.Optional[t.Mapping[str, t.Any]] = None, - human: t.Optional[bool] = None, - ignore_throttled: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - indices_boost: t.Optional[ - t.Union[t.List[t.Mapping[str, float]], t.Tuple[t.Mapping[str, float], ...]] - ] = None, - lenient: t.Optional[bool] = None, - max_concurrent_shard_requests: t.Optional[int] = None, - min_compatible_shard_node: t.Optional[str] = None, - min_score: t.Optional[float] = None, - pit: t.Optional[t.Mapping[str, t.Any]] = None, - post_filter: t.Optional[t.Mapping[str, t.Any]] = None, - pre_filter_shard_size: t.Optional[int] = None, - preference: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - profile: t.Optional[bool] = None, - q: t.Optional[str] = None, - query: t.Optional[t.Mapping[str, t.Any]] = None, - request_cache: t.Optional[bool] = None, - rescore: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - ] - ] = None, - rest_total_hits_as_int: t.Optional[bool] = None, - routing: t.Optional[str] = None, - runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - scroll: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] - ] = None, - search_type: t.Optional[ - t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] - ] = None, - seq_no_primary_term: t.Optional[bool] = None, - size: t.Optional[int] = None, - slice: t.Optional[t.Mapping[str, t.Any]] = None, - sort: t.Optional[ - t.Union[ - t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], - ] - ] = None, - source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - suggest: t.Optional[t.Mapping[str, t.Any]] = None, - suggest_field: t.Optional[str] = None, - suggest_mode: t.Optional[ - t.Union["t.Literal['always', 'missing', 'popular']", str] - ] = None, - suggest_size: t.Optional[int] = None, - suggest_text: t.Optional[str] = None, - terminate_after: t.Optional[int] = None, - timeout: t.Optional[str] = None, - track_scores: t.Optional[bool] = None, - track_total_hits: t.Optional[t.Union[bool, int]] = None, - typed_keys: t.Optional[bool] = None, - version: t.Optional[bool] = None, - wait_for_checkpoints: t.Optional[ - t.Union[t.List[int], t.Tuple[int, ...]] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Search API where the search will only be executed after specified checkpoints - are available due to a refresh. This API is designed for internal use by the - fleet server project. - - :param index: A single target to search. If the target is an index alias, it - must resolve to a single index. - :param aggregations: - :param aggs: - :param allow_no_indices: - :param allow_partial_search_results: If true, returns partial results if there - are shard request timeouts or [shard failures](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-replication.html#shard-failures). - If false, returns an error with no partial results. Defaults to the configured - cluster setting `search.default_allow_partial_results` which is true by default. - :param analyze_wildcard: - :param analyzer: - :param batched_reduce_size: - :param ccs_minimize_roundtrips: - :param collapse: - :param default_operator: - :param df: - :param docvalue_fields: Array of wildcard (*) patterns. The request returns doc - values for field names matching these patterns in the hits.fields property - of the response. - :param expand_wildcards: - :param explain: If true, returns detailed information about score computation - as part of a hit. - :param ext: Configuration of search extensions defined by Elasticsearch plugins. - :param fields: Array of wildcard (*) patterns. The request returns values for - field names matching these patterns in the hits.fields property of the response. 
- :param from_: Starting document offset. By default, you cannot page through more - than 10,000 hits using the from and size parameters. To page through more - hits, use the search_after parameter. - :param highlight: - :param ignore_throttled: - :param ignore_unavailable: - :param indices_boost: Boosts the _score of documents from specified indices. - :param lenient: - :param max_concurrent_shard_requests: - :param min_compatible_shard_node: - :param min_score: Minimum _score for matching documents. Documents with a lower - _score are not included in the search results. - :param pit: Limits the search to a point in time (PIT). If you provide a PIT, - you cannot specify an in the request path. - :param post_filter: - :param pre_filter_shard_size: - :param preference: - :param profile: - :param q: - :param query: Defines the search definition using the Query DSL. - :param request_cache: - :param rescore: - :param rest_total_hits_as_int: - :param routing: - :param runtime_mappings: Defines one or more runtime fields in the search request. - These fields take precedence over mapped fields with the same name. - :param script_fields: Retrieve a script evaluation (based on different fields) - for each hit. - :param scroll: - :param search_after: - :param search_type: - :param seq_no_primary_term: If true, returns sequence number and primary term - of the last modification of each hit. See Optimistic concurrency control. - :param size: The number of hits to return. By default, you cannot page through - more than 10,000 hits using the from and size parameters. To page through - more hits, use the search_after parameter. - :param slice: - :param sort: - :param source: Indicates which source fields are returned for matching documents. - These fields are returned in the hits._source property of the search response. - :param source_excludes: - :param source_includes: - :param stats: Stats groups to associate with the search. Each group maintains - a statistics aggregation for its associated searches. You can retrieve these - stats using the indices stats API. - :param stored_fields: List of stored fields to return as part of a hit. If no - fields are specified, no stored fields are included in the response. If this - field is specified, the _source parameter defaults to false. You can pass - _source: true to return both source fields and stored fields in the search - response. - :param suggest: - :param suggest_field: Specifies which field to use for suggestions. - :param suggest_mode: - :param suggest_size: - :param suggest_text: The source text for which the suggestions should be returned. - :param terminate_after: Maximum number of documents to collect for each shard. - If a query reaches this limit, Elasticsearch terminates the query early. - Elasticsearch collects documents before sorting. Defaults to 0, which does - not terminate query execution early. - :param timeout: Specifies the period of time to wait for a response from each - shard. If no response is received before the timeout expires, the request - fails and returns an error. Defaults to no timeout. - :param track_scores: If true, calculate and return document scores, even if the - scores are not used for sorting. - :param track_total_hits: Number of hits matching the query to count accurately. - If true, the exact number of hits is returned at the cost of some performance. - If false, the response does not include the total number of hits matching - the query. Defaults to 10,000 hits. 
- :param typed_keys: - :param version: If true, returns document version as part of a hit. - :param wait_for_checkpoints: A comma separated list of checkpoints. When configured, - the search API will only be executed on a shard after the relevant checkpoint - has become visible for search. Defaults to an empty list which will cause - Elasticsearch to immediately execute the search. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_fleet/_fleet_search" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - # The 'sort' parameter with a colon can't be encoded to the body. - if sort is not None and ( - (isinstance(sort, str) and ":" in sort) - or ( - isinstance(sort, (list, tuple)) - and all(isinstance(_x, str) for _x in sort) - and any(":" in _x for _x in sort) - ) - ): - __query["sort"] = sort - sort = None - if aggregations is not None: - __body["aggregations"] = aggregations - if aggs is not None: - __body["aggs"] = aggs - if allow_no_indices is not None: - __query["allow_no_indices"] = allow_no_indices - if allow_partial_search_results is not None: - __query["allow_partial_search_results"] = allow_partial_search_results - if analyze_wildcard is not None: - __query["analyze_wildcard"] = analyze_wildcard - if analyzer is not None: - __query["analyzer"] = analyzer - if batched_reduce_size is not None: - __query["batched_reduce_size"] = batched_reduce_size - if ccs_minimize_roundtrips is not None: - __query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips - if collapse is not None: - __body["collapse"] = collapse - if default_operator is not None: - __query["default_operator"] = default_operator - if df is not None: - __query["df"] = df - if docvalue_fields is not None: - __body["docvalue_fields"] = docvalue_fields - if error_trace is not None: - __query["error_trace"] = error_trace - if expand_wildcards is not None: - __query["expand_wildcards"] = expand_wildcards - if explain is not None: - __body["explain"] = explain - if ext is not None: - __body["ext"] = ext - if fields is not None: - __body["fields"] = fields - if filter_path is not None: - __query["filter_path"] = filter_path - if from_ is not None: - __body["from"] = from_ - if highlight is not None: - __body["highlight"] = highlight - if human is not None: - __query["human"] = human - if ignore_throttled is not None: - __query["ignore_throttled"] = ignore_throttled - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if indices_boost is not None: - __body["indices_boost"] = indices_boost - if lenient is not None: - __query["lenient"] = lenient - if max_concurrent_shard_requests is not None: - __query["max_concurrent_shard_requests"] = max_concurrent_shard_requests - if min_compatible_shard_node is not None: - __query["min_compatible_shard_node"] = min_compatible_shard_node - if min_score is not None: - __body["min_score"] = min_score - if pit is not None: - __body["pit"] = pit - if post_filter is not None: - __body["post_filter"] = post_filter - if pre_filter_shard_size is not None: - __query["pre_filter_shard_size"] = pre_filter_shard_size - if preference is not None: - __query["preference"] = preference - if pretty is not None: - __query["pretty"] = pretty - if profile is not None: - __body["profile"] = profile - if q is not None: - __query["q"] = q - if query is not None: - __body["query"] = query - if request_cache is not None: - __query["request_cache"] = request_cache - if rescore is not 
None: - __body["rescore"] = rescore - if rest_total_hits_as_int is not None: - __query["rest_total_hits_as_int"] = rest_total_hits_as_int - if routing is not None: - __query["routing"] = routing - if runtime_mappings is not None: - __body["runtime_mappings"] = runtime_mappings - if script_fields is not None: - __body["script_fields"] = script_fields - if scroll is not None: - __query["scroll"] = scroll - if search_after is not None: - __body["search_after"] = search_after - if search_type is not None: - __query["search_type"] = search_type - if seq_no_primary_term is not None: - __body["seq_no_primary_term"] = seq_no_primary_term - if size is not None: - __body["size"] = size - if slice is not None: - __body["slice"] = slice - if sort is not None: - __body["sort"] = sort - if source is not None: - __body["_source"] = source - if source_excludes is not None: - __query["_source_excludes"] = source_excludes - if source_includes is not None: - __query["_source_includes"] = source_includes - if stats is not None: - __body["stats"] = stats - if stored_fields is not None: - __body["stored_fields"] = stored_fields - if suggest is not None: - __body["suggest"] = suggest - if suggest_field is not None: - __query["suggest_field"] = suggest_field - if suggest_mode is not None: - __query["suggest_mode"] = suggest_mode - if suggest_size is not None: - __query["suggest_size"] = suggest_size - if suggest_text is not None: - __query["suggest_text"] = suggest_text - if terminate_after is not None: - __body["terminate_after"] = terminate_after - if timeout is not None: - __body["timeout"] = timeout - if track_scores is not None: - __body["track_scores"] = track_scores - if track_total_hits is not None: - __body["track_total_hits"] = track_total_hits - if typed_keys is not None: - __query["typed_keys"] = typed_keys - if version is not None: - __body["version"] = version - if wait_for_checkpoints is not None: - __query["wait_for_checkpoints"] = wait_for_checkpoints - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_sync/client/ilm.py b/elasticsearch_serverless/_sync/client/ilm.py deleted file mode 100644 index 6fa488a..0000000 --- a/elasticsearch_serverless/_sync/client/ilm.py +++ /dev/null @@ -1,543 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
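For reference, a minimal sketch of how the Fleet search method removed above (POST /<index>/_fleet/_fleet_search) is invoked against a non-serverless deployment; this assumes the stock elasticsearch Python client, where the fleet namespace is still generated, plus a hypothetical local cluster and index name:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Only run the search once checkpoint 42 is visible for search on the target shard.
resp = es.fleet.search(
    index=".fleet-agents",
    query={"match_all": {}},
    wait_for_checkpoints=[42],
    allow_partial_search_results=False,
)
print(resp["hits"]["hits"])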
- -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class IlmClient(NamespacedClient): - @_rewrite_parameters() - def delete_lifecycle( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes the specified lifecycle policy definition. A currently used policy cannot - be deleted. - - ``_ - - :param name: Identifier for the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ilm/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def explain_lifecycle( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - only_errors: t.Optional[bool] = None, - only_managed: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about the index's current lifecycle state, such as the - currently executing phase, action, and step. - - ``_ - - :param index: Comma-separated list of data streams, indices, and aliases to target. - Supports wildcards (`*`). To target all data streams and indices, use `*` - or `_all`. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param only_errors: Filters the returned indices to only indices that are managed - by ILM and are in an error state, either due to an encountering an error - while executing the policy, or attempting to use a policy that does not exist. - :param only_managed: Filters the returned indices to only indices that are managed - by ILM. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. 
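A short sketch of the explain call documented above, assuming the stock elasticsearch Python client against a non-serverless cluster (the index pattern is hypothetical):

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Report lifecycle state only for ILM-managed indices that are currently in an error state.
resp = es.ilm.explain_lifecycle(index="logs-*", only_errors=True)
for name, info in resp["indices"].items():
    print(name, info.get("step"), info.get("failed_step"))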
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ilm/explain" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if only_errors is not None: - __query["only_errors"] = only_errors - if only_managed is not None: - __query["only_managed"] = only_managed - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_lifecycle( - self, - *, - name: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the specified policy definition. Includes the policy version and last - modified date. - - ``_ - - :param name: Identifier for the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if name not in SKIP_IN_PATH: - __path = f"/_ilm/policy/{_quote(name)}" - else: - __path = "/_ilm/policy" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_status( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the current index lifecycle management (ILM) status. 
- - ``_ - """ - __path = "/_ilm/status" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def migrate_to_data_tiers( - self, - *, - dry_run: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - legacy_template_to_delete: t.Optional[str] = None, - node_attribute: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Migrates the indices and ILM policies away from custom node attribute allocation - routing to data tiers routing - - ``_ - - :param dry_run: If true, simulates the migration from node attributes based allocation - filters to data tiers, but does not perform the migration. This provides - a way to retrieve the indices and ILM policies that need to be migrated. - :param legacy_template_to_delete: - :param node_attribute: - """ - __path = "/_ilm/migrate_to_data_tiers" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if dry_run is not None: - __query["dry_run"] = dry_run - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if legacy_template_to_delete is not None: - __body["legacy_template_to_delete"] = legacy_template_to_delete - if node_attribute is not None: - __body["node_attribute"] = node_attribute - if pretty is not None: - __query["pretty"] = pretty - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - def move_to_step( - self, - *, - index: str, - current_step: t.Optional[t.Mapping[str, t.Any]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - next_step: t.Optional[t.Mapping[str, t.Any]] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Manually moves an index into the specified step and executes that step. 
- - ``_ - - :param index: The name of the index whose lifecycle step is to change - :param current_step: - :param next_step: - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/_ilm/move/{_quote(index)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if current_step is not None: - __body["current_step"] = current_step - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if next_step is not None: - __body["next_step"] = next_step - if pretty is not None: - __query["pretty"] = pretty - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - def put_lifecycle( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - policy: t.Optional[t.Mapping[str, t.Any]] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a lifecycle policy - - ``_ - - :param name: Identifier for the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param policy: - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. 
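A minimal put_lifecycle sketch matching the signature above; the policy body is illustrative and assumes the stock elasticsearch Python client on a non-serverless cluster:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Roll over hot indices at 50 GB or 30 days, then delete them after 90 days.
es.ilm.put_lifecycle(
    name="logs-policy",
    policy={
        "phases": {
            "hot": {"actions": {"rollover": {"max_size": "50gb", "max_age": "30d"}}},
            "delete": {"min_age": "90d", "actions": {"delete": {}}},
        }
    },
)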
- """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_ilm/policy/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if policy is not None: - __body["policy"] = policy - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def remove_policy( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes the assigned lifecycle policy and stops managing the specified index - - ``_ - - :param index: The name of the index to remove policy on - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ilm/remove" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def retry( - self, - *, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retries executing the policy for an index that is in the ERROR step. 
- - ``_ - - :param index: The name of the indices (comma-separated) whose failed lifecycle - step is to be retry - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_ilm/retry" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def start( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Start the index lifecycle management (ILM) plugin. - - ``_ - - :param master_timeout: - :param timeout: - """ - __path = "/_ilm/start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def stop( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Halts all lifecycle management operations and stops the index lifecycle management - (ILM) plugin - - ``_ - - :param master_timeout: - :param timeout: - """ - __path = "/_ilm/stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/migration.py b/elasticsearch_serverless/_sync/client/migration.py deleted file mode 100644 index 2ce226c..0000000 --- a/elasticsearch_serverless/_sync/client/migration.py +++ /dev/null @@ -1,127 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. 
licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class MigrationClient(NamespacedClient): - @_rewrite_parameters() - def deprecations( - self, - *, - index: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about different cluster, node, and index level settings - that use deprecated features that will be removed or changed in the next major - version. - - ``_ - - :param index: Comma-separate list of data streams or indices to check. Wildcard - (*) expressions are supported. - """ - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_migration/deprecations" - else: - __path = "/_migration/deprecations" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_feature_upgrade_status( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Find out whether system features need to be upgraded or not - - ``_ - """ - __path = "/_migration/system_features" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def post_feature_upgrade( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Begin upgrades for system features - - ``_ - """ - __path = "/_migration/system_features" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - 
__headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/monitoring.py b/elasticsearch_serverless/_sync/client/monitoring.py deleted file mode 100644 index 1b65a06..0000000 --- a/elasticsearch_serverless/_sync/client/monitoring.py +++ /dev/null @@ -1,87 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class MonitoringClient(NamespacedClient): - @_rewrite_parameters( - body_name="operations", - ) - def bulk( - self, - *, - interval: t.Union["t.Literal[-1]", "t.Literal[0]", str], - operations: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - system_api_version: str, - system_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Used by the monitoring features to send monitoring data. 
- - ``_ - - :param interval: Collection interval (e.g., '10s' or '10000ms') of the payload - :param operations: - :param system_api_version: - :param system_id: Identifier of the monitored system - """ - if interval is None: - raise ValueError("Empty value passed for parameter 'interval'") - if operations is None: - raise ValueError("Empty value passed for parameter 'operations'") - if system_api_version is None: - raise ValueError("Empty value passed for parameter 'system_api_version'") - if system_id is None: - raise ValueError("Empty value passed for parameter 'system_id'") - __path = "/_monitoring/bulk" - __query: t.Dict[str, t.Any] = {} - if interval is not None: - __query["interval"] = interval - if system_api_version is not None: - __query["system_api_version"] = system_api_version - if system_id is not None: - __query["system_id"] = system_id - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __body = operations - __headers = { - "accept": "application/json", - "content-type": "application/x-ndjson", - } - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_sync/client/nodes.py b/elasticsearch_serverless/_sync/client/nodes.py deleted file mode 100644 index e38240f..0000000 --- a/elasticsearch_serverless/_sync/client/nodes.py +++ /dev/null @@ -1,483 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse, TextApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class NodesClient(NamespacedClient): - @_rewrite_parameters() - def clear_repositories_metering_archive( - self, - *, - node_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - max_archive_version: int, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes the archived repositories metering information present in the cluster. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. All the nodes selective options are explained [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster.html#cluster-nodes). 
- :param max_archive_version: Specifies the maximum [archive_version](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-repositories-metering-api.html#get-repositories-metering-api-response-body) - to be cleared from the archive. - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - if max_archive_version in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'max_archive_version'") - __path = f"/_nodes/{_quote(node_id)}/_repositories_metering/{_quote(max_archive_version)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_repositories_metering_info( - self, - *, - node_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns cluster repositories metering information. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. All the nodes selective options are explained [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster.html#cluster-nodes). - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - __path = f"/_nodes/{_quote(node_id)}/_repositories_metering" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def hot_threads( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_idle_threads: t.Optional[bool] = None, - interval: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - snapshots: t.Optional[int] = None, - sort: t.Optional[ - t.Union["t.Literal['block', 'cpu', 'gpu', 'mem', 'wait']", str] - ] = None, - threads: t.Optional[int] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - type: t.Optional[ - t.Union["t.Literal['block', 'cpu', 'gpu', 'mem', 'wait']", str] - ] = None, - ) -> TextApiResponse: - """ - Returns information about hot threads on each node in the cluster. - - ``_ - - :param node_id: List of node IDs or names used to limit returned information. - :param ignore_idle_threads: If true, known idle threads (e.g. waiting in a socket - select, or to get a task from an empty queue) are filtered out. 
- :param interval: The interval to do the second sampling of threads. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param snapshots: Number of samples of thread stacktrace. - :param sort: The sort order for 'cpu' type (default: total) - :param threads: Specifies the number of hot threads to provide information for. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - :param type: The type to sample. - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/hot_threads" - else: - __path = "/_nodes/hot_threads" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_idle_threads is not None: - __query["ignore_idle_threads"] = ignore_idle_threads - if interval is not None: - __query["interval"] = interval - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if snapshots is not None: - __query["snapshots"] = snapshots - if sort is not None: - __query["sort"] = sort - if threads is not None: - __query["threads"] = threads - if timeout is not None: - __query["timeout"] = timeout - if type is not None: - __query["type"] = type - __headers = {"accept": "text/plain"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def info( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - flat_settings: t.Optional[bool] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about nodes in the cluster. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. - :param metric: Limits the information returned to the specific metrics. Supports - a comma-separated list, such as http,ingest. - :param flat_settings: If true, returns settings in flat format. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. 
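A sketch of the node info and hot-threads calls above, assuming the stock elasticsearch Python client against a non-serverless cluster:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Restrict the node info payload to the HTTP and JVM sections.
info = es.nodes.info(node_id="_all", metric="http,jvm", flat_settings=True)
print(list(info["nodes"]))

# Hot threads are returned as plain text, one section per selected node.
print(es.nodes.hot_threads(threads=3, ignore_idle_threads=True))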
- """ - if node_id not in SKIP_IN_PATH and metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/{_quote(metric)}" - elif node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}" - elif metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(metric)}" - else: - __path = "/_nodes" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if flat_settings is not None: - __query["flat_settings"] = flat_settings - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def reload_secure_settings( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - secure_settings_password: t.Optional[str] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Reloads secure settings. - - ``_ - - :param node_id: A comma-separated list of node IDs to span the reload/reinit - call. Should stay empty because reloading usually involves all cluster nodes. - :param secure_settings_password: - :param timeout: Explicit operation timeout - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/reload_secure_settings" - else: - __path = "/_nodes/reload_secure_settings" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if secure_settings_password is not None: - __body["secure_settings_password"] = secure_settings_password - if timeout is not None: - __query["timeout"] = timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def stats( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - index_metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - completion_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - fielddata_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - groups: t.Optional[bool] = None, - human: t.Optional[bool] = None, - 
include_segment_file_sizes: t.Optional[bool] = None, - include_unloaded_segments: t.Optional[bool] = None, - level: t.Optional[ - t.Union["t.Literal['cluster', 'indices', 'shards']", str] - ] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - types: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns statistical information about nodes in the cluster. - - ``_ - - :param node_id: Comma-separated list of node IDs or names used to limit returned - information. - :param metric: Limit the information returned to the specified metrics - :param index_metric: Limit the information returned for indices metric to the - specific index metrics. It can be used only if indices (or all) metric is - specified. - :param completion_fields: Comma-separated list or wildcard expressions of fields - to include in fielddata and suggest statistics. - :param fielddata_fields: Comma-separated list or wildcard expressions of fields - to include in fielddata statistics. - :param fields: Comma-separated list or wildcard expressions of fields to include - in the statistics. - :param groups: Comma-separated list of search groups to include in the search - statistics. - :param include_segment_file_sizes: If true, the call reports the aggregated disk - usage of each one of the Lucene index files (only applies if segment stats - are requested). - :param include_unloaded_segments: If set to true segment stats will include stats - for segments that are not currently loaded into memory - :param level: Indicates whether statistics are aggregated at the cluster, index, - or shard level. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - :param types: A comma-separated list of document types for the indexing index - metric. 
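A sketch of the stats call above, limited to JVM and filesystem metrics, again assuming the stock elasticsearch Python client on a non-serverless cluster:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

stats = es.nodes.stats(metric="jvm,fs")
for node_id, node in stats["nodes"].items():
    print(node_id, f"heap={node['jvm']['mem']['heap_used_percent']}%")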
- """ - if ( - node_id not in SKIP_IN_PATH - and metric not in SKIP_IN_PATH - and index_metric not in SKIP_IN_PATH - ): - __path = f"/_nodes/{_quote(node_id)}/stats/{_quote(metric)}/{_quote(index_metric)}" - elif node_id not in SKIP_IN_PATH and metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/stats/{_quote(metric)}" - elif metric not in SKIP_IN_PATH and index_metric not in SKIP_IN_PATH: - __path = f"/_nodes/stats/{_quote(metric)}/{_quote(index_metric)}" - elif node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/stats" - elif metric not in SKIP_IN_PATH: - __path = f"/_nodes/stats/{_quote(metric)}" - else: - __path = "/_nodes/stats" - __query: t.Dict[str, t.Any] = {} - if completion_fields is not None: - __query["completion_fields"] = completion_fields - if error_trace is not None: - __query["error_trace"] = error_trace - if fielddata_fields is not None: - __query["fielddata_fields"] = fielddata_fields - if fields is not None: - __query["fields"] = fields - if filter_path is not None: - __query["filter_path"] = filter_path - if groups is not None: - __query["groups"] = groups - if human is not None: - __query["human"] = human - if include_segment_file_sizes is not None: - __query["include_segment_file_sizes"] = include_segment_file_sizes - if include_unloaded_segments is not None: - __query["include_unloaded_segments"] = include_unloaded_segments - if level is not None: - __query["level"] = level - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if types is not None: - __query["types"] = types - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def usage( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - metric: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns low-level information about REST actions usage on nodes. 
- - ``_ - - :param node_id: A comma-separated list of node IDs or names to limit the returned - information; use `_local` to return information from the node you're connecting - to, leave empty to get information from all nodes - :param metric: Limit the information returned to the specified metrics - :param timeout: Explicit operation timeout - """ - if node_id not in SKIP_IN_PATH and metric not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/usage/{_quote(metric)}" - elif node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/usage" - elif metric not in SKIP_IN_PATH: - __path = f"/_nodes/usage/{_quote(metric)}" - else: - __path = "/_nodes/usage" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/rollup.py b/elasticsearch_serverless/_sync/client/rollup.py deleted file mode 100644 index c54e0c0..0000000 --- a/elasticsearch_serverless/_sync/client/rollup.py +++ /dev/null @@ -1,440 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class RollupClient(NamespacedClient): - @_rewrite_parameters() - def delete_job( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes an existing rollup job. 
- - ``_ - - :param id: The ID of the job to delete - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_rollup/job/{_quote(id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_jobs( - self, - *, - id: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the configuration, stats, and status of rollup jobs. - - ``_ - - :param id: The ID of the job(s) to fetch. Accepts glob patterns, or left blank - for all jobs - """ - if id not in SKIP_IN_PATH: - __path = f"/_rollup/job/{_quote(id)}" - else: - __path = "/_rollup/job" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_rollup_caps( - self, - *, - id: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the capabilities of any rollup jobs that have been configured for a specific - index or index pattern. - - ``_ - - :param id: The ID of the index to check rollup capabilities on, or left blank - for all jobs - """ - if id not in SKIP_IN_PATH: - __path = f"/_rollup/data/{_quote(id)}" - else: - __path = "/_rollup/data" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_rollup_index_caps( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns the rollup capabilities of all jobs inside of a rollup index (e.g. the - index where rollup data is stored). - - ``_ - - :param index: The rollup index or index pattern to obtain rollup capabilities - from. 
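A sketch of the capability lookup documented above; the rollup index name is hypothetical and the stock elasticsearch Python client on a non-serverless cluster is assumed:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# List the rollup jobs that write into this rollup index and the patterns they cover.
caps = es.rollup.get_rollup_index_caps(index="rollup-sensors")
for info in caps.values():
    for job in info["rollup_jobs"]:
        print(job["job_id"], job["index_pattern"])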
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_rollup/data" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ignore_deprecated_options={"headers"}, - ) - def put_job( - self, - *, - id: str, - cron: str, - groups: t.Mapping[str, t.Any], - index_pattern: str, - page_size: int, - rollup_index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - headers: t.Optional[ - t.Mapping[str, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - human: t.Optional[bool] = None, - metrics: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a rollup job. - - ``_ - - :param id: Identifier for the rollup job. This can be any alphanumeric string - and uniquely identifies the data that is associated with the rollup job. - The ID is persistent; it is stored with the rolled up data. If you create - a job, let it run for a while, then delete the job, the data that the job - rolled up is still be associated with this job ID. You cannot create a new - job with the same ID since that could lead to problems with mismatched job - configurations. - :param cron: A cron string which defines the intervals when the rollup job should - be executed. When the interval triggers, the indexer attempts to rollup the - data in the index pattern. The cron pattern is unrelated to the time interval - of the data being rolled up. For example, you may wish to create hourly rollups - of your document but to only run the indexer on a daily basis at midnight, - as defined by the cron. The cron pattern is defined just like a Watcher cron - schedule. - :param groups: Defines the grouping fields and aggregations that are defined - for this rollup job. These fields will then be available later for aggregating - into buckets. These aggs and fields can be used in any combination. Think - of the groups configuration as defining a set of tools that can later be - used in aggregations to partition the data. Unlike raw data, we have to think - ahead to which fields and aggregations might be used. Rollups provide enough - flexibility that you simply need to determine which fields are needed, not - in what order they are needed. - :param index_pattern: The index or index pattern to roll up. Supports wildcard-style - patterns (`logstash-*`). The job attempts to rollup the entire index or index-pattern. - :param page_size: The number of bucket results that are processed on each iteration - of the rollup indexer. A larger value tends to execute faster, but requires - more memory during processing. This value has no effect on how the data is - rolled up; it is merely used for tweaking the speed or memory cost of the - indexer. - :param rollup_index: The index that contains the rollup results. 
The index can - be shared with other rollup jobs. The data is stored so that it doesn’t interfere - with unrelated jobs. - :param headers: - :param metrics: Defines the metrics to collect for each grouping tuple. By default, - only the doc_counts are collected for each group. To make rollup useful, - you will often add metrics like averages, mins, maxes, etc. Metrics are defined - on a per-field basis and for each field you configure which metric should - be collected. - :param timeout: Time to wait for the request to complete. - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - if cron is None: - raise ValueError("Empty value passed for parameter 'cron'") - if groups is None: - raise ValueError("Empty value passed for parameter 'groups'") - if index_pattern is None: - raise ValueError("Empty value passed for parameter 'index_pattern'") - if page_size is None: - raise ValueError("Empty value passed for parameter 'page_size'") - if rollup_index is None: - raise ValueError("Empty value passed for parameter 'rollup_index'") - __path = f"/_rollup/job/{_quote(id)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if cron is not None: - __body["cron"] = cron - if groups is not None: - __body["groups"] = groups - if index_pattern is not None: - __body["index_pattern"] = index_pattern - if page_size is not None: - __body["page_size"] = page_size - if rollup_index is not None: - __body["rollup_index"] = rollup_index - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if headers is not None: - __body["headers"] = headers - if human is not None: - __query["human"] = human - if metrics is not None: - __body["metrics"] = metrics - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __body["timeout"] = timeout - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - def rollup_search( - self, - *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - query: t.Optional[t.Mapping[str, t.Any]] = None, - rest_total_hits_as_int: t.Optional[bool] = None, - size: t.Optional[int] = None, - typed_keys: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Enables searching rolled-up data using the standard query DSL. 
- - ``_ - - :param index: The indices or index-pattern(s) (containing rollup or regular data) - that should be searched - :param aggregations: - :param aggs: - :param query: - :param rest_total_hits_as_int: Indicates whether hits.total should be rendered - as an integer or an object in the rest search response - :param size: Must be zero if set, as rollups work on pre-aggregated data - :param typed_keys: Specify whether aggregation and suggester names should be - prefixed by their respective types in the response - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/{_quote(index)}/_rollup_search" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if aggregations is not None: - __body["aggregations"] = aggregations - if aggs is not None: - __body["aggs"] = aggs - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if query is not None: - __body["query"] = query - if rest_total_hits_as_int is not None: - __query["rest_total_hits_as_int"] = rest_total_hits_as_int - if size is not None: - __body["size"] = size - if typed_keys is not None: - __query["typed_keys"] = typed_keys - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def start_job( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Starts an existing, stopped rollup job. - - ``_ - - :param id: The ID of the job to start - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_rollup/job/{_quote(id)}/_start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def stop_job( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Stops an existing, started rollup job. - - ``_ - - :param id: The ID of the job to stop - :param timeout: Block for (at maximum) the specified duration while waiting for - the job to stop. Defaults to 30s. - :param wait_for_completion: True if the API should block until the job has fully - stopped, false if should be executed async. Defaults to false. 
- """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_rollup/job/{_quote(id)}/_stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/searchable_snapshots.py b/elasticsearch_serverless/_sync/client/searchable_snapshots.py deleted file mode 100644 index 17cc31e..0000000 --- a/elasticsearch_serverless/_sync/client/searchable_snapshots.py +++ /dev/null @@ -1,265 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class SearchableSnapshotsClient(NamespacedClient): - @_rewrite_parameters() - def cache_stats( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieve node-level cache statistics about searchable snapshots. 
- - ``_ - - :param node_id: A comma-separated list of node IDs or names to limit the returned - information; use `_local` to return information from the node you're connecting - to, leave empty to get information from all nodes - :param master_timeout: - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_searchable_snapshots/{_quote(node_id)}/cache/stats" - else: - __path = "/_searchable_snapshots/cache/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def clear_cache( - self, - *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - allow_no_indices: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - expand_wildcards: t.Optional[ - t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], - ], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Clear the cache of searchable snapshots. - - ``_ - - :param index: A comma-separated list of index names - :param allow_no_indices: Whether to ignore if a wildcard indices expression resolves - into no concrete indices. (This includes `_all` string or when no indices - have been specified) - :param expand_wildcards: Whether to expand wildcard expression to concrete indices - that are open, closed or both. 
- :param ignore_unavailable: Whether specified concrete indices should be ignored - when unavailable (missing or closed) - """ - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_searchable_snapshots/cache/clear" - else: - __path = "/_searchable_snapshots/cache/clear" - __query: t.Dict[str, t.Any] = {} - if allow_no_indices is not None: - __query["allow_no_indices"] = allow_no_indices - if error_trace is not None: - __query["error_trace"] = error_trace - if expand_wildcards is not None: - __query["expand_wildcards"] = expand_wildcards - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def mount( - self, - *, - repository: str, - snapshot: str, - index: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_index_settings: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - index_settings: t.Optional[t.Mapping[str, t.Any]] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - renamed_index: t.Optional[str] = None, - storage: t.Optional[str] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Mount a snapshot as a searchable index. - - ``_ - - :param repository: The name of the repository containing the snapshot of the - index to mount - :param snapshot: The name of the snapshot of the index to mount - :param index: - :param ignore_index_settings: - :param index_settings: - :param master_timeout: Explicit operation timeout for connection to master node - :param renamed_index: - :param storage: Selects the kind of local storage used to accelerate searches. 
- Experimental, and defaults to `full_copy` - :param wait_for_completion: Should this request wait until the operation has - completed before returning - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - if index is None: - raise ValueError("Empty value passed for parameter 'index'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_mount" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if index is not None: - __body["index"] = index - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_index_settings is not None: - __body["ignore_index_settings"] = ignore_index_settings - if index_settings is not None: - __body["index_settings"] = index_settings - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if renamed_index is not None: - __body["renamed_index"] = renamed_index - if storage is not None: - __query["storage"] = storage - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def stats( - self, - *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - level: t.Optional[ - t.Union["t.Literal['cluster', 'indices', 'shards']", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieve shard-level statistics about searchable snapshots. - - ``_ - - :param index: A comma-separated list of index names - :param level: Return stats aggregated at cluster, index or shard level - """ - if index not in SKIP_IN_PATH: - __path = f"/{_quote(index)}/_searchable_snapshots/stats" - else: - __path = "/_searchable_snapshots/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if level is not None: - __query["level"] = level - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/shutdown.py b/elasticsearch_serverless/_sync/client/shutdown.py deleted file mode 100644 index fef109e..0000000 --- a/elasticsearch_serverless/_sync/client/shutdown.py +++ /dev/null @@ -1,229 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class ShutdownClient(NamespacedClient): - @_rewrite_parameters() - def delete_node( - self, - *, - node_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes a node from the shutdown list. Designed for indirect use by ECE/ESS and - ECK. Direct use is not supported. - - ``_ - - :param node_id: The node id of node to be removed from the shutdown state - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - __path = f"/_nodes/{_quote(node_id)}/shutdown" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_node( - self, - *, - node_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieve status of a node or nodes that are currently marked as shutting down. - Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. - - ``_ - - :param node_id: Which node for which to retrieve the shutdown status - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param timeout: Period to wait for a response. 
If no response is received before - the timeout expires, the request fails and returns an error. - """ - if node_id not in SKIP_IN_PATH: - __path = f"/_nodes/{_quote(node_id)}/shutdown" - else: - __path = "/_nodes/shutdown" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def put_node( - self, - *, - node_id: str, - reason: str, - type: t.Union["t.Literal['remove', 'replace', 'restart']", str], - allocation_delay: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - pretty: t.Optional[bool] = None, - target_node_name: t.Optional[str] = None, - timeout: t.Optional[ - t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. Direct - use is not supported. - - ``_ - - :param node_id: The node id of node to be shut down - :param reason: A human-readable reason that the node is being shut down. This - field provides information for other cluster operators; it does not affect - the shut down process. - :param type: Valid values are restart, remove, or replace. Use restart when you - need to temporarily shut down a node to perform an upgrade, make configuration - changes, or perform other maintenance. Because the node is expected to rejoin - the cluster, data is not migrated off of the node. Use remove when you need - to permanently remove a node from the cluster. The node is not marked ready - for shutdown until data is migrated off of the node Use replace to do a 1:1 - replacement of a node with another node. Certain allocation decisions will - be ignored (such as disk watermarks) in the interest of true replacement - of the source node with the target node. During a replace-type shutdown, - rollover and index creation may result in unassigned shards, and shrink may - fail until the replacement is complete. - :param allocation_delay: Only valid if type is restart. Controls how long Elasticsearch - will wait for the node to restart and join the cluster before reassigning - its shards to other nodes. This works the same as delaying allocation with - the index.unassigned.node_left.delayed_timeout setting. If you specify both - a restart allocation delay and an index-level allocation delay, the longer - of the two is used. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param target_node_name: Only valid if type is replace. Specifies the name of - the node that is replacing the node being shut down. 
Shards from the shut - down node are only allowed to be allocated to the target node, and no other - data will be allocated to the target node. During relocation of data certain - allocation rules are ignored, such as disk watermarks or user attribute filtering - rules. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if node_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'node_id'") - if reason is None: - raise ValueError("Empty value passed for parameter 'reason'") - if type is None: - raise ValueError("Empty value passed for parameter 'type'") - __path = f"/_nodes/{_quote(node_id)}/shutdown" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if reason is not None: - __body["reason"] = reason - if type is not None: - __body["type"] = type - if allocation_delay is not None: - __body["allocation_delay"] = allocation_delay - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if target_node_name is not None: - __body["target_node_name"] = target_node_name - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_sync/client/slm.py b/elasticsearch_serverless/_sync/client/slm.py deleted file mode 100644 index 671b1c7..0000000 --- a/elasticsearch_serverless/_sync/client/slm.py +++ /dev/null @@ -1,377 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class SlmClient(NamespacedClient): - @_rewrite_parameters() - def delete_lifecycle( - self, - *, - policy_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes an existing snapshot lifecycle policy. 
- - ``_ - - :param policy_id: The id of the snapshot lifecycle policy to remove - """ - if policy_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'policy_id'") - __path = f"/_slm/policy/{_quote(policy_id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def execute_lifecycle( - self, - *, - policy_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Immediately creates a snapshot according to the lifecycle policy, without waiting - for the scheduled time. - - ``_ - - :param policy_id: The id of the snapshot lifecycle policy to be executed - """ - if policy_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'policy_id'") - __path = f"/_slm/policy/{_quote(policy_id)}/_execute" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def execute_retention( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes any snapshots that are expired according to the policy's retention rules. - - ``_ - """ - __path = "/_slm/_execute_retention" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_lifecycle( - self, - *, - policy_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves one or more snapshot lifecycle policy definitions and information about - the latest snapshot attempts. 
- - ``_ - - :param policy_id: Comma-separated list of snapshot lifecycle policies to retrieve - """ - if policy_id not in SKIP_IN_PATH: - __path = f"/_slm/policy/{_quote(policy_id)}" - else: - __path = "/_slm/policy" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_stats( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns global and policy-level statistics about actions taken by snapshot lifecycle - management. - - ``_ - """ - __path = "/_slm/stats" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_status( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the status of snapshot lifecycle management (SLM). - - ``_ - """ - __path = "/_slm/status" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def put_lifecycle( - self, - *, - policy_id: str, - config: t.Optional[t.Mapping[str, t.Any]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - name: t.Optional[str] = None, - pretty: t.Optional[bool] = None, - repository: t.Optional[str] = None, - retention: t.Optional[t.Mapping[str, t.Any]] = None, - schedule: t.Optional[str] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates or updates a snapshot lifecycle policy. - - ``_ - - :param policy_id: ID for the snapshot lifecycle policy you want to create or - update. - :param config: Configuration for each snapshot created by the policy. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. 
- :param name: Name automatically assigned to each snapshot created by the policy. - Date math is supported. To prevent conflicting snapshot names, a UUID is - automatically appended to each snapshot name. - :param repository: Repository used to store snapshots created by this policy. - This repository must exist prior to the policy’s creation. You can create - a repository using the snapshot repository API. - :param retention: Retention rules used to retain and delete snapshots created - by the policy. - :param schedule: Periodic or absolute schedule at which the policy creates snapshots. - SLM applies schedule changes immediately. - :param timeout: Period to wait for a response. If no response is received before - the timeout expires, the request fails and returns an error. - """ - if policy_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'policy_id'") - __path = f"/_slm/policy/{_quote(policy_id)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if config is not None: - __body["config"] = config - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if name is not None: - __body["name"] = name - if pretty is not None: - __query["pretty"] = pretty - if repository is not None: - __body["repository"] = repository - if retention is not None: - __body["retention"] = retention - if schedule is not None: - __body["schedule"] = schedule - if timeout is not None: - __query["timeout"] = timeout - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def start( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Turns on snapshot lifecycle management (SLM). - - ``_ - """ - __path = "/_slm/start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def stop( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Turns off snapshot lifecycle management (SLM). 
- - ``_ - """ - __path = "/_slm/stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/snapshot.py b/elasticsearch_serverless/_sync/client/snapshot.py deleted file mode 100644 index 20cba3f..0000000 --- a/elasticsearch_serverless/_sync/client/snapshot.py +++ /dev/null @@ -1,773 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class SnapshotClient(NamespacedClient): - @_rewrite_parameters() - def cleanup_repository( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes stale data from repository. - - ``_ - - :param name: Snapshot repository to clean up. - :param master_timeout: Period to wait for a connection to the master node. - :param timeout: Period to wait for a response. 
- """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_snapshot/{_quote(name)}/_cleanup" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def clone( - self, - *, - repository: str, - snapshot: str, - target_snapshot: str, - indices: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Clones indices from one snapshot into another snapshot in the same repository. - - ``_ - - :param repository: A repository name - :param snapshot: The name of the snapshot to clone from - :param target_snapshot: The name of the cloned snapshot to create - :param indices: - :param master_timeout: Explicit operation timeout for connection to master node - :param timeout: - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - if target_snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'target_snapshot'") - if indices is None: - raise ValueError("Empty value passed for parameter 'indices'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_clone/{_quote(target_snapshot)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if indices is not None: - __body["indices"] = indices - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - def create( - self, - *, - repository: str, - snapshot: str, - error_trace: t.Optional[bool] = None, - feature_states: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - include_global_state: t.Optional[bool] = None, - indices: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - metadata: t.Optional[t.Mapping[str, t.Any]] = 
None, - partial: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a snapshot in a repository. - - ``_ - - :param repository: Repository for the snapshot. - :param snapshot: Name of the snapshot. Must be unique in the repository. - :param feature_states: Feature states to include in the snapshot. Each feature - state includes one or more system indices containing related data. You can - view a list of eligible features using the get features API. If `include_global_state` - is `true`, all current feature states are included by default. If `include_global_state` - is `false`, no feature states are included by default. - :param ignore_unavailable: If `true`, the request ignores data streams and indices - in `indices` that are missing or closed. If `false`, the request returns - an error for any data stream or index that is missing or closed. - :param include_global_state: If `true`, the current cluster state is included - in the snapshot. The cluster state includes persistent cluster settings, - composable index templates, legacy index templates, ingest pipelines, and - ILM policies. It also includes data stored in system indices, such as Watches - and task records (configurable via `feature_states`). - :param indices: Data streams and indices to include in the snapshot. Supports - multi-target syntax. Includes all data streams and indices by default. - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - :param metadata: Optional metadata for the snapshot. May have any contents. Must - be less than 1024 bytes. This map is not automatically generated by Elasticsearch. - :param partial: If `true`, allows restoring a partial snapshot of indices with - unavailable shards. Only shards that were successfully included in the snapshot - will be restored. All missing shards will be recreated as empty. If `false`, - the entire restore operation will fail if one or more indices included in - the snapshot do not have all primary shards available. - :param wait_for_completion: If `true`, the request returns a response when the - snapshot is complete. If `false`, the request returns a response when the - snapshot initializes. 
- """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if feature_states is not None: - __body["feature_states"] = feature_states - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __body["ignore_unavailable"] = ignore_unavailable - if include_global_state is not None: - __body["include_global_state"] = include_global_state - if indices is not None: - __body["indices"] = indices - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if metadata is not None: - __body["metadata"] = metadata - if partial is not None: - __body["partial"] = partial - if pretty is not None: - __query["pretty"] = pretty - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - ) - def create_repository( - self, - *, - name: str, - settings: t.Mapping[str, t.Any], - type: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - repository: t.Optional[t.Mapping[str, t.Any]] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - verify: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a repository. 
- - ``_ - - :param name: A repository name - :param settings: - :param type: - :param master_timeout: Explicit operation timeout for connection to master node - :param repository: - :param timeout: Explicit operation timeout - :param verify: Whether to verify the repository after creation - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - if settings is None: - raise ValueError("Empty value passed for parameter 'settings'") - if type is None: - raise ValueError("Empty value passed for parameter 'type'") - __path = f"/_snapshot/{_quote(name)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if settings is not None: - __body["settings"] = settings - if type is not None: - __body["type"] = type - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if repository is not None: - __body["repository"] = repository - if timeout is not None: - __query["timeout"] = timeout - if verify is not None: - __query["verify"] = verify - __headers = {"accept": "application/json", "content-type": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def delete( - self, - *, - repository: str, - snapshot: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes one or more snapshots. - - ``_ - - :param repository: A repository name - :param snapshot: A comma-separated list of snapshot names - :param master_timeout: Explicit operation timeout for connection to master node - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def delete_repository( - self, - *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deletes a repository. - - ``_ - - :param name: Name of the snapshot repository to unregister. 
Wildcard (`*`) patterns - are supported. - :param master_timeout: Explicit operation timeout for connection to master node - :param timeout: Explicit operation timeout - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_snapshot/{_quote(name)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get( - self, - *, - repository: str, - snapshot: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - after: t.Optional[str] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - from_sort_value: t.Optional[str] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - include_repository: t.Optional[bool] = None, - index_details: t.Optional[bool] = None, - index_names: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - offset: t.Optional[int] = None, - order: t.Optional[t.Union["t.Literal['asc', 'desc']", str]] = None, - pretty: t.Optional[bool] = None, - size: t.Optional[int] = None, - slm_policy_filter: t.Optional[str] = None, - sort: t.Optional[ - t.Union[ - "t.Literal['duration', 'failed_shard_count', 'index_count', 'name', 'repository', 'shard_count', 'start_time']", - str, - ] - ] = None, - verbose: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about a snapshot. - - ``_ - - :param repository: Comma-separated list of snapshot repository names used to - limit the request. Wildcard (*) expressions are supported. - :param snapshot: Comma-separated list of snapshot names to retrieve. Also accepts - wildcards (*). - To get information about all snapshots in a registered repository, - use a wildcard (*) or _all. - To get information about any snapshots that - are currently running, use _current. - :param after: Offset identifier to start pagination from as returned by the next - field in the response body. - :param from_sort_value: Value of the current sort column at which to start retrieval. - Can either be a string snapshot- or repository name when sorting by snapshot - or repository name, a millisecond time value or a number when sorting by - index- or shard count. - :param ignore_unavailable: If false, the request returns an error for any snapshots - that are unavailable. - :param include_repository: If true, returns the repository name in each snapshot. - :param index_details: If true, returns additional information about each index - in the snapshot comprising the number of shards in the index, the total size - of the index in bytes, and the maximum number of segments per shard in the - index. Defaults to false, meaning that this information is omitted. - :param index_names: If true, returns the name of each index in each snapshot. - :param master_timeout: Period to wait for a connection to the master node. 
If - no response is received before the timeout expires, the request fails and - returns an error. - :param offset: Numeric offset to start pagination from based on the snapshots - matching this request. Using a non-zero value for this parameter is mutually - exclusive with using the after parameter. Defaults to 0. - :param order: Sort order. Valid values are asc for ascending and desc for descending - order. Defaults to asc, meaning ascending order. - :param size: Maximum number of snapshots to return. Defaults to 0 which means - return all that match the request without limit. - :param slm_policy_filter: Filter snapshots by a comma-separated list of SLM policy - names that snapshots belong to. Also accepts wildcards (*) and combinations - of wildcards followed by exclude patterns starting with -. To include snapshots - not created by an SLM policy you can use the special pattern _none that will - match all snapshots without an SLM policy. - :param sort: Allows setting a sort order for the result. Defaults to start_time, - i.e. sorting by snapshot start time stamp. - :param verbose: If true, returns additional information about each snapshot such - as the version of Elasticsearch which took the snapshot, the start and end - times of the snapshot, and the number of shards snapshotted. - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}" - __query: t.Dict[str, t.Any] = {} - if after is not None: - __query["after"] = after - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if from_sort_value is not None: - __query["from_sort_value"] = from_sort_value - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if include_repository is not None: - __query["include_repository"] = include_repository - if index_details is not None: - __query["index_details"] = index_details - if index_names is not None: - __query["index_names"] = index_names - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if offset is not None: - __query["offset"] = offset - if order is not None: - __query["order"] = order - if pretty is not None: - __query["pretty"] = pretty - if size is not None: - __query["size"] = size - if slm_policy_filter is not None: - __query["slm_policy_filter"] = slm_policy_filter - if sort is not None: - __query["sort"] = sort - if verbose is not None: - __query["verbose"] = verbose - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def get_repository( - self, - *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - local: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about a repository. 
- - ``_ - - :param name: A comma-separated list of repository names - :param local: Return local information, do not retrieve the state from master - node (default: false) - :param master_timeout: Explicit operation timeout for connection to master node - """ - if name not in SKIP_IN_PATH: - __path = f"/_snapshot/{_quote(name)}" - else: - __path = "/_snapshot" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if local is not None: - __query["local"] = local - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def restore( - self, - *, - repository: str, - snapshot: str, - error_trace: t.Optional[bool] = None, - feature_states: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_index_settings: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, - ignore_unavailable: t.Optional[bool] = None, - include_aliases: t.Optional[bool] = None, - include_global_state: t.Optional[bool] = None, - index_settings: t.Optional[t.Mapping[str, t.Any]] = None, - indices: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - partial: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - rename_pattern: t.Optional[str] = None, - rename_replacement: t.Optional[str] = None, - wait_for_completion: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Restores a snapshot. 
- - ``_ - - :param repository: A repository name - :param snapshot: A snapshot name - :param feature_states: - :param ignore_index_settings: - :param ignore_unavailable: - :param include_aliases: - :param include_global_state: - :param index_settings: - :param indices: - :param master_timeout: Explicit operation timeout for connection to master node - :param partial: - :param rename_pattern: - :param rename_replacement: - :param wait_for_completion: Should this request wait until the operation has - completed before returning - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'repository'") - if snapshot in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'snapshot'") - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_restore" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if feature_states is not None: - __body["feature_states"] = feature_states - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_index_settings is not None: - __body["ignore_index_settings"] = ignore_index_settings - if ignore_unavailable is not None: - __body["ignore_unavailable"] = ignore_unavailable - if include_aliases is not None: - __body["include_aliases"] = include_aliases - if include_global_state is not None: - __body["include_global_state"] = include_global_state - if index_settings is not None: - __body["index_settings"] = index_settings - if indices is not None: - __body["indices"] = indices - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if partial is not None: - __body["partial"] = partial - if pretty is not None: - __query["pretty"] = pretty - if rename_pattern is not None: - __body["rename_pattern"] = rename_pattern - if rename_replacement is not None: - __body["rename_replacement"] = rename_replacement - if wait_for_completion is not None: - __query["wait_for_completion"] = wait_for_completion - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def status( - self, - *, - repository: t.Optional[str] = None, - snapshot: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_unavailable: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Returns information about the status of a snapshot. 
- - ``_ - - :param repository: A repository name - :param snapshot: A comma-separated list of snapshot names - :param ignore_unavailable: Whether to ignore unavailable snapshots, defaults - to false which means a SnapshotMissingException is thrown - :param master_timeout: Explicit operation timeout for connection to master node - """ - if repository not in SKIP_IN_PATH and snapshot not in SKIP_IN_PATH: - __path = f"/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_status" - elif repository not in SKIP_IN_PATH: - __path = f"/_snapshot/{_quote(repository)}/_status" - else: - __path = "/_snapshot/_status" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_unavailable is not None: - __query["ignore_unavailable"] = ignore_unavailable - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def verify_repository( - self, - *, - name: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Verifies a repository. - - ``_ - - :param name: A repository name - :param master_timeout: Explicit operation timeout for connection to master node - :param timeout: Explicit operation timeout - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'name'") - __path = f"/_snapshot/{_quote(name)}/_verify" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - if timeout is not None: - __query["timeout"] = timeout - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/ssl.py b/elasticsearch_serverless/_sync/client/ssl.py deleted file mode 100644 index 99c1926..0000000 --- a/elasticsearch_serverless/_sync/client/ssl.py +++ /dev/null @@ -1,57 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class SslClient(NamespacedClient): - @_rewrite_parameters() - def certificates( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about the X.509 certificates used to encrypt communications - in the cluster. - - ``_ - """ - __path = "/_ssl/certificates" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/text_structure.py b/elasticsearch_serverless/_sync/client/text_structure.py deleted file mode 100644 index 5172c25..0000000 --- a/elasticsearch_serverless/_sync/client/text_structure.py +++ /dev/null @@ -1,158 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class TextStructureClient(NamespacedClient): - @_rewrite_parameters( - body_name="text_files", - ) - def find_structure( - self, - *, - text_files: t.Union[t.List[t.Any], t.Tuple[t.Any, ...]], - charset: t.Optional[str] = None, - column_names: t.Optional[str] = None, - delimiter: t.Optional[str] = None, - explain: t.Optional[bool] = None, - format: t.Optional[str] = None, - grok_pattern: t.Optional[str] = None, - has_header_row: t.Optional[bool] = None, - line_merge_size_limit: t.Optional[int] = None, - lines_to_sample: t.Optional[int] = None, - quote: t.Optional[str] = None, - should_trim_fields: t.Optional[bool] = None, - timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - timestamp_field: t.Optional[str] = None, - timestamp_format: t.Optional[str] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Finds the structure of a text file. The text file must contain data that is suitable - to be ingested into Elasticsearch. - - ``_ - - :param text_files: - :param charset: The text’s character set. It must be a character set that is - supported by the JVM that Elasticsearch uses. 
For example, UTF-8, UTF-16LE, - windows-1252, or EUC-JP. If this parameter is not specified, the structure - finder chooses an appropriate character set. - :param column_names: If you have set format to delimited, you can specify the - column names in a comma-separated list. If this parameter is not specified, - the structure finder uses the column names from the header row of the text. - If the text does not have a header role, columns are named "column1", "column2", - "column3", etc. - :param delimiter: If you have set format to delimited, you can specify the character - used to delimit the values in each row. Only a single character is supported; - the delimiter cannot have multiple characters. By default, the API considers - the following possibilities: comma, tab, semi-colon, and pipe (|). In this - default scenario, all rows must have the same number of fields for the delimited - format to be detected. If you specify a delimiter, up to 10% of the rows - can have a different number of columns than the first row. - :param explain: If this parameter is set to true, the response includes a field - named explanation, which is an array of strings that indicate how the structure - finder produced its result. - :param format: The high level structure of the text. Valid values are ndjson, - xml, delimited, and semi_structured_text. By default, the API chooses the - format. In this default scenario, all rows must have the same number of fields - for a delimited format to be detected. If the format is set to delimited - and the delimiter is not set, however, the API tolerates up to 5% of rows - that have a different number of columns than the first row. - :param grok_pattern: If you have set format to semi_structured_text, you can - specify a Grok pattern that is used to extract fields from every message - in the text. The name of the timestamp field in the Grok pattern must match - what is specified in the timestamp_field parameter. If that parameter is - not specified, the name of the timestamp field in the Grok pattern must match - "timestamp". If grok_pattern is not specified, the structure finder creates - a Grok pattern. - :param has_header_row: If you have set format to delimited, you can use this - parameter to indicate whether the column names are in the first row of the - text. If this parameter is not specified, the structure finder guesses based - on the similarity of the first row of the text to other rows. - :param line_merge_size_limit: The maximum number of characters in a message when - lines are merged to form messages while analyzing semi-structured text. If - you have extremely long messages you may need to increase this, but be aware - that this may lead to very long processing times if the way to group lines - into messages is misdetected. - :param lines_to_sample: The number of lines to include in the structural analysis, - starting from the beginning of the text. The minimum is 2; If the value of - this parameter is greater than the number of lines in the text, the analysis - proceeds (as long as there are at least two lines in the text) for all of - the lines. - :param quote: If you have set format to delimited, you can specify the character - used to quote the values in each row if they contain newlines or the delimiter - character. Only a single character is supported. If this parameter is not - specified, the default value is a double quote ("). 
If your delimited text - format does not use quoting, a workaround is to set this argument to a character - that does not appear anywhere in the sample. - :param should_trim_fields: If you have set format to delimited, you can specify - whether values between delimiters should have whitespace trimmed from them. - If this parameter is not specified and the delimiter is pipe (|), the default - value is true. Otherwise, the default value is false. - :param timeout: Sets the maximum amount of time that the structure analysis make - take. If the analysis is still running when the timeout expires then it will - be aborted. - :param timestamp_field: Optional parameter to specify the timestamp field in - the file - :param timestamp_format: The Java time format of the timestamp field in the text. - """ - if text_files is None: - raise ValueError("Empty value passed for parameter 'text_files'") - __path = "/_text_structure/find_structure" - __query: t.Dict[str, t.Any] = {} - if charset is not None: - __query["charset"] = charset - if column_names is not None: - __query["column_names"] = column_names - if delimiter is not None: - __query["delimiter"] = delimiter - if explain is not None: - __query["explain"] = explain - if format is not None: - __query["format"] = format - if grok_pattern is not None: - __query["grok_pattern"] = grok_pattern - if has_header_row is not None: - __query["has_header_row"] = has_header_row - if line_merge_size_limit is not None: - __query["line_merge_size_limit"] = line_merge_size_limit - if lines_to_sample is not None: - __query["lines_to_sample"] = lines_to_sample - if quote is not None: - __query["quote"] = quote - if should_trim_fields is not None: - __query["should_trim_fields"] = should_trim_fields - if timeout is not None: - __query["timeout"] = timeout - if timestamp_field is not None: - __query["timestamp_field"] = timestamp_field - if timestamp_format is not None: - __query["timestamp_format"] = timestamp_format - __body = text_files - __headers = { - "accept": "application/json", - "content-type": "application/x-ndjson", - } - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) diff --git a/elasticsearch_serverless/_sync/client/watcher.py b/elasticsearch_serverless/_sync/client/watcher.py deleted file mode 100644 index 2e1d866..0000000 --- a/elasticsearch_serverless/_sync/client/watcher.py +++ /dev/null @@ -1,607 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters - - -class WatcherClient(NamespacedClient): - @_rewrite_parameters() - def ack_watch( - self, - *, - watch_id: str, - action_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Acknowledges a watch, manually throttling the execution of the watch's actions. - - ``_ - - :param watch_id: Watch ID - :param action_id: A comma-separated list of the action ids to be acked - """ - if watch_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'watch_id'") - if watch_id not in SKIP_IN_PATH and action_id not in SKIP_IN_PATH: - __path = f"/_watcher/watch/{_quote(watch_id)}/_ack/{_quote(action_id)}" - elif watch_id not in SKIP_IN_PATH: - __path = f"/_watcher/watch/{_quote(watch_id)}/_ack" - else: - raise ValueError("Couldn't find a path for the given parameters") - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def activate_watch( - self, - *, - watch_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Activates a currently inactive watch. - - ``_ - - :param watch_id: Watch ID - """ - if watch_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'watch_id'") - __path = f"/_watcher/watch/{_quote(watch_id)}/_activate" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def deactivate_watch( - self, - *, - watch_id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Deactivates a currently active watch. 
- - ``_ - - :param watch_id: Watch ID - """ - if watch_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'watch_id'") - __path = f"/_watcher/watch/{_quote(watch_id)}/_deactivate" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def delete_watch( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Removes a watch from Watcher. - - ``_ - - :param id: Watch ID - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_watcher/watch/{_quote(id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "DELETE", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def execute_watch( - self, - *, - id: t.Optional[str] = None, - action_modes: t.Optional[ - t.Mapping[ - str, - t.Union[ - "t.Literal['execute', 'force_execute', 'force_simulate', 'simulate', 'skip']", - str, - ], - ] - ] = None, - alternative_input: t.Optional[t.Mapping[str, t.Any]] = None, - debug: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - ignore_condition: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - record_execution: t.Optional[bool] = None, - simulated_actions: t.Optional[t.Mapping[str, t.Any]] = None, - trigger_data: t.Optional[t.Mapping[str, t.Any]] = None, - watch: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Forces the execution of a stored watch. - - ``_ - - :param id: Identifier for the watch. - :param action_modes: Determines how to handle the watch actions as part of the - watch execution. - :param alternative_input: When present, the watch uses this object as a payload - instead of executing its own input. - :param debug: Defines whether the watch runs in debug mode. - :param ignore_condition: When set to `true`, the watch execution uses the always - condition. This can also be specified as an HTTP parameter. - :param record_execution: When set to `true`, the watch record representing the - watch execution result is persisted to the `.watcher-history` index for the - current time. In addition, the status of the watch is updated, possibly throttling - subsequent executions. This can also be specified as an HTTP parameter. 
- :param simulated_actions: - :param trigger_data: This structure is parsed as the data of the trigger event - that will be used during the watch execution - :param watch: When present, this watch is used instead of the one specified in - the request. This watch is not persisted to the index and record_execution - cannot be set. - """ - if id not in SKIP_IN_PATH: - __path = f"/_watcher/watch/{_quote(id)}/_execute" - else: - __path = "/_watcher/watch/_execute" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if action_modes is not None: - __body["action_modes"] = action_modes - if alternative_input is not None: - __body["alternative_input"] = alternative_input - if debug is not None: - __query["debug"] = debug - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if ignore_condition is not None: - __body["ignore_condition"] = ignore_condition - if pretty is not None: - __query["pretty"] = pretty - if record_execution is not None: - __body["record_execution"] = record_execution - if simulated_actions is not None: - __body["simulated_actions"] = simulated_actions - if trigger_data is not None: - __body["trigger_data"] = trigger_data - if watch is not None: - __body["watch"] = watch - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def get_watch( - self, - *, - id: str, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves a watch by its ID. - - ``_ - - :param id: Watch ID - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_watcher/watch/{_quote(id)}" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters( - body_fields=True, - ) - def put_watch( - self, - *, - id: str, - actions: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - active: t.Optional[bool] = None, - condition: t.Optional[t.Mapping[str, t.Any]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - if_primary_term: t.Optional[int] = None, - if_seq_no: t.Optional[int] = None, - input: t.Optional[t.Mapping[str, t.Any]] = None, - metadata: t.Optional[t.Mapping[str, t.Any]] = None, - pretty: t.Optional[bool] = None, - throttle_period: t.Optional[str] = None, - transform: t.Optional[t.Mapping[str, t.Any]] = None, - trigger: t.Optional[t.Mapping[str, t.Any]] = None, - version: t.Optional[int] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Creates a new watch, or updates an existing one. 
- - ``_ - - :param id: Watch ID - :param actions: - :param active: Specify whether the watch is in/active by default - :param condition: - :param if_primary_term: only update the watch if the last operation that has - changed the watch has the specified primary term - :param if_seq_no: only update the watch if the last operation that has changed - the watch has the specified sequence number - :param input: - :param metadata: - :param throttle_period: - :param transform: - :param trigger: - :param version: Explicit version number for concurrency control - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") - __path = f"/_watcher/watch/{_quote(id)}" - __body: t.Dict[str, t.Any] = {} - __query: t.Dict[str, t.Any] = {} - if actions is not None: - __body["actions"] = actions - if active is not None: - __query["active"] = active - if condition is not None: - __body["condition"] = condition - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if if_primary_term is not None: - __query["if_primary_term"] = if_primary_term - if if_seq_no is not None: - __query["if_seq_no"] = if_seq_no - if input is not None: - __body["input"] = input - if metadata is not None: - __body["metadata"] = metadata - if pretty is not None: - __query["pretty"] = pretty - if throttle_period is not None: - __body["throttle_period"] = throttle_period - if transform is not None: - __body["transform"] = transform - if trigger is not None: - __body["trigger"] = trigger - if version is not None: - __query["version"] = version - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "PUT", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters( - body_fields=True, - parameter_aliases={"from": "from_"}, - ) - def query_watches( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - from_: t.Optional[int] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - query: t.Optional[t.Mapping[str, t.Any]] = None, - search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] - ] = None, - size: t.Optional[int] = None, - sort: t.Optional[ - t.Union[ - t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], - ] - ] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves stored watches. - - ``_ - - :param from_: The offset from the first result to fetch. Needs to be non-negative. - :param query: Optional, query filter watches to be returned. - :param search_after: Optional search After to do pagination using last hit’s - sort values. - :param size: The number of hits to return. Needs to be non-negative. - :param sort: Optional sort definition. - """ - __path = "/_watcher/_query/watches" - __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = {} - # The 'sort' parameter with a colon can't be encoded to the body. 
- if sort is not None and ( - (isinstance(sort, str) and ":" in sort) - or ( - isinstance(sort, (list, tuple)) - and all(isinstance(_x, str) for _x in sort) - and any(":" in _x for _x in sort) - ) - ): - __query["sort"] = sort - sort = None - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if from_ is not None: - __body["from"] = from_ - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - if query is not None: - __body["query"] = query - if search_after is not None: - __body["search_after"] = search_after - if size is not None: - __body["size"] = size - if sort is not None: - __body["sort"] = sort - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body - ) - - @_rewrite_parameters() - def start( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Starts Watcher if it is not already running. - - ``_ - """ - __path = "/_watcher/_start" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def stats( - self, - *, - metric: t.Optional[ - t.Union[ - t.Union[ - "t.Literal['_all', 'current_watches', 'pending_watches', 'queued_watches']", - str, - ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['_all', 'current_watches', 'pending_watches', 'queued_watches']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['_all', 'current_watches', 'pending_watches', 'queued_watches']", - str, - ], - ..., - ], - ], - ] - ] = None, - emit_stacktraces: t.Optional[bool] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves the current Watcher metrics. - - ``_ - - :param metric: Defines which additional metrics are included in the response. - :param emit_stacktraces: Defines whether stack traces are generated for each - watch that is running. 
- """ - if metric not in SKIP_IN_PATH: - __path = f"/_watcher/stats/{_quote(metric)}" - else: - __path = "/_watcher/stats" - __query: t.Dict[str, t.Any] = {} - if emit_stacktraces is not None: - __query["emit_stacktraces"] = emit_stacktraces - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def stop( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Stops Watcher if it is running. - - ``_ - """ - __path = "/_watcher/_stop" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/_sync/client/xpack.py b/elasticsearch_serverless/_sync/client/xpack.py deleted file mode 100644 index b1fbdd5..0000000 --- a/elasticsearch_serverless/_sync/client/xpack.py +++ /dev/null @@ -1,111 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import typing as t - -from elastic_transport import ObjectApiResponse - -from ._base import NamespacedClient -from .utils import _rewrite_parameters - - -class XPackClient(NamespacedClient): - def __getattr__(self, attr_name: str) -> t.Any: - return getattr(self.client, attr_name) - - # AUTO-GENERATED-API-DEFINITIONS # - - @_rewrite_parameters() - def info( - self, - *, - accept_enterprise: t.Optional[bool] = None, - categories: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves information about the installed X-Pack features. - - ``_ - - :param accept_enterprise: If this param is used it must be set to true - :param categories: A comma-separated list of the information categories to include - in the response. For example, `build,license,features`. 
- """ - __path = "/_xpack" - __query: t.Dict[str, t.Any] = {} - if accept_enterprise is not None: - __query["accept_enterprise"] = accept_enterprise - if categories is not None: - __query["categories"] = categories - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) - - @_rewrite_parameters() - def usage( - self, - *, - error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - human: t.Optional[bool] = None, - master_timeout: t.Optional[ - t.Union["t.Literal[-1]", "t.Literal[0]", str] - ] = None, - pretty: t.Optional[bool] = None, - ) -> ObjectApiResponse[t.Any]: - """ - Retrieves usage information about the installed X-Pack features. - - ``_ - - :param master_timeout: Period to wait for a connection to the master node. If - no response is received before the timeout expires, the request fails and - returns an error. - """ - __path = "/_xpack/usage" - __query: t.Dict[str, t.Any] = {} - if error_trace is not None: - __query["error_trace"] = error_trace - if filter_path is not None: - __query["filter_path"] = filter_path - if human is not None: - __query["human"] = human - if master_timeout is not None: - __query["master_timeout"] = master_timeout - if pretty is not None: - __query["pretty"] = pretty - __headers = {"accept": "application/json"} - return self.perform_request( # type: ignore[return-value] - "GET", __path, params=__query, headers=__headers - ) diff --git a/elasticsearch_serverless/client.py b/elasticsearch_serverless/client.py index ae31a2f..802268b 100644 --- a/elasticsearch_serverless/client.py +++ b/elasticsearch_serverless/client.py @@ -21,50 +21,24 @@ from ._sync.client.async_search import ( # noqa: F401 AsyncSearchClient as AsyncSearchClient, ) -from ._sync.client.autoscaling import ( # noqa: F401 - AutoscalingClient as AutoscalingClient, -) from ._sync.client.cat import CatClient as CatClient # noqa: F401 -from ._sync.client.ccr import CcrClient as CcrClient # noqa: F401 from ._sync.client.cluster import ClusterClient as ClusterClient # noqa: F401 -from ._sync.client.dangling_indices import ( # noqa: F401 - DanglingIndicesClient as DanglingIndicesClient, -) from ._sync.client.enrich import EnrichClient as EnrichClient # noqa: F401 from ._sync.client.eql import EqlClient as EqlClient # noqa: F401 -from ._sync.client.features import FeaturesClient as FeaturesClient # noqa: F401 -from ._sync.client.fleet import FleetClient as FleetClient # noqa: F401 from ._sync.client.graph import GraphClient as GraphClient # noqa: F401 -from ._sync.client.ilm import IlmClient as IlmClient # noqa: F401 from ._sync.client.indices import IndicesClient as IndicesClient # noqa: F401 from ._sync.client.ingest import IngestClient as IngestClient # noqa: F401 from ._sync.client.license import LicenseClient as LicenseClient # noqa: F401 from ._sync.client.logstash import LogstashClient as LogstashClient # noqa: F401 -from ._sync.client.migration import MigrationClient as MigrationClient # noqa: F401 from ._sync.client.ml import MlClient as MlClient # noqa: F401 -from ._sync.client.monitoring import MonitoringClient as MonitoringClient # noqa: F401 -from ._sync.client.nodes 
import NodesClient as NodesClient # noqa: F401 from ._sync.client.query_ruleset import ( # noqa: F401 QueryRulesetClient as QueryRulesetClient, ) -from ._sync.client.rollup import RollupClient as RollupClient # noqa: F401 -from ._sync.client.searchable_snapshots import ( # noqa: F401 - SearchableSnapshotsClient as SearchableSnapshotsClient, -) from ._sync.client.security import SecurityClient as SecurityClient # noqa: F401 -from ._sync.client.shutdown import ShutdownClient as ShutdownClient # noqa: F401 -from ._sync.client.slm import SlmClient as SlmClient # noqa: F401 -from ._sync.client.snapshot import SnapshotClient as SnapshotClient # noqa: F401 from ._sync.client.sql import SqlClient as SqlClient # noqa: F401 -from ._sync.client.ssl import SslClient as SslClient # noqa: F401 from ._sync.client.synonyms import SynonymsClient as SynonymsClient # noqa: F401 from ._sync.client.tasks import TasksClient as TasksClient # noqa: F401 -from ._sync.client.text_structure import ( # noqa: F401 - TextStructureClient as TextStructureClient, -) from ._sync.client.transform import TransformClient as TransformClient # noqa: F401 -from ._sync.client.watcher import WatcherClient as WatcherClient # noqa: F401 -from ._sync.client.xpack import XPackClient as XPackClient # noqa: F401 from ._utils import fixup_module_metadata # This file exists for backwards compatibility. @@ -77,39 +51,21 @@ __all__ = [ "AsyncSearchClient", - "AutoscalingClient", "CatClient", - "CcrClient", "ClusterClient", - "DanglingIndicesClient", "Elasticsearch", "EnrichClient", "EqlClient", - "FeaturesClient", - "FleetClient", "GraphClient", - "IlmClient", "IndicesClient", "IngestClient", "LicenseClient", "LogstashClient", - "MigrationClient", "MlClient", - "MonitoringClient", - "NodesClient", - "RollupClient", - "SearchableSnapshotsClient", "SecurityClient", - "ShutdownClient", - "SlmClient", - "SnapshotClient", "SqlClient", - "SslClient", "TasksClient", - "TextStructureClient", "TransformClient", - "WatcherClient", - "XPackClient", ] fixup_module_metadata(__name__, globals()) From fd8d44f2ea8ab319172328313bc643f8156fa936 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 8 Apr 2024 14:24:03 +0400 Subject: [PATCH 2/2] Update generated code --- docs/sphinx/api.rst | 6 + .../_async/client/__init__.py | 563 ++++---------- .../_async/client/async_search.py | 85 +-- elasticsearch_serverless/_async/client/cat.py | 309 +++----- .../_async/client/cluster.py | 45 +- .../_async/client/enrich.py | 23 +- elasticsearch_serverless/_async/client/eql.py | 53 +- .../_async/client/graph.py | 11 +- .../_async/client/indices.py | 706 +++++------------- .../_async/client/inference.py | 238 ++++++ .../_async/client/ingest.py | 33 +- .../_async/client/license.py | 5 +- .../_async/client/logstash.py | 17 +- elasticsearch_serverless/_async/client/ml.py | 341 +++------ .../_async/client/query_ruleset.py | 21 +- .../_async/client/search_application.py | 37 +- .../_async/client/security.py | 126 ++-- elasticsearch_serverless/_async/client/sql.py | 25 +- .../_async/client/synonyms.py | 35 +- .../_async/client/tasks.py | 5 +- .../_async/client/transform.py | 53 +- .../_sync/client/__init__.py | 563 ++++---------- .../_sync/client/async_search.py | 85 +-- elasticsearch_serverless/_sync/client/cat.py | 309 +++----- .../_sync/client/cluster.py | 45 +- .../_sync/client/enrich.py | 23 +- elasticsearch_serverless/_sync/client/eql.py | 53 +- .../_sync/client/graph.py | 11 +- .../_sync/client/indices.py | 706 +++++------------- .../_sync/client/inference.py | 238 
++++++ .../_sync/client/ingest.py | 33 +- .../_sync/client/license.py | 5 +- .../_sync/client/logstash.py | 17 +- elasticsearch_serverless/_sync/client/ml.py | 341 +++------ .../_sync/client/query_ruleset.py | 21 +- .../_sync/client/search_application.py | 37 +- .../_sync/client/security.py | 126 ++-- elasticsearch_serverless/_sync/client/sql.py | 25 +- .../_sync/client/synonyms.py | 35 +- .../_sync/client/tasks.py | 5 +- .../_sync/client/transform.py | 53 +- elasticsearch_serverless/client.py | 2 + 42 files changed, 2012 insertions(+), 3458 deletions(-) create mode 100644 elasticsearch_serverless/_async/client/inference.py create mode 100644 elasticsearch_serverless/_sync/client/inference.py diff --git a/docs/sphinx/api.rst b/docs/sphinx/api.rst index 77895da..56c5bc6 100644 --- a/docs/sphinx/api.rst +++ b/docs/sphinx/api.rst @@ -66,6 +66,12 @@ Indices .. autoclass:: IndicesClient :members: +Inference +--------- + +.. autoclass:: InferenceClient + :members: + Ingest Pipelines ---------------- diff --git a/elasticsearch_serverless/_async/client/__init__.py b/elasticsearch_serverless/_async/client/__init__.py index aec5b40..2cda8a9 100644 --- a/elasticsearch_serverless/_async/client/__init__.py +++ b/elasticsearch_serverless/_async/client/__init__.py @@ -41,6 +41,7 @@ from .eql import EqlClient from .graph import GraphClient from .indices import IndicesClient +from .inference import InferenceClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient @@ -279,6 +280,7 @@ def __init__( self.cat = CatClient(self) self.cluster = ClusterClient(self) self.indices = IndicesClient(self) + self.inference = InferenceClient(self) self.ingest = IngestClient(self) self.tasks = TasksClient(self) @@ -442,14 +444,10 @@ async def ping( async def bulk( self, *, - operations: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + operations: t.Sequence[t.Mapping[str, t.Any]], index: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pipeline: t.Optional[str] = None, pretty: t.Optional[bool] = None, @@ -458,15 +456,9 @@ async def bulk( ] = None, require_alias: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, wait_for_active_shards: t.Optional[ t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]] @@ -552,14 +544,10 @@ async def clear_scroll( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, - scroll_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + scroll_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, ) -> ObjectApiResponse[t.Any]: """ Explicitly clears the search context for a scroll. @@ -598,9 +586,7 @@ async def close_point_in_time( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -641,7 +627,7 @@ async def close_point_in_time( async def count( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, analyzer: t.Optional[str] = None, @@ -650,25 +636,13 @@ async def count( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -786,9 +760,7 @@ async def create( id: str, document: t.Mapping[str, t.Any], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, 
t.Sequence[str]]] = None, human: t.Optional[bool] = None, pipeline: t.Optional[str] = None, pretty: t.Optional[bool] = None, @@ -881,9 +853,7 @@ async def delete( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_primary_term: t.Optional[int] = None, if_seq_no: t.Optional[int] = None, @@ -968,7 +938,7 @@ async def delete( async def delete_by_query( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, analyzer: t.Optional[str] = None, @@ -978,25 +948,13 @@ async def delete_by_query( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -1020,8 +978,8 @@ async def delete_by_query( ] = None, slice: t.Optional[t.Mapping[str, t.Any]] = None, slices: t.Optional[t.Union[int, t.Union["t.Literal['auto']", str]]] = None, - sort: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + sort: t.Optional[t.Sequence[str]] = None, + stats: t.Optional[t.Sequence[str]] = None, terminate_after: t.Optional[int] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, version: t.Optional[bool] = None, @@ -1195,9 +1153,7 @@ async def delete_script( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -1251,27 +1207,17 @@ async def exists( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = 
None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1357,24 +1303,16 @@ async def exists_source( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1460,9 +1398,7 @@ async def explain( default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, df: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, lenient: t.Optional[bool] = None, preference: t.Optional[str] = None, @@ -1470,18 +1406,10 @@ async def explain( q: t.Optional[str] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, ) -> ObjectApiResponse[t.Any]: """ Returns information about why a specific matches (or doesn't match) a query. 
@@ -1570,41 +1498,28 @@ async def explain( async def field_caps( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, filters: t.Optional[str] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, + include_empty_fields: t.Optional[bool] = None, include_unmapped: t.Optional[bool] = None, index_filter: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - types: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + types: t.Optional[t.Sequence[str]] = None, ) -> ObjectApiResponse[t.Any]: """ Returns the information about the capabilities of fields among multiple indices. @@ -1628,6 +1543,7 @@ async def field_caps( :param filters: An optional set of filters: can include +metadata,-metadata,-nested,-multifield,-parent :param ignore_unavailable: If `true`, missing or closed indices are not included in the response. + :param include_empty_fields: If false, empty fields are not included in the response. :param include_unmapped: If true, unmapped fields are included in the response. :param index_filter: Allows to filter indices if the provided query rewrites to match_none on every shard. 
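The new `include_empty_fields` flag added to `field_caps` above is a plain boolean query parameter (its `__query` handling appears in the next hunk). A hedged usage sketch, where the index pattern, field wildcards, and the `AsyncElasticsearch` class name are illustrative assumptions:

from elasticsearch_serverless import AsyncElasticsearch

async def list_populated_fields(client: AsyncElasticsearch) -> None:
    # include_empty_fields=False asks the server to drop fields that have no values.
    resp = await client.field_caps(
        index="metrics-*",
        fields=["host.*", "cloud.*"],
        include_empty_fields=False,
    )
    print(sorted(resp["fields"]))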
@@ -1660,6 +1576,8 @@ async def field_caps( __query["human"] = human if ignore_unavailable is not None: __query["ignore_unavailable"] = ignore_unavailable + if include_empty_fields is not None: + __query["include_empty_fields"] = include_empty_fields if include_unmapped is not None: __query["include_unmapped"] = include_unmapped if index_filter is not None: @@ -1692,27 +1610,18 @@ async def get( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + force_synthetic_source: t.Optional[bool] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1725,6 +1634,10 @@ async def get( :param index: Name of the index that contains the document. :param id: Unique identifier of the document. + :param force_synthetic_source: Should this request force synthetic _source? Use + this to test if the mapping supports synthetic _source and to get a sense + of the worst case performance. Fetches with this enabled will be slower the + enabling synthetic source natively in the index. :param preference: Specifies the node or shard the operation should be performed on. Random by default. :param realtime: If `true`, the request is real-time as opposed to near-real-time. 
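`get` now accepts the `force_synthetic_source` flag and forwards it as a query parameter in the following hunk. A minimal sketch of a call using it, with placeholder index and document id:

from elasticsearch_serverless import AsyncElasticsearch

async def fetch_doc(client: AsyncElasticsearch) -> None:
    # force_synthetic_source=True rebuilds _source synthetically, which is slower
    # but useful for checking that the mapping supports synthetic _source.
    doc = await client.get(
        index="my-index",
        id="1",
        force_synthetic_source=True,
        source_includes=("message", "@timestamp"),  # tuple satisfies Sequence[str]
    )
    print(doc["_source"])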
@@ -1755,6 +1668,8 @@ async def get( __query["error_trace"] = error_trace if filter_path is not None: __query["filter_path"] = filter_path + if force_synthetic_source is not None: + __query["force_synthetic_source"] = force_synthetic_source if human is not None: __query["human"] = human if preference is not None: @@ -1790,9 +1705,7 @@ async def get_script( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -1839,27 +1752,17 @@ async def get_source( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1939,9 +1842,7 @@ async def index( document: t.Mapping[str, t.Any], id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_primary_term: t.Optional[int] = None, if_seq_no: t.Optional[int] = None, @@ -2054,9 +1955,7 @@ async def info( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -2092,32 +1991,21 @@ async def mget( self, *, index: t.Optional[str] = None, - docs: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docs: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + force_synthetic_source: t.Optional[bool] = None, human: t.Optional[bool] = None, - ids: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + ids: t.Optional[t.Union[str, t.Sequence[str]]] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - 
source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, ) -> ObjectApiResponse[t.Any]: """ Allows to get multiple documents in one request. @@ -2128,6 +2016,10 @@ async def mget( or when a document in the `docs` array does not specify an index. :param docs: The documents you want to retrieve. Required if no index is specified in the request URI. + :param force_synthetic_source: Should this request force synthetic _source? Use + this to test if the mapping supports synthetic _source and to get a sense + of the worst case performance. Fetches with this enabled will be slower the + enabling synthetic source natively in the index. :param ids: The IDs of the documents you want to retrieve. Allowed when the index is specified in the request URI. :param preference: Specifies the node or shard the operation should be performed @@ -2161,6 +2053,8 @@ async def mget( __query["error_trace"] = error_trace if filter_path is not None: __query["filter_path"] = filter_path + if force_synthetic_source is not None: + __query["force_synthetic_source"] = force_synthetic_source if human is not None: __query["human"] = human if ids is not None: @@ -2194,34 +2088,20 @@ async def mget( async def msearch( self, *, - searches: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + searches: t.Sequence[t.Mapping[str, t.Any]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, ccs_minimize_roundtrips: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -2331,15 +2211,11 @@ async def msearch( async def msearch_template( self, *, - search_templates: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + search_templates: t.Sequence[t.Mapping[str, t.Any]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, ccs_minimize_roundtrips: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_concurrent_searches: t.Optional[int] = None, pretty: t.Optional[bool] = None, @@ -2410,19 +2286,13 @@ async def mtermvectors( self, *, index: t.Optional[str] = None, - docs: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docs: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, field_statistics: t.Optional[bool] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - ids: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + ids: t.Optional[t.Sequence[str]] = None, offsets: t.Optional[bool] = None, payloads: t.Optional[bool] = None, positions: t.Optional[bool] = None, @@ -2515,30 +2385,18 @@ async def mtermvectors( async def open_point_in_time( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], keep_alive: t.Union["t.Literal[-1]", "t.Literal[0]", str], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, preference: t.Optional[str] = None, @@ -2602,9 +2460,7 @@ async def put_script( script: t.Mapping[str, t.Any], context: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -2667,33 +2523,19 @@ async def put_script( async def rank_eval( self, *, - requests: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + requests: t.Sequence[t.Mapping[str, t.Any]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, metric: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2767,9 +2609,7 @@ async def reindex( source: t.Mapping[str, t.Any], conflicts: t.Optional[t.Union["t.Literal['abort', 'proceed']", str]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_docs: t.Optional[int] = None, pretty: t.Optional[bool] = None, @@ -2875,9 +2715,7 @@ async def render_search_template( id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, file: t.Optional[str] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, params: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -2935,9 +2773,7 @@ async def scripts_painless_execute( context: t.Optional[str] = None, context_setup: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, script: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2985,9 +2821,7 @@ async def scroll( *, scroll_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, rest_total_hits_as_int: t.Optional[bool] = None, @@ -3044,7 +2878,7 @@ async def scroll( async def search( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, allow_no_indices: t.Optional[bool] = None, @@ -3056,51 +2890,29 @@ async def search( collapse: t.Optional[t.Mapping[str, t.Any]] = None, default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, df: t.Optional[str] = None, - docvalue_fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docvalue_fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: 
t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, ext: t.Optional[t.Mapping[str, t.Any]] = None, - fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + force_synthetic_source: t.Optional[bool] = None, from_: t.Optional[int] = None, highlight: t.Optional[t.Mapping[str, t.Any]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indices_boost: t.Optional[ - t.Union[t.List[t.Mapping[str, float]], t.Tuple[t.Mapping[str, float], ...]] - ] = None, + indices_boost: t.Optional[t.Sequence[t.Mapping[str, float]]] = None, knn: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, lenient: t.Optional[bool] = None, max_concurrent_shard_requests: t.Optional[int] = None, @@ -3117,12 +2929,7 @@ async def search( rank: t.Optional[t.Mapping[str, t.Any]] = None, request_cache: t.Optional[bool] = None, rescore: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, rest_total_hits_as_int: t.Optional[bool] = None, routing: t.Optional[str] = None, @@ -3130,10 +2937,7 @@ async def search( script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, scroll: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] ] = None, search_type: t.Optional[ t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] @@ -3143,24 +2947,15 @@ async def search( slice: t.Optional[t.Mapping[str, t.Any]] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stats: t.Optional[t.Sequence[str]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, suggest: t.Optional[t.Mapping[str, t.Any]] = None, suggest_field: t.Optional[str] = None, suggest_mode: t.Optional[ @@ -3225,6 +3020,10 @@ async def search( :param fields: Array of wildcard (`*`) patterns. The request returns values for field names matching these patterns in the `hits.fields` property of the response. + :param force_synthetic_source: Should this request force synthetic _source? Use + this to test if the mapping supports synthetic _source and to get a sense + of the worst case performance. Fetches with this enabled will be slower the + enabling synthetic source natively in the index. :param from_: Starting document offset. Needs to be non-negative. By default, you cannot page through more than 10,000 hits using the `from` and `size` parameters. To page through more hits, use the `search_after` parameter. 
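`search` gains the same `force_synthetic_source` flag; combined with the Sequence-typed `sort` and `source_includes` parameters, a call could look like the sketch below (index, query, and field names are illustrative only):

from elasticsearch_serverless import AsyncElasticsearch

async def recent_errors(client: AsyncElasticsearch) -> None:
    resp = await client.search(
        index="my-index",
        query={"match": {"message": "error"}},
        sort=[{"@timestamp": {"order": "desc"}}],   # Sequence of str or mapping
        source_includes=("message", "@timestamp"),  # tuple satisfies Sequence[str]
        force_synthetic_source=True,
        size=10,
    )
    for hit in resp["hits"]["hits"]:
        print(hit["_id"], hit["_source"])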
@@ -3415,6 +3214,8 @@ async def search( __body["fields"] = fields if filter_path is not None: __query["filter_path"] = filter_path + if force_synthetic_source is not None: + __query["force_synthetic_source"] = force_synthetic_source if from_ is not None: __body["from"] = from_ if highlight is not None: @@ -3528,7 +3329,7 @@ async def search( async def search_mvt( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], field: str, zoom: int, x: int, @@ -3538,12 +3339,8 @@ async def search_mvt( error_trace: t.Optional[bool] = None, exact_bounds: t.Optional[bool] = None, extent: t.Optional[int] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, grid_agg: t.Optional[t.Union["t.Literal['geohex', 'geotile']", str]] = None, grid_precision: t.Optional[int] = None, grid_type: t.Optional[ @@ -3556,11 +3353,8 @@ async def search_mvt( size: t.Optional[int] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, track_total_hits: t.Optional[t.Union[bool, int]] = None, @@ -3691,32 +3485,20 @@ async def search_mvt( async def search_template( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, ccs_minimize_roundtrips: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, id: t.Optional[str] = None, ignore_throttled: t.Optional[bool] = None, @@ -3838,9 +3620,7 @@ async def terms_enum( field: str, case_insensitive: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, index_filter: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -3922,13 +3702,9 @@ async def termvectors( doc: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, field_statistics: t.Optional[bool] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, 
t.Sequence[str]]] = None, human: t.Optional[bool] = None, offsets: t.Optional[bool] = None, payloads: t.Optional[bool] = None, @@ -4045,9 +3821,7 @@ async def update( doc: t.Optional[t.Mapping[str, t.Any]] = None, doc_as_upsert: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_primary_term: t.Optional[int] = None, if_seq_no: t.Optional[int] = None, @@ -4062,12 +3836,8 @@ async def update( script: t.Optional[t.Mapping[str, t.Any]] = None, scripted_upsert: t.Optional[bool] = None, source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, upsert: t.Optional[t.Mapping[str, t.Any]] = None, wait_for_active_shards: t.Optional[ @@ -4178,7 +3948,7 @@ async def update( async def update_by_query( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, analyzer: t.Optional[str] = None, @@ -4188,25 +3958,13 @@ async def update_by_query( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -4231,8 +3989,8 @@ async def update_by_query( ] = None, slice: t.Optional[t.Mapping[str, t.Any]] = None, slices: t.Optional[t.Union[int, t.Union["t.Literal['auto']", str]]] = None, - sort: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + sort: t.Optional[t.Sequence[str]] = None, + stats: t.Optional[t.Sequence[str]] = None, terminate_after: t.Optional[int] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, version: t.Optional[bool] = None, @@ -4243,8 +4001,9 @@ async def update_by_query( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs an update on every document in the index without changing the source, - for example to pick up a mapping change. + Updates documents that match the specified query. If no query is specified, performs + an update on every document in the index without changing the source, for example + to pick up a mapping change. 
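An illustrative call matching the reworded summary, updating only the documents that match the query (index, field, and script values are hypothetical):

from elasticsearch_serverless import AsyncElasticsearch

async def mark_processed(client: AsyncElasticsearch) -> None:
    await client.update_by_query(
        index="my-index",
        query={"term": {"status": "pending"}},
        script={"source": "ctx._source.status = 'processed'"},
        conflicts="proceed",   # keep going past version conflicts
    )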
``_ diff --git a/elasticsearch_serverless/_async/client/async_search.py b/elasticsearch_serverless/_async/client/async_search.py index 1351d3e..9269449 100644 --- a/elasticsearch_serverless/_async/client/async_search.py +++ b/elasticsearch_serverless/_async/client/async_search.py @@ -24,15 +24,14 @@ class AsyncSearchClient(NamespacedClient): + @_rewrite_parameters() async def delete( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -67,9 +66,7 @@ async def get( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, pretty: t.Optional[bool] = None, @@ -129,9 +126,7 @@ async def status( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -172,7 +167,7 @@ async def status( async def submit( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, allow_no_indices: t.Optional[bool] = None, @@ -184,53 +179,30 @@ async def submit( collapse: t.Optional[t.Mapping[str, t.Any]] = None, default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, df: t.Optional[str] = None, - docvalue_fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docvalue_fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, ext: t.Optional[t.Mapping[str, t.Any]] = None, - fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, highlight: t.Optional[t.Mapping[str, t.Any]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indices_boost: t.Optional[ - t.Union[t.List[t.Mapping[str, float]], t.Tuple[t.Mapping[str, float], ...]] - ] = None, + indices_boost: t.Optional[t.Sequence[t.Mapping[str, float]]] = None, keep_alive: 
t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, keep_on_completion: t.Optional[bool] = None, knn: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, lenient: t.Optional[bool] = None, max_concurrent_shard_requests: t.Optional[int] = None, @@ -246,12 +218,7 @@ async def submit( query: t.Optional[t.Mapping[str, t.Any]] = None, request_cache: t.Optional[bool] = None, rescore: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, rest_total_hits_as_int: t.Optional[bool] = None, routing: t.Optional[str] = None, @@ -259,10 +226,7 @@ async def submit( script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, scroll: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] ] = None, search_type: t.Optional[ t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] @@ -272,24 +236,15 @@ async def submit( slice: t.Optional[t.Mapping[str, t.Any]] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stats: t.Optional[t.Sequence[str]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, suggest: t.Optional[t.Mapping[str, t.Any]] = None, suggest_field: t.Optional[str] = None, suggest_mode: t.Optional[ diff --git a/elasticsearch_serverless/_async/client/cat.py b/elasticsearch_serverless/_async/client/cat.py index da1e424..c9a65c5 100644 --- a/elasticsearch_serverless/_async/client/cat.py +++ b/elasticsearch_serverless/_async/client/cat.py @@ -24,35 +24,24 @@ class CatClient(NamespacedClient): + @_rewrite_parameters() async def aliases( self, *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, 
t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -60,7 +49,7 @@ async def aliases( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ @@ -128,11 +117,9 @@ async def component_templates( *, name: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -140,7 +127,7 @@ async def component_templates( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ @@ -201,13 +188,11 @@ async def component_templates( async def count( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -215,7 +200,7 @@ async def count( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ @@ -279,11 +264,9 @@ async def help( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -291,7 +274,7 @@ async def help( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> TextApiResponse: """ @@ -347,34 +330,22 @@ async def help( async def indices( self, *, - 
index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, bytes: t.Optional[ t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, health: t.Optional[t.Union["t.Literal['green', 'red', 'yellow']", str]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, @@ -385,7 +356,7 @@ async def indices( ] = None, pretty: t.Optional[bool] = None, pri: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, time: t.Optional[ t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] ] = None, @@ -477,31 +448,20 @@ async def ml_data_frame_analytics( t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", + str, + ] + ], t.Union[ "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -513,25 +473,16 @@ async def ml_data_frame_analytics( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", + str, + ] + ], t.Union[ 
"t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ], - ..., - ], - ], ] ] = None, time: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -606,31 +557,20 @@ async def ml_datafeeds( datafeed_id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", + str, + ] + ], t.Union[ "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -642,25 +582,16 @@ async def ml_datafeeds( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", + str, + ] + ], t.Union[ "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ], - ..., - ], - ], ] ] = None, time: t.Optional[ @@ -743,31 +674,20 @@ async def ml_jobs( t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 
'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", + str, + ] + ], t.Union[ "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - 
str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -779,25 +699,16 @@ async def ml_jobs( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", + str, + ] + ], t.Union[ "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 
'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 
'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - str, - ], - ..., - ], - ], ] ] = None, time: t.Optional[ @@ -884,32 +795,21 @@ async def ml_trained_models( t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, from_: t.Optional[int] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", + str, + ] + ], t.Union[ "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -921,25 +821,16 @@ async def ml_trained_models( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", + str, + ] + ], t.Union[ "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ], - ..., - ], - ], ] ] = None, size: t.Optional[int] = None, @@ -1024,32 +915,21 @@ async def transforms( transform_id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, from_: t.Optional[int] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 
'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", + str, + ] + ], t.Union[ "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -1061,25 +941,16 @@ async def transforms( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", + str, + ] + ], t.Union[ "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 
'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ], - ..., - ], - ], ] ] = None, size: t.Optional[int] = None, diff --git a/elasticsearch_serverless/_async/client/cluster.py b/elasticsearch_serverless/_async/client/cluster.py index f5f581e..c1d9965 100644 --- a/elasticsearch_serverless/_async/client/cluster.py +++ b/elasticsearch_serverless/_async/client/cluster.py @@ -24,15 +24,14 @@ class ClusterClient(NamespacedClient): + @_rewrite_parameters() async def delete_component_template( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -78,11 +77,9 @@ async def delete_component_template( async def exists_component_template( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -131,9 +128,7 @@ async def get_component_template( *, name: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, @@ -190,29 +185,17 @@ async def info( self, *, target: t.Union[ - t.Union[ - "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", str + t.Sequence[ + t.Union[ + "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", str + ] ], t.Union[ - 
t.List[ - t.Union[ - "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", - str, - ], - ..., - ], + "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", str ], ], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -253,9 +236,7 @@ async def put_component_template( allow_auto_create: t.Optional[bool] = None, create: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] diff --git a/elasticsearch_serverless/_async/client/enrich.py b/elasticsearch_serverless/_async/client/enrich.py index 62abc72..d639ac4 100644 --- a/elasticsearch_serverless/_async/client/enrich.py +++ b/elasticsearch_serverless/_async/client/enrich.py @@ -24,15 +24,14 @@ class EnrichClient(NamespacedClient): + @_rewrite_parameters() async def delete_policy( self, *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -66,9 +65,7 @@ async def execute_policy( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, wait_for_completion: t.Optional[bool] = None, @@ -105,11 +102,9 @@ async def execute_policy( async def get_policy( self, *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -147,9 +142,7 @@ async def put_policy( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, geo_match: t.Optional[t.Mapping[str, t.Any]] = None, human: t.Optional[bool] = None, match: t.Optional[t.Mapping[str, t.Any]] = None, @@ -197,9 +190,7 @@ async def stats( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_async/client/eql.py b/elasticsearch_serverless/_async/client/eql.py index 056e45b..4a3e5d1 100644 --- a/elasticsearch_serverless/_async/client/eql.py +++ b/elasticsearch_serverless/_async/client/eql.py @@ -24,15 
+24,14 @@ class EqlClient(NamespacedClient): + @_rewrite_parameters() async def delete( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -69,9 +68,7 @@ async def get( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, pretty: t.Optional[bool] = None, @@ -82,7 +79,7 @@ async def get( """ Returns async results from previously executed Event Query Language (EQL) search - `< https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-eql-search-api.html>`_ + ``_ :param id: Identifier for the search. :param keep_alive: Period for which the search and its results are stored on @@ -119,9 +116,7 @@ async def get_status( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -129,7 +124,7 @@ async def get_status( Returns the status of a previously submitted async or stored Event Query Language (EQL) search - `< https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-eql-status-api.html>`_ + ``_ :param id: Identifier for the search. """ @@ -156,7 +151,7 @@ async def get_status( async def search( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], query: str, allow_no_indices: t.Optional[bool] = None, case_sensitive: t.Optional[bool] = None, @@ -164,42 +159,20 @@ async def search( event_category_field: t.Optional[str] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, fetch_size: t.Optional[int] = None, fields: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, filter: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, diff --git a/elasticsearch_serverless/_async/client/graph.py b/elasticsearch_serverless/_async/client/graph.py index 214bd31..c814b77 100644 --- a/elasticsearch_serverless/_async/client/graph.py +++ b/elasticsearch_serverless/_async/client/graph.py @@ -24,27 +24,24 @@ class GraphClient(NamespacedClient): + @_rewrite_parameters( body_fields=True, ) async def explore( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], connections: t.Optional[t.Mapping[str, t.Any]] = None, controls: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, routing: t.Optional[str] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - vertices: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + vertices: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, ) -> ObjectApiResponse[t.Any]: """ Explore extracted and summarized information about the documents and terms in diff --git a/elasticsearch_serverless/_async/client/indices.py b/elasticsearch_serverless/_async/client/indices.py index 40c59ea..c9e9443 100644 --- a/elasticsearch_serverless/_async/client/indices.py +++ b/elasticsearch_serverless/_async/client/indices.py @@ -24,6 +24,7 @@ class IndicesClient(NamespacedClient): + @_rewrite_parameters() async def add_block( self, @@ -34,25 +35,13 @@ async def add_block( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -115,29 +104,17 @@ async def analyze( *, index: t.Optional[str] = None, analyzer: t.Optional[str] = None, - attributes: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - char_filter: t.Optional[ - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ] - ] = None, + attributes: t.Optional[t.Sequence[str]] = None, + char_filter: t.Optional[t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]] = None, error_trace: t.Optional[bool] = None, explain: t.Optional[bool] = None, field: t.Optional[str] = None, - filter: t.Optional[ - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], 
- t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter: t.Optional[t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, normalizer: t.Optional[str] = None, pretty: t.Optional[bool] = None, - text: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + text: t.Optional[t.Union[str, t.Sequence[str]]] = None, tokenizer: t.Optional[t.Union[str, t.Mapping[str, t.Any]]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -217,9 +194,7 @@ async def create( index: str, aliases: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, mappings: t.Optional[t.Mapping[str, t.Any]] = None, master_timeout: t.Optional[ @@ -291,9 +266,7 @@ async def create_data_stream( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -333,25 +306,13 @@ async def data_streams_stats( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -390,30 +351,18 @@ async def data_streams_stats( async def delete( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -477,12 +426,10 @@ async def delete( async def delete_alias( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], + name: t.Union[str, 
t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -532,29 +479,17 @@ async def delete_alias( async def delete_data_lifecycle( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -601,29 +536,17 @@ async def delete_data_lifecycle( async def delete_data_stream( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -660,11 +583,9 @@ async def delete_data_stream( async def delete_index_template( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -675,7 +596,7 @@ async def delete_index_template( """ Deletes an index template. - ``_ + ``_ :param name: Comma-separated list of index template names used to limit the request. Wildcard (*) expressions are supported. 
@@ -710,30 +631,18 @@ async def delete_index_template( async def exists( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -795,31 +704,19 @@ async def exists( async def exists_alias( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Union[str, t.Sequence[str]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -882,9 +779,7 @@ async def exists_index_template( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -894,7 +789,7 @@ async def exists_index_template( """ Returns information about whether a particular index template exists. - ``_ + ``_ :param name: Comma-separated list of index template names used to limit the request. Wildcard (*) expressions are supported. 
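# A minimal usage sketch for the t.Sequence[...] annotation change above: runtime
# behaviour is unchanged, the new types simply accept a plain string, a list, or a
# tuple for parameters such as `name` and `filter_path`. Assumes the package exposes
# AsyncElasticsearch for the async client (as elasticsearch-py does); the endpoint,
# API key, and template name below are hypothetical placeholders.
from elasticsearch_serverless import AsyncElasticsearch


async def cleanup_templates() -> None:
    client = AsyncElasticsearch("https://example.es.io:443", api_key="...")  # hypothetical
    try:
        if await client.indices.exists_index_template(name="logs-template"):
            # `name` may be a str or any Sequence[str] after this change.
            await client.indices.delete_index_template(name=["logs-template"])
    finally:
        await client.close()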
@@ -925,11 +820,9 @@ async def exists_index_template( async def explain_data_lifecycle( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -973,44 +866,26 @@ async def explain_data_lifecycle( async def get( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, features: t.Optional[ t.Union[ - t.Union["t.Literal['aliases', 'mappings', 'settings']", str], - t.Union[ - t.List[ - t.Union["t.Literal['aliases', 'mappings', 'settings']", str] - ], - t.Tuple[ - t.Union["t.Literal['aliases', 'mappings', 'settings']", str], - ..., - ], + t.Sequence[ + t.Union["t.Literal['aliases', 'mappings', 'settings']", str] ], + t.Union["t.Literal['aliases', 'mappings', 'settings']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -1086,31 +961,19 @@ async def get( async def get_alias( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -1172,29 +1035,17 @@ async def get_alias( async def get_data_lifecycle( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 
'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1237,29 +1088,17 @@ async def get_data_lifecycle( async def get_data_stream( self, *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1305,9 +1144,7 @@ async def get_index_template( *, name: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, @@ -1320,7 +1157,7 @@ async def get_index_template( """ Returns an index template. - ``_ + ``_ :param name: Comma-separated list of index template names used to limit the request. Wildcard (*) expressions are supported. 
@@ -1364,30 +1201,18 @@ async def get_index_template( async def get_mapping( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -1451,31 +1276,19 @@ async def get_mapping( async def get_settings( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -1556,9 +1369,7 @@ async def migrate_to_data_stream( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1592,13 +1403,9 @@ async def migrate_to_data_stream( async def modify_data_stream( self, *, - actions: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + actions: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1635,13 +1442,11 @@ async def modify_data_stream( async def put_alias( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], name: str, error_trace: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, index_routing: t.Optional[str] = None, is_write_index: t.Optional[bool] = None, @@ -1728,7 +1533,7 @@ async def put_alias( async def put_data_lifecycle( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], data_retention: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, @@ -1736,25 +1541,13 @@ async def put_data_lifecycle( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -1825,17 +1618,13 @@ async def put_index_template( self, *, name: str, - composed_of: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + composed_of: t.Optional[t.Sequence[str]] = None, create: t.Optional[bool] = None, data_stream: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - index_patterns: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + index_patterns: t.Optional[t.Union[str, t.Sequence[str]]] = None, meta: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, priority: t.Optional[int] = None, @@ -1845,7 +1634,7 @@ async def put_index_template( """ Creates or updates an index template. - ``_ + ``_ :param name: Index or template name :param composed_of: An ordered list of component template names. 
Component templates @@ -1915,47 +1704,30 @@ async def put_index_template( async def put_mapping( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, date_detection: t.Optional[bool] = None, dynamic: t.Optional[ t.Union["t.Literal['false', 'runtime', 'strict', 'true']", str] ] = None, - dynamic_date_formats: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, + dynamic_date_formats: t.Optional[t.Sequence[str]] = None, dynamic_templates: t.Optional[ t.Union[ t.Mapping[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Mapping[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Mapping[str, t.Mapping[str, t.Any]], ...], - ], + t.Sequence[t.Mapping[str, t.Mapping[str, t.Any]]], ] ] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, field_names: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -2071,30 +1843,18 @@ async def put_settings( self, *, settings: t.Mapping[str, t.Any], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -2177,14 +1937,10 @@ async def put_template( aliases: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, create: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, - index_patterns: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + index_patterns: t.Optional[t.Union[str, t.Sequence[str]]] = None, mappings: t.Optional[t.Mapping[str, t.Any]] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -2198,7 +1954,7 @@ async def put_template( """ Creates or updates an index template. 
- ``_ + ``_ :param name: The name of the template :param aliases: Aliases for the index. @@ -2263,30 +2019,18 @@ async def put_template( async def refresh( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2337,29 +2081,17 @@ async def refresh( async def resolve_index( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -2407,9 +2139,7 @@ async def rollover( conditions: t.Optional[t.Mapping[str, t.Any]] = None, dry_run: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, mappings: t.Optional[t.Mapping[str, t.Any]] = None, master_timeout: t.Optional[ @@ -2497,90 +2227,36 @@ async def rollover( "POST", __path, params=__query, headers=__headers, body=__body ) - @_rewrite_parameters( - body_fields=True, - parameter_aliases={"_meta": "meta"}, - ) + @_rewrite_parameters() async def simulate_index_template( self, *, name: str, - allow_auto_create: t.Optional[bool] = None, - composed_of: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - create: t.Optional[bool] = None, - data_stream: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, - index_patterns: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, - meta: t.Optional[t.Mapping[str, t.Any]] = None, pretty: 
t.Optional[bool] = None, - priority: t.Optional[int] = None, - template: t.Optional[t.Mapping[str, t.Any]] = None, - version: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ Simulate matching the given index name against the index templates in the system - ``_ + ``_ - :param name: Index or template name to simulate - :param allow_auto_create: This setting overrides the value of the `action.auto_create_index` - cluster setting. If set to `true` in a template, then indices can be automatically - created using that template even if auto-creation of indices is disabled - via `actions.auto_create_index`. If set to `false`, then indices or data - streams matching the template must always be explicitly created, and may - never be automatically created. - :param composed_of: An ordered list of component template names. Component templates - are merged in the order specified, meaning that the last component template - specified has the highest precedence. - :param create: If `true`, the template passed in the body is only used if no - existing templates match the same index patterns. If `false`, the simulation - uses the template with the highest priority. Note that the template is not - permanently added or updated in either case; it is only used for the simulation. - :param data_stream: If this object is included, the template is used to create - data streams and their backing indices. Supports an empty object. Data streams - require a matching index template with a `data_stream` object. + :param name: Name of the index to simulate :param include_defaults: If true, returns all relevant default configurations for the index template. - :param index_patterns: Array of wildcard (`*`) expressions used to match the - names of data streams and indices during creation. :param master_timeout: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. - :param meta: Optional user metadata about the index template. May have any contents. - This map is not automatically generated by Elasticsearch. - :param priority: Priority to determine index template precedence when a new data - stream or index is created. The index template with the highest priority - is chosen. If no priority is specified the template is treated as though - it is of priority 0 (lowest priority). This number is not automatically generated - by Elasticsearch. - :param template: Template to be applied. It may optionally include an `aliases`, - `mappings`, or `settings` configuration. - :param version: Version number used to manage index templates externally. This - number is not automatically generated by Elasticsearch. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'name'") __path = f"/_index_template/_simulate_index/{_quote(name)}" - __body: t.Dict[str, t.Any] = {} __query: t.Dict[str, t.Any] = {} - if allow_auto_create is not None: - __body["allow_auto_create"] = allow_auto_create - if composed_of is not None: - __body["composed_of"] = composed_of - if create is not None: - __query["create"] = create - if data_stream is not None: - __body["data_stream"] = data_stream if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -2589,90 +2265,130 @@ async def simulate_index_template( __query["human"] = human if include_defaults is not None: __query["include_defaults"] = include_defaults - if index_patterns is not None: - __body["index_patterns"] = index_patterns if master_timeout is not None: __query["master_timeout"] = master_timeout - if meta is not None: - __body["_meta"] = meta if pretty is not None: __query["pretty"] = pretty - if priority is not None: - __body["priority"] = priority - if template is not None: - __body["template"] = template - if version is not None: - __body["version"] = version - if not __body: - __body = None # type: ignore[assignment] __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" return await self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body + "POST", __path, params=__query, headers=__headers ) @_rewrite_parameters( - body_name="template", + body_fields=True, + parameter_aliases={"_meta": "meta"}, ) async def simulate_template( self, *, name: t.Optional[str] = None, + allow_auto_create: t.Optional[bool] = None, + composed_of: t.Optional[t.Sequence[str]] = None, create: t.Optional[bool] = None, + data_stream: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, + ignore_missing_component_templates: t.Optional[t.Sequence[str]] = None, include_defaults: t.Optional[bool] = None, + index_patterns: t.Optional[t.Union[str, t.Sequence[str]]] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, + meta: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, + priority: t.Optional[int] = None, template: t.Optional[t.Mapping[str, t.Any]] = None, + version: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ Simulate resolving the given template name or body - ``_ + ``_ :param name: Name of the index template to simulate. To test a template configuration before you add it to the cluster, omit this parameter and specify the template configuration in the request body. + :param allow_auto_create: This setting overrides the value of the `action.auto_create_index` + cluster setting. If set to `true` in a template, then indices can be automatically + created using that template even if auto-creation of indices is disabled + via `actions.auto_create_index`. If set to `false`, then indices or data + streams matching the template must always be explicitly created, and may + never be automatically created. + :param composed_of: An ordered list of component template names. Component templates + are merged in the order specified, meaning that the last component template + specified has the highest precedence. 
:param create: If true, the template passed in the body is only used if no existing templates match the same index patterns. If false, the simulation uses the template with the highest priority. Note that the template is not permanently added or updated in either case; it is only used for the simulation. + :param data_stream: If this object is included, the template is used to create + data streams and their backing indices. Supports an empty object. Data streams + require a matching index template with a `data_stream` object. + :param ignore_missing_component_templates: The configuration option ignore_missing_component_templates + can be used when an index template references a component template that might + not exist :param include_defaults: If true, returns all relevant default configurations for the index template. + :param index_patterns: Array of wildcard (`*`) expressions used to match the + names of data streams and indices during creation. :param master_timeout: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. - :param template: + :param meta: Optional user metadata about the index template. May have any contents. + This map is not automatically generated by Elasticsearch. + :param priority: Priority to determine index template precedence when a new data + stream or index is created. The index template with the highest priority + is chosen. If no priority is specified the template is treated as though + it is of priority 0 (lowest priority). This number is not automatically generated + by Elasticsearch. + :param template: Template to be applied. It may optionally include an `aliases`, + `mappings`, or `settings` configuration. + :param version: Version number used to manage index templates externally. This + number is not automatically generated by Elasticsearch. 
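# A hedged sketch of the reshuffled simulate APIs above: the full template definition
# now goes to simulate_template(), while simulate_index_template() is reduced to a
# path-only call that previews whichever stored templates match a concrete index name.
# Assumes an already-connected client object named `client`; the patterns and settings
# are hypothetical placeholders.
import typing as t


async def preview_templates(client: t.Any) -> None:
    # Preview an ad-hoc template definition without storing it.
    await client.indices.simulate_template(
        index_patterns=["logs-*"],
        priority=10,
        template={"settings": {"number_of_shards": 1}},
    )
    # Preview what the matching stored templates would produce for one index name.
    await client.indices.simulate_index_template(name="logs-2024-05-01")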
""" if name not in SKIP_IN_PATH: __path = f"/_index_template/_simulate/{_quote(name)}" else: __path = "/_index_template/_simulate" + __body: t.Dict[str, t.Any] = {} __query: t.Dict[str, t.Any] = {} + if allow_auto_create is not None: + __body["allow_auto_create"] = allow_auto_create + if composed_of is not None: + __body["composed_of"] = composed_of if create is not None: __query["create"] = create + if data_stream is not None: + __body["data_stream"] = data_stream if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: __query["filter_path"] = filter_path if human is not None: __query["human"] = human + if ignore_missing_component_templates is not None: + __body["ignore_missing_component_templates"] = ( + ignore_missing_component_templates + ) if include_defaults is not None: __query["include_defaults"] = include_defaults + if index_patterns is not None: + __body["index_patterns"] = index_patterns if master_timeout is not None: __query["master_timeout"] = master_timeout + if meta is not None: + __body["_meta"] = meta if pretty is not None: __query["pretty"] = pretty - __body = template + if priority is not None: + __body["priority"] = priority + if template is not None: + __body["template"] = template + if version is not None: + __body["version"] = version if not __body: - __body = None + __body = None # type: ignore[assignment] __headers = {"accept": "application/json"} if __body is not None: __headers["content-type"] = "application/json" @@ -2686,13 +2402,9 @@ async def simulate_template( async def update_aliases( self, *, - actions: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + actions: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -2740,7 +2452,7 @@ async def update_aliases( async def validate_query( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, all_shards: t.Optional[bool] = None, allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, @@ -2750,26 +2462,14 @@ async def validate_query( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, lenient: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_async/client/inference.py b/elasticsearch_serverless/_async/client/inference.py new file mode 100644 index 0000000..3c89f3d --- /dev/null +++ b/elasticsearch_serverless/_async/client/inference.py @@ -0,0 +1,238 @@ +# 
Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import typing as t + +from elastic_transport import ObjectApiResponse + +from ._base import NamespacedClient +from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters + + +class InferenceClient(NamespacedClient): + + @_rewrite_parameters() + async def delete_model( + self, + *, + inference_id: str, + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Delete model in the Inference API + + ``_ + + :param inference_id: The inference Id + :param task_type: The task type + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise ValueError("Couldn't find a path for the given parameters") + __query: t.Dict[str, t.Any] = {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + __headers = {"accept": "application/json"} + return await self.perform_request( # type: ignore[return-value] + "DELETE", __path, params=__query, headers=__headers + ) + + @_rewrite_parameters() + async def get_model( + self, + *, + inference_id: str, + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Get a model in the Inference API + + ``_ + + :param inference_id: The inference Id + :param task_type: The task type + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise ValueError("Couldn't find a path for the given parameters") + __query: t.Dict[str, t.Any] = {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + 
__query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + __headers = {"accept": "application/json"} + return await self.perform_request( # type: ignore[return-value] + "GET", __path, params=__query, headers=__headers + ) + + @_rewrite_parameters( + body_fields=True, + ) + async def inference( + self, + *, + inference_id: str, + input: t.Union[str, t.Sequence[str]], + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + query: t.Optional[str] = None, + task_settings: t.Optional[t.Any] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Perform inference on a model + + ``_ + + :param inference_id: The inference Id + :param input: Text input to the model. Either a string or an array of strings. + :param task_type: The task type + :param query: Query input, required for rerank task. Not required for other tasks. + :param task_settings: Optional task settings + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if input is None: + raise ValueError("Empty value passed for parameter 'input'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise ValueError("Couldn't find a path for the given parameters") + __body: t.Dict[str, t.Any] = {} + __query: t.Dict[str, t.Any] = {} + if input is not None: + __body["input"] = input + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if query is not None: + __body["query"] = query + if task_settings is not None: + __body["task_settings"] = task_settings + if not __body: + __body = None # type: ignore[assignment] + __headers = {"accept": "application/json"} + if __body is not None: + __headers["content-type"] = "application/json" + return await self.perform_request( # type: ignore[return-value] + "POST", __path, params=__query, headers=__headers, body=__body + ) + + @_rewrite_parameters( + body_name="model_config", + ) + async def put_model( + self, + *, + inference_id: str, + model_config: t.Mapping[str, t.Any], + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Configure a model for use in the Inference API + + ``_ + + :param inference_id: The inference Id + :param model_config: + :param task_type: The task type + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if model_config is None: + raise ValueError("Empty value passed for parameter 'model_config'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in 
SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise ValueError("Couldn't find a path for the given parameters") + __query: t.Dict[str, t.Any] = {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + __body = model_config + __headers = {"accept": "application/json", "content-type": "application/json"} + return await self.perform_request( # type: ignore[return-value] + "PUT", __path, params=__query, headers=__headers, body=__body + ) diff --git a/elasticsearch_serverless/_async/client/ingest.py b/elasticsearch_serverless/_async/client/ingest.py index dca93eb..5eb936d 100644 --- a/elasticsearch_serverless/_async/client/ingest.py +++ b/elasticsearch_serverless/_async/client/ingest.py @@ -24,15 +24,14 @@ class IngestClient(NamespacedClient): + @_rewrite_parameters() async def delete_pipeline( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -80,9 +79,7 @@ async def get_pipeline( *, id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -129,9 +126,7 @@ async def processor_grok( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -165,22 +160,16 @@ async def put_pipeline( id: str, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_version: t.Optional[int] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, meta: t.Optional[t.Mapping[str, t.Any]] = None, - on_failure: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + on_failure: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, pretty: t.Optional[bool] = None, - processors: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + processors: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, version: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: @@ -253,13 +242,9 @@ async def simulate( self, *, id: t.Optional[str] = None, - docs: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docs: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: 
t.Optional[bool] = None, pipeline: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_async/client/license.py b/elasticsearch_serverless/_async/client/license.py index bb5c631..24800e5 100644 --- a/elasticsearch_serverless/_async/client/license.py +++ b/elasticsearch_serverless/_async/client/license.py @@ -24,15 +24,14 @@ class LicenseClient(NamespacedClient): + @_rewrite_parameters() async def get( self, *, accept_enterprise: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, pretty: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_async/client/logstash.py b/elasticsearch_serverless/_async/client/logstash.py index 967b16d..c6b2ac6 100644 --- a/elasticsearch_serverless/_async/client/logstash.py +++ b/elasticsearch_serverless/_async/client/logstash.py @@ -24,15 +24,14 @@ class LogstashClient(NamespacedClient): + @_rewrite_parameters() async def delete_pipeline( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -64,11 +63,9 @@ async def delete_pipeline( async def get_pipeline( self, *, - id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + id: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -79,8 +76,6 @@ async def get_pipeline( :param id: Comma-separated list of pipeline identifiers. 
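With `id` now optional, the same call can also list every pipeline; a small sketch, assuming a connected async client instance named `es` and a placeholder pipeline id:

    all_pipelines = await es.logstash.get_pipeline()                  # hits /_logstash/pipeline
    one_pipeline = await es.logstash.get_pipeline(id="my-pipeline")   # placeholder id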
""" - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") if id not in SKIP_IN_PATH: __path = f"/_logstash/pipeline/{_quote(id)}" else: @@ -108,9 +103,7 @@ async def put_pipeline( id: str, pipeline: t.Mapping[str, t.Any], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_async/client/ml.py b/elasticsearch_serverless/_async/client/ml.py index 9cf02ce..d191ce8 100644 --- a/elasticsearch_serverless/_async/client/ml.py +++ b/elasticsearch_serverless/_async/client/ml.py @@ -24,6 +24,7 @@ class MlClient(NamespacedClient): + @_rewrite_parameters( body_fields=True, ) @@ -33,9 +34,7 @@ async def close_job( job_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -91,9 +90,7 @@ async def delete_calendar( *, calendar_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -128,9 +125,7 @@ async def delete_calendar_event( calendar_id: str, event_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -167,11 +162,9 @@ async def delete_calendar_job( self, *, calendar_id: str, - job_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + job_id: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -209,9 +202,7 @@ async def delete_data_frame_analytics( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -254,9 +245,7 @@ async def delete_datafeed( *, datafeed_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -298,9 +287,7 @@ async def delete_filter( *, filter_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> 
ObjectApiResponse[t.Any]: @@ -335,9 +322,7 @@ async def delete_job( job_id: str, delete_user_annotations: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -386,9 +371,7 @@ async def delete_trained_model( *, model_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -429,9 +412,7 @@ async def delete_trained_model_alias( model_id: str, model_alias: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -470,9 +451,7 @@ async def estimate_model_memory( *, analysis_config: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_bucket_cardinality: t.Optional[t.Mapping[str, int]] = None, overall_cardinality: t.Optional[t.Mapping[str, int]] = None, @@ -529,9 +508,7 @@ async def evaluate_data_frame( evaluation: t.Mapping[str, t.Any], index: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, @@ -583,9 +560,7 @@ async def flush_job( calc_interim: t.Optional[bool] = None, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, skip_time: t.Optional[t.Union[str, t.Any]] = None, @@ -644,9 +619,7 @@ async def get_calendar_events( calendar_id: str, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, job_id: t.Optional[str] = None, @@ -706,9 +679,7 @@ async def get_calendars( *, calendar_id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, page: t.Optional[t.Mapping[str, t.Any]] = None, @@ -769,9 +740,7 @@ async def get_data_frame_analytics( allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, 
...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -834,9 +803,7 @@ async def get_data_frame_analytics_stats( id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -893,14 +860,10 @@ async def get_data_frame_analytics_stats( async def get_datafeed_stats( self, *, - datafeed_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + datafeed_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -944,15 +907,11 @@ async def get_datafeed_stats( async def get_datafeeds( self, *, - datafeed_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + datafeed_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1003,13 +962,9 @@ async def get_datafeeds( async def get_filters( self, *, - filter_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1053,9 +1008,7 @@ async def get_job_stats( job_id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1100,15 +1053,11 @@ async def get_job_stats( async def get_jobs( self, *, - job_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + job_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1165,9 +1114,7 @@ async def get_overall_buckets( end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, exclude_interim: t.Optional[bool] = 
None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, overall_score: t.Optional[t.Union[float, str]] = None, pretty: t.Optional[bool] = None, @@ -1242,9 +1189,7 @@ async def get_trained_models( decompress_definition: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, include: t.Optional[ @@ -1320,14 +1265,10 @@ async def get_trained_models( async def get_trained_models_stats( self, *, - model_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + model_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1379,13 +1320,9 @@ async def infer_trained_model( self, *, model_id: str, - docs: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], + docs: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, inference_config: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -1439,9 +1376,7 @@ async def open_job( *, job_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -1485,13 +1420,9 @@ async def post_calendar_events( self, *, calendar_id: str, - events: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + events: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1536,9 +1467,7 @@ async def preview_data_frame_analytics( id: t.Optional[str] = None, config: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1587,9 +1516,7 @@ async def preview_datafeed( datafeed_config: t.Optional[t.Mapping[str, t.Any]] = None, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, job_config: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -1655,11 +1582,9 @@ async def put_calendar( calendar_id: str, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - job_ids: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + job_ids: t.Optional[t.Sequence[str]] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -1704,9 +1629,7 @@ async def put_calendar_job( calendar_id: str, job_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1753,12 +1676,8 @@ async def put_data_frame_analytics( analyzed_fields: t.Optional[t.Mapping[str, t.Any]] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - headers: t.Optional[ - t.Mapping[str, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + headers: t.Optional[t.Mapping[str, t.Union[str, t.Sequence[str]]]] = None, human: t.Optional[bool] = None, max_num_threads: t.Optional[int] = None, model_memory_limit: t.Optional[str] = None, @@ -1884,38 +1803,20 @@ async def put_datafeed( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: 
t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - headers: t.Optional[ - t.Mapping[str, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, + headers: t.Optional[t.Mapping[str, t.Union[str, t.Sequence[str]]]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indexes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - indices: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + indexes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + indices: t.Optional[t.Union[str, t.Sequence[str]]] = None, indices_options: t.Optional[t.Mapping[str, t.Any]] = None, job_id: t.Optional[str] = None, max_empty_searches: t.Optional[int] = None, @@ -2065,11 +1966,9 @@ async def put_filter( filter_id: str, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - items: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + items: t.Optional[t.Sequence[str]] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -2123,10 +2022,8 @@ async def put_job( datafeed_config: t.Optional[t.Mapping[str, t.Any]] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - groups: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + groups: t.Optional[t.Sequence[str]] = None, human: t.Optional[bool] = None, model_plot_config: t.Optional[t.Mapping[str, t.Any]] = None, model_snapshot_retention_days: t.Optional[int] = None, @@ -2279,9 +2176,7 @@ async def put_trained_model( definition: t.Optional[t.Mapping[str, t.Any]] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, inference_config: t.Optional[t.Mapping[str, t.Any]] = None, input: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2290,8 +2185,11 @@ async def put_trained_model( model_type: t.Optional[ t.Union["t.Literal['lang_ident', 'pytorch', 'tree_ensemble']", str] ] = None, + platform_architecture: t.Optional[str] = None, + prefix_strings: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, - tags: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + tags: t.Optional[t.Sequence[str]] = None, + wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Creates an inference trained model. @@ -2318,7 +2216,18 @@ async def put_trained_model( model in memory. This property is supported only if defer_definition_decompression is true or the model definition is not supplied. :param model_type: The model type. + :param platform_architecture: The platform architecture (if applicable) of the + trained mode. If the model only works on one platform, because it is heavily + optimized for a particular processor architecture and OS combination, then + this field specifies which. 
The format of the string must match the platform + identifiers used by Elasticsearch, so one of, `linux-x86_64`, `linux-aarch64`, + `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models + (those that work independent of processor architecture or OS features), leave + this field unset. + :param prefix_strings: Optional prefix strings applied at inference :param tags: An array of tags to organize the model. + :param wait_for_completion: Whether to wait for all child operations (e.g. model + download) to complete. """ if model_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'model_id'") @@ -2349,10 +2258,16 @@ async def put_trained_model( __body["model_size_bytes"] = model_size_bytes if model_type is not None: __body["model_type"] = model_type + if platform_architecture is not None: + __body["platform_architecture"] = platform_architecture + if prefix_strings is not None: + __body["prefix_strings"] = prefix_strings if pretty is not None: __query["pretty"] = pretty if tags is not None: __body["tags"] = tags + if wait_for_completion is not None: + __query["wait_for_completion"] = wait_for_completion __headers = {"accept": "application/json", "content-type": "application/json"} return await self.perform_request( # type: ignore[return-value] "PUT", __path, params=__query, headers=__headers, body=__body @@ -2365,9 +2280,7 @@ async def put_trained_model_alias( model_id: str, model_alias: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, reassign: t.Optional[bool] = None, @@ -2417,9 +2330,7 @@ async def put_trained_model_definition_part( total_definition_length: int, total_parts: int, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -2480,14 +2391,13 @@ async def put_trained_model_vocabulary( self, *, model_id: str, - vocabulary: t.Union[t.List[str], t.Tuple[str, ...]], + vocabulary: t.Sequence[str], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - merges: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + merges: t.Optional[t.Sequence[str]] = None, pretty: t.Optional[bool] = None, + scores: t.Optional[t.Sequence[float]] = None, ) -> ObjectApiResponse[t.Any]: """ Creates a trained model vocabulary @@ -2497,6 +2407,7 @@ async def put_trained_model_vocabulary( :param model_id: The unique identifier of the trained model. :param vocabulary: The model vocabulary, which must not be empty. :param merges: The optional model merges if required by the tokenizer. + :param scores: The optional vocabulary value scores if required by the tokenizer. 
""" if model_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'model_id'") @@ -2517,6 +2428,8 @@ async def put_trained_model_vocabulary( __body["merges"] = merges if pretty is not None: __query["pretty"] = pretty + if scores is not None: + __body["scores"] = scores __headers = {"accept": "application/json", "content-type": "application/json"} return await self.perform_request( # type: ignore[return-value] "PUT", __path, params=__query, headers=__headers, body=__body @@ -2529,9 +2442,7 @@ async def reset_job( job_id: str, delete_user_annotations: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, wait_for_completion: t.Optional[bool] = None, @@ -2575,9 +2486,7 @@ async def start_data_frame_analytics( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -2621,9 +2530,7 @@ async def start_datafeed( datafeed_id: str, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, start: t.Optional[t.Union[str, t.Any]] = None, @@ -2676,10 +2583,9 @@ async def start_trained_model_deployment( *, model_id: str, cache_size: t.Optional[t.Union[int, str]] = None, + deployment_id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, number_of_allocations: t.Optional[int] = None, pretty: t.Optional[bool] = None, @@ -2701,6 +2607,7 @@ async def start_trained_model_deployment( :param cache_size: The inference cache size (in memory outside the JVM heap) per node for the model. The default value is the same size as the `model_size_bytes`. To disable the cache, `0b` can be provided. + :param deployment_id: A unique identifier for the deployment of the model. :param number_of_allocations: The number of model allocations on each node where the model is deployed. All allocations on a node share the same copy of the model in memory but use a separate set of threads to evaluate the model. 
@@ -2727,6 +2634,8 @@ async def start_trained_model_deployment( __query: t.Dict[str, t.Any] = {} if cache_size is not None: __query["cache_size"] = cache_size + if deployment_id is not None: + __query["deployment_id"] = deployment_id if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -2759,9 +2668,7 @@ async def stop_data_frame_analytics( id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2819,9 +2726,7 @@ async def stop_datafeed( datafeed_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2876,9 +2781,7 @@ async def stop_trained_model_deployment( model_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2930,9 +2833,7 @@ async def update_data_frame_analytics( allow_lazy_start: t.Optional[bool] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_num_threads: t.Optional[int] = None, model_memory_limit: t.Optional[str] = None, @@ -2999,31 +2900,19 @@ async def update_datafeed( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indexes: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - indices: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + indexes: t.Optional[t.Sequence[str]] = None, + indices: t.Optional[t.Sequence[str]] = None, indices_options: t.Optional[t.Mapping[str, t.Any]] = None, job_id: t.Optional[str] = None, max_empty_searches: t.Optional[int] = None, @@ -3180,15 +3069,13 @@ async def update_filter( self, *, filter_id: str, - add_items: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + add_items: t.Optional[t.Sequence[str]] = None, description: t.Optional[str] = None, error_trace: 
t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, - remove_items: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + remove_items: t.Optional[t.Sequence[str]] = None, ) -> ObjectApiResponse[t.Any]: """ Updates the description of a filter, adds items, or removes items. @@ -3236,20 +3123,14 @@ async def update_job( background_persist_interval: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, - categorization_filters: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, + categorization_filters: t.Optional[t.Sequence[str]] = None, custom_settings: t.Optional[t.Mapping[str, t.Any]] = None, daily_model_snapshot_retention_after_days: t.Optional[int] = None, description: t.Optional[str] = None, - detectors: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + detectors: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - groups: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + groups: t.Optional[t.Sequence[str]] = None, human: t.Optional[bool] = None, model_plot_config: t.Optional[t.Mapping[str, t.Any]] = None, model_prune_window: t.Optional[ diff --git a/elasticsearch_serverless/_async/client/query_ruleset.py b/elasticsearch_serverless/_async/client/query_ruleset.py index 2edfc60..a2e68e5 100644 --- a/elasticsearch_serverless/_async/client/query_ruleset.py +++ b/elasticsearch_serverless/_async/client/query_ruleset.py @@ -24,15 +24,14 @@ class QueryRulesetClient(NamespacedClient): + @_rewrite_parameters() async def delete( self, *, ruleset_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -66,9 +65,7 @@ async def get( *, ruleset_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -103,9 +100,7 @@ async def list( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -145,13 +140,9 @@ async def put( self, *, ruleset_id: str, - rules: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + rules: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_async/client/search_application.py b/elasticsearch_serverless/_async/client/search_application.py index 7defb82..c3b8e38 100644 --- a/elasticsearch_serverless/_async/client/search_application.py +++ b/elasticsearch_serverless/_async/client/search_application.py @@ -24,22 +24,21 @@ class SearchApplicationClient(NamespacedClient): + @_rewrite_parameters() async def delete( self, *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Deletes a search application. - ``_ + ``_ :param name: The name of the search application to delete """ @@ -66,9 +65,7 @@ async def delete_behavioral_analytics( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -102,9 +99,7 @@ async def get( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -136,11 +131,9 @@ async def get( async def get_behavioral_analytics( self, *, - name: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + name: t.Optional[t.Sequence[str]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -176,9 +169,7 @@ async def list( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -225,9 +216,7 @@ async def put( search_application: t.Mapping[str, t.Any], create: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -269,9 +258,7 @@ async def put_behavioral_analytics( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -308,9 +295,7 @@ async def search( *, name: str, error_trace: 
t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, params: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_async/client/security.py b/elasticsearch_serverless/_async/client/security.py index 9871b2b..aa46058 100644 --- a/elasticsearch_serverless/_async/client/security.py +++ b/elasticsearch_serverless/_async/client/security.py @@ -24,14 +24,13 @@ class SecurityClient(NamespacedClient): + @_rewrite_parameters() async def authenticate( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -64,9 +63,7 @@ async def create_api_key( *, error_trace: t.Optional[bool] = None, expiration: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, metadata: t.Optional[t.Mapping[str, t.Any]] = None, name: t.Optional[str] = None, @@ -129,10 +126,9 @@ async def create_api_key( async def get_api_key( self, *, + active_only: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, id: t.Optional[str] = None, name: t.Optional[str] = None, @@ -141,12 +137,18 @@ async def get_api_key( realm_name: t.Optional[str] = None, username: t.Optional[str] = None, with_limited_by: t.Optional[bool] = None, + with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Retrieves information for one or more API keys. ``_ + :param active_only: A boolean flag that can be used to query API keys that are + currently active. An API key is considered active if it is neither invalidated, + nor expired at query time. You can specify this together with other parameters + such as `owner` or `name`. If `active_only` is false, the response will include + both active and inactive (expired or invalidated) keys. :param id: An API key id. This parameter cannot be used with any of `name`, `realm_name` or `username`. :param name: An API key name. This parameter cannot be used with any of `id`, @@ -162,9 +164,13 @@ async def get_api_key( :param with_limited_by: Return the snapshot of the owner user's role descriptors associated with the API key. An API key's actual permission is the intersection of its assigned role descriptors and the owner user's role descriptors. + :param with_profile_uid: Determines whether to also retrieve the profile uid, + for the API key owner principal, if it exists. 
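A short sketch of the new flags from the calling side, assuming a connected async client instance named `es`:

    resp = await es.security.get_api_key(
        owner=True,              # only keys owned by the authenticated user
        active_only=True,        # exclude invalidated and expired keys
        with_profile_uid=True,   # also return the owner's profile uid, if one exists
    )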
""" __path = "/_security/api_key" __query: t.Dict[str, t.Any] = {} + if active_only is not None: + __query["active_only"] = active_only if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -185,6 +191,8 @@ async def get_api_key( __query["username"] = username if with_limited_by is not None: __query["with_limited_by"] = with_limited_by + if with_profile_uid is not None: + __query["with_profile_uid"] = with_profile_uid __headers = {"accept": "application/json"} return await self.perform_request( # type: ignore[return-value] "GET", __path, params=__query, headers=__headers @@ -197,34 +205,19 @@ async def has_privileges( self, *, user: t.Optional[str] = None, - application: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + application: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, cluster: t.Optional[ - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'grant_api_key', 'manage', 'manage_api_key', 'manage_ccr', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'read_ccr', 'read_ilm', 'read_pipeline', 'read_slm', 'transport_client']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'grant_api_key', 'manage', 'manage_api_key', 'manage_ccr', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'read_ccr', 'read_ilm', 'read_pipeline', 'read_slm', 'transport_client']", - str, - ], - ..., - ], + t.Sequence[ + t.Union[ + "t.Literal['all', 'cancel_task', 'create_snapshot', 'grant_api_key', 'manage', 'manage_api_key', 'manage_ccr', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'read_ccr', 'read_ilm', 'read_pipeline', 'read_slm', 'transport_client']", + str, + ] ] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - index: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + index: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -269,12 +262,10 @@ 
async def invalidate_api_key( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, id: t.Optional[str] = None, - ids: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + ids: t.Optional[t.Sequence[str]] = None, name: t.Optional[str] = None, owner: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -335,44 +326,55 @@ async def invalidate_api_key( async def query_api_keys( self, *, + aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, + aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] ] = None, size: t.Optional[int] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, + typed_keys: t.Optional[bool] = None, with_limited_by: t.Optional[bool] = None, + with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Retrieves information for API keys using a subset of query DSL ``_ + :param aggregations: Any aggregations to run over the corpus of returned API + keys. Aggregations and queries work together. Aggregations are computed only + on the API keys that match the query. This supports only a subset of aggregation + types, namely: `terms`, `range`, `date_range`, `missing`, `cardinality`, + `value_count`, `composite`, `filter`, and `filters`. Additionally, aggregations + only run over the same subset of fields that query works with. + :param aggs: Any aggregations to run over the corpus of returned API keys. Aggregations + and queries work together. Aggregations are computed only on the API keys + that match the query. This supports only a subset of aggregation types, namely: + `terms`, `range`, `date_range`, `missing`, `cardinality`, `value_count`, + `composite`, `filter`, and `filters`. Additionally, aggregations only run + over the same subset of fields that query works with. :param from_: Starting document offset. By default, you cannot page through more than 10,000 hits using the from and size parameters. To page through more hits, use the `search_after` parameter. - :param query: A query to filter which API keys to return. The query supports - a subset of query types, including `match_all`, `bool`, `term`, `terms`, - `ids`, `prefix`, `wildcard`, and `range`. You can query all public information - associated with an API key. + :param query: A query to filter which API keys to return. If the query parameter + is missing, it is equivalent to a `match_all` query. The query supports a + subset of query types, including `match_all`, `bool`, `term`, `terms`, `match`, + `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. 
+ You can query the following public information associated with an API key: + `id`, `type`, `name`, `creation`, `expiration`, `invalidated`, `invalidation`, + `username`, `realm`, and `metadata`. :param search_after: Search after definition :param size: The number of hits to return. By default, you cannot page through more than 10,000 hits using the `from` and `size` parameters. To page through @@ -380,13 +382,17 @@ async def query_api_keys( :param sort: Other than `id`, all public fields of an API key are eligible for sorting. In addition, sort can also be applied to the `_doc` field to sort by index order. + :param typed_keys: Determines whether aggregation names are prefixed by their + respective types in the response. :param with_limited_by: Return the snapshot of the owner user's role descriptors associated with the API key. An API key's actual permission is the intersection of its assigned role descriptors and the owner user's role descriptors. + :param with_profile_uid: Determines whether to also retrieve the profile uid, + for the API key owner principal, if it exists. """ __path = "/_security/_query/api_key" - __query: t.Dict[str, t.Any] = {} __body: t.Dict[str, t.Any] = {} + __query: t.Dict[str, t.Any] = {} # The 'sort' parameter with a colon can't be encoded to the body. if sort is not None and ( (isinstance(sort, str) and ":" in sort) @@ -398,6 +404,10 @@ async def query_api_keys( ): __query["sort"] = sort sort = None + if aggregations is not None: + __body["aggregations"] = aggregations + if aggs is not None: + __body["aggs"] = aggs if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -416,8 +426,12 @@ async def query_api_keys( __body["size"] = size if sort is not None: __body["sort"] = sort + if typed_keys is not None: + __query["typed_keys"] = typed_keys if with_limited_by is not None: __query["with_limited_by"] = with_limited_by + if with_profile_uid is not None: + __query["with_profile_uid"] = with_profile_uid if not __body: __body = None # type: ignore[assignment] __headers = {"accept": "application/json"} @@ -435,9 +449,8 @@ async def update_api_key( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + expiration: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, metadata: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -449,6 +462,7 @@ async def update_api_key( ``_ :param id: The ID of the API key to update. + :param expiration: Expiration time for the API key. :param metadata: Arbitrary metadata that you want to associate with the API key. It supports nested data structure. Within the metadata object, keys beginning with _ are reserved for system usage. 
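For illustration, a minimal sketch of how the expanded security APIs above might be called once this change lands. The synchronous client mirrors the async methods shown in this hunk; the endpoint, credentials, aggregation name, and key id below are placeholders, not part of the patch.

    from elasticsearch_serverless import Elasticsearch

    # Placeholder endpoint and credentials.
    client = Elasticsearch("https://my-project.es.example.com:443", api_key="<api-key>")

    # Aggregations run only over the API keys matched by the query; typed_keys=True
    # prefixes aggregation names with their type in the response.
    resp = client.security.query_api_keys(
        query={"term": {"invalidated": False}},
        aggregations={"keys_per_realm": {"terms": {"field": "realm"}}},
        typed_keys=True,
        with_profile_uid=True,
    )
    print(resp["total"])

    # The new `expiration` body field updates the lifetime of an existing key.
    client.security.update_api_key(id="<existing-api-key-id>", expiration="30d")

As in the generated code above, the aggregation bodies travel in the request body while `typed_keys` and `with_profile_uid` stay on the query string.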
@@ -468,6 +482,8 @@ async def update_api_key( __body: t.Dict[str, t.Any] = {} if error_trace is not None: __query["error_trace"] = error_trace + if expiration is not None: + __body["expiration"] = expiration if filter_path is not None: __query["filter_path"] = filter_path if human is not None: diff --git a/elasticsearch_serverless/_async/client/sql.py b/elasticsearch_serverless/_async/client/sql.py index b5134d7..e869e2b 100644 --- a/elasticsearch_serverless/_async/client/sql.py +++ b/elasticsearch_serverless/_async/client/sql.py @@ -24,6 +24,7 @@ class SqlClient(NamespacedClient): + @_rewrite_parameters( body_fields=True, ) @@ -32,9 +33,7 @@ async def clear_cursor( *, cursor: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -71,9 +70,7 @@ async def delete_async( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -109,9 +106,7 @@ async def get_async( id: str, delimiter: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -168,9 +163,7 @@ async def get_async_status( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -213,9 +206,7 @@ async def query( fetch_size: t.Optional[int] = None, field_multi_value_leniency: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, index_using_frozen: t.Optional[bool] = None, @@ -330,9 +321,7 @@ async def translate( error_trace: t.Optional[bool] = None, fetch_size: t.Optional[int] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, time_zone: t.Optional[str] = None, diff --git a/elasticsearch_serverless/_async/client/synonyms.py b/elasticsearch_serverless/_async/client/synonyms.py index b6e7a0d..c685d79 100644 --- a/elasticsearch_serverless/_async/client/synonyms.py +++ b/elasticsearch_serverless/_async/client/synonyms.py @@ -24,15 +24,14 @@ class SynonymsClient(NamespacedClient): + @_rewrite_parameters() async def delete_synonym( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, 
t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -67,9 +66,7 @@ async def delete_synonym_rule( set_id: str, rule_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -108,9 +105,7 @@ async def get_synonym( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -153,9 +148,7 @@ async def get_synonym_rule( set_id: str, rule_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -193,9 +186,7 @@ async def get_synonyms_sets( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -235,13 +226,9 @@ async def put_synonym( self, *, id: str, - synonyms_set: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], + synonyms_set: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -283,11 +270,9 @@ async def put_synonym_rule( *, set_id: str, rule_id: str, - synonyms: t.Union[t.List[str], t.Tuple[str, ...]], + synonyms: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_async/client/tasks.py b/elasticsearch_serverless/_async/client/tasks.py index 68c5ac5..c7d9f99 100644 --- a/elasticsearch_serverless/_async/client/tasks.py +++ b/elasticsearch_serverless/_async/client/tasks.py @@ -24,15 +24,14 @@ class TasksClient(NamespacedClient): + @_rewrite_parameters() async def get( self, *, task_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, diff --git a/elasticsearch_serverless/_async/client/transform.py b/elasticsearch_serverless/_async/client/transform.py index d72bea2..a7cafbb 100644 --- a/elasticsearch_serverless/_async/client/transform.py +++ b/elasticsearch_serverless/_async/client/transform.py @@ -24,15 +24,15 @@ class TransformClient(NamespacedClient): + 
@_rewrite_parameters() async def delete_transform( self, *, transform_id: str, + delete_dest_index: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -44,6 +44,9 @@ async def delete_transform( ``_ :param transform_id: Identifier for the transform. + :param delete_dest_index: If this value is true, the destination index is deleted + together with the transform. If false, the destination index will not be + deleted :param force: If this value is false, the transform must be stopped before it can be deleted. If true, the transform is deleted regardless of its current state. @@ -54,6 +57,8 @@ async def delete_transform( raise ValueError("Empty value passed for parameter 'transform_id'") __path = f"/_transform/{_quote(transform_id)}" __query: t.Dict[str, t.Any] = {} + if delete_dest_index is not None: + __query["delete_dest_index"] = delete_dest_index if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -77,15 +82,11 @@ async def delete_transform( async def get_transform( self, *, - transform_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + transform_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -142,12 +143,10 @@ async def get_transform( async def get_transform_stats( self, *, - transform_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + transform_id: t.Union[str, t.Sequence[str]], allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -206,9 +205,7 @@ async def preview_transform( description: t.Optional[str] = None, dest: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, latest: t.Optional[t.Mapping[str, t.Any]] = None, @@ -303,9 +300,7 @@ async def put_transform( defer_validation: t.Optional[bool] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, latest: t.Optional[t.Mapping[str, t.Any]] = None, @@ -404,9 +399,7 @@ async def reset_transform( *, transform_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] 
= None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -448,9 +441,7 @@ async def schedule_now_transform( *, transform_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -490,9 +481,7 @@ async def start_transform( *, transform_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[str] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -538,9 +527,7 @@ async def stop_transform( transform_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -614,9 +601,7 @@ async def update_transform( description: t.Optional[str] = None, dest: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, meta: t.Optional[t.Mapping[str, t.Any]] = None, diff --git a/elasticsearch_serverless/_sync/client/__init__.py b/elasticsearch_serverless/_sync/client/__init__.py index 453df38..207026f 100644 --- a/elasticsearch_serverless/_sync/client/__init__.py +++ b/elasticsearch_serverless/_sync/client/__init__.py @@ -41,6 +41,7 @@ from .eql import EqlClient from .graph import GraphClient from .indices import IndicesClient +from .inference import InferenceClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient @@ -279,6 +280,7 @@ def __init__( self.cat = CatClient(self) self.cluster = ClusterClient(self) self.indices = IndicesClient(self) + self.inference = InferenceClient(self) self.ingest = IngestClient(self) self.tasks = TasksClient(self) @@ -440,14 +442,10 @@ def ping( def bulk( self, *, - operations: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + operations: t.Sequence[t.Mapping[str, t.Any]], index: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pipeline: t.Optional[str] = None, pretty: t.Optional[bool] = None, @@ -456,15 +454,9 @@ def bulk( ] = None, require_alias: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, wait_for_active_shards: t.Optional[ t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]] @@ -550,14 +542,10 @@ def clear_scroll( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, - scroll_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + scroll_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, ) -> ObjectApiResponse[t.Any]: """ Explicitly clears the search context for a scroll. @@ -596,9 +584,7 @@ def close_point_in_time( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -639,7 +625,7 @@ def close_point_in_time( def count( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, analyzer: t.Optional[str] = None, @@ -648,25 +634,13 @@ def count( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -784,9 +758,7 @@ def create( id: str, document: t.Mapping[str, t.Any], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = 
None, pipeline: t.Optional[str] = None, pretty: t.Optional[bool] = None, @@ -879,9 +851,7 @@ def delete( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_primary_term: t.Optional[int] = None, if_seq_no: t.Optional[int] = None, @@ -966,7 +936,7 @@ def delete( def delete_by_query( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, analyzer: t.Optional[str] = None, @@ -976,25 +946,13 @@ def delete_by_query( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -1018,8 +976,8 @@ def delete_by_query( ] = None, slice: t.Optional[t.Mapping[str, t.Any]] = None, slices: t.Optional[t.Union[int, t.Union["t.Literal['auto']", str]]] = None, - sort: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + sort: t.Optional[t.Sequence[str]] = None, + stats: t.Optional[t.Sequence[str]] = None, terminate_after: t.Optional[int] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, version: t.Optional[bool] = None, @@ -1193,9 +1151,7 @@ def delete_script( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -1249,27 +1205,17 @@ def exists( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: 
t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1355,24 +1301,16 @@ def exists_source( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1458,9 +1396,7 @@ def explain( default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, df: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, lenient: t.Optional[bool] = None, preference: t.Optional[str] = None, @@ -1468,18 +1404,10 @@ def explain( q: t.Optional[str] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, ) -> ObjectApiResponse[t.Any]: """ Returns information about why a specific matches (or doesn't match) a query. 
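The `t.Sequence` rewrite in these signatures is purely an annotation cleanup, not a behaviour change, so existing call sites keep working. A small sketch under that assumption, with placeholder endpoint, index, and field names:

    from elasticsearch_serverless import Elasticsearch

    # Placeholder endpoint, index, and field names.
    client = Elasticsearch("https://my-project.es.example.com:443", api_key="<api-key>")

    # Parameters such as `source_includes` and `filter_path` accept any sequence
    # of strings, exactly as the old List/Tuple unions did.
    if client.exists(index="my-index", id="1"):
        doc = client.get(
            index="my-index",
            id="1",
            source_includes=("title", "created_at"),  # tuple works
            filter_path=["_id", "_source"],           # list works too
        )
        print(doc["_source"])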
@@ -1568,41 +1496,28 @@ def explain( def field_caps( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, filters: t.Optional[str] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, + include_empty_fields: t.Optional[bool] = None, include_unmapped: t.Optional[bool] = None, index_filter: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, - types: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + types: t.Optional[t.Sequence[str]] = None, ) -> ObjectApiResponse[t.Any]: """ Returns the information about the capabilities of fields among multiple indices. @@ -1626,6 +1541,7 @@ def field_caps( :param filters: An optional set of filters: can include +metadata,-metadata,-nested,-multifield,-parent :param ignore_unavailable: If `true`, missing or closed indices are not included in the response. + :param include_empty_fields: If false, empty fields are not included in the response. :param include_unmapped: If true, unmapped fields are included in the response. :param index_filter: Allows to filter indices if the provided query rewrites to match_none on every shard. 
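A short sketch of the new `include_empty_fields` flag documented above; the endpoint and index pattern are placeholders:

    from elasticsearch_serverless import Elasticsearch

    # Placeholder endpoint and index pattern.
    client = Elasticsearch("https://my-project.es.example.com:443", api_key="<api-key>")

    # With include_empty_fields=False the response omits fields that exist in the
    # mapping but hold no values in the matching indices.
    caps = client.field_caps(
        index="logs-*",
        fields="message,*.ip",
        include_empty_fields=False,
    )
    print(sorted(caps["fields"]))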
@@ -1658,6 +1574,8 @@ def field_caps( __query["human"] = human if ignore_unavailable is not None: __query["ignore_unavailable"] = ignore_unavailable + if include_empty_fields is not None: + __query["include_empty_fields"] = include_empty_fields if include_unmapped is not None: __query["include_unmapped"] = include_unmapped if index_filter is not None: @@ -1690,27 +1608,18 @@ def get( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + force_synthetic_source: t.Optional[bool] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1723,6 +1632,10 @@ def get( :param index: Name of the index that contains the document. :param id: Unique identifier of the document. + :param force_synthetic_source: Should this request force synthetic _source? Use + this to test if the mapping supports synthetic _source and to get a sense + of the worst case performance. Fetches with this enabled will be slower the + enabling synthetic source natively in the index. :param preference: Specifies the node or shard the operation should be performed on. Random by default. :param realtime: If `true`, the request is real-time as opposed to near-real-time. 
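A minimal sketch of the new `force_synthetic_source` flag on `get`, assuming a placeholder endpoint, index, and document id:

    from elasticsearch_serverless import Elasticsearch

    # Placeholder endpoint, index, and document id.
    client = Elasticsearch("https://my-project.es.example.com:443", api_key="<api-key>")

    # force_synthetic_source is a testing aid: the fetch is slower than reading a
    # stored _source, but it shows whether the mapping supports synthetic _source.
    doc = client.get(index="my-index", id="1", force_synthetic_source=True)
    print(doc["_source"])

As the following hunk shows, the flag is forwarded as a query parameter rather than a body field.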
@@ -1753,6 +1666,8 @@ def get( __query["error_trace"] = error_trace if filter_path is not None: __query["filter_path"] = filter_path + if force_synthetic_source is not None: + __query["force_synthetic_source"] = force_synthetic_source if human is not None: __query["human"] = human if preference is not None: @@ -1788,9 +1703,7 @@ def get_script( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -1837,27 +1750,17 @@ def get_source( index: str, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, version: t.Optional[int] = None, version_type: t.Optional[ t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str] @@ -1937,9 +1840,7 @@ def index( document: t.Mapping[str, t.Any], id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_primary_term: t.Optional[int] = None, if_seq_no: t.Optional[int] = None, @@ -2052,9 +1953,7 @@ def info( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -2090,32 +1989,21 @@ def mget( self, *, index: t.Optional[str] = None, - docs: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docs: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + force_synthetic_source: t.Optional[bool] = None, human: t.Optional[bool] = None, - ids: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + ids: t.Optional[t.Union[str, t.Sequence[str]]] = None, preference: t.Optional[str] = None, pretty: t.Optional[bool] = None, realtime: t.Optional[bool] = None, refresh: t.Optional[bool] = None, routing: t.Optional[str] = None, - source: t.Optional[ - t.Union[bool, 
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, ) -> ObjectApiResponse[t.Any]: """ Allows to get multiple documents in one request. @@ -2126,6 +2014,10 @@ def mget( or when a document in the `docs` array does not specify an index. :param docs: The documents you want to retrieve. Required if no index is specified in the request URI. + :param force_synthetic_source: Should this request force synthetic _source? Use + this to test if the mapping supports synthetic _source and to get a sense + of the worst case performance. Fetches with this enabled will be slower the + enabling synthetic source natively in the index. :param ids: The IDs of the documents you want to retrieve. Allowed when the index is specified in the request URI. :param preference: Specifies the node or shard the operation should be performed @@ -2159,6 +2051,8 @@ def mget( __query["error_trace"] = error_trace if filter_path is not None: __query["filter_path"] = filter_path + if force_synthetic_source is not None: + __query["force_synthetic_source"] = force_synthetic_source if human is not None: __query["human"] = human if ids is not None: @@ -2192,34 +2086,20 @@ def mget( def msearch( self, *, - searches: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + searches: t.Sequence[t.Mapping[str, t.Any]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, ccs_minimize_roundtrips: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -2329,15 +2209,11 @@ def msearch( def msearch_template( self, *, - search_templates: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + search_templates: t.Sequence[t.Mapping[str, t.Any]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, ccs_minimize_roundtrips: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_concurrent_searches: t.Optional[int] = None, pretty: t.Optional[bool] = None, @@ -2408,19 +2284,13 @@ def mtermvectors( self, *, index: t.Optional[str] = None, - docs: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docs: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, field_statistics: t.Optional[bool] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - ids: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + ids: t.Optional[t.Sequence[str]] = None, offsets: t.Optional[bool] = None, payloads: t.Optional[bool] = None, positions: t.Optional[bool] = None, @@ -2513,30 +2383,18 @@ def mtermvectors( def open_point_in_time( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], keep_alive: t.Union["t.Literal[-1]", "t.Literal[0]", str], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, preference: t.Optional[str] = None, @@ -2600,9 +2458,7 @@ def put_script( script: t.Mapping[str, t.Any], context: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -2665,33 +2521,19 @@ def put_script( def rank_eval( self, *, - requests: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + requests: t.Sequence[t.Mapping[str, t.Any]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, metric: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2765,9 +2607,7 @@ def reindex( source: t.Mapping[str, t.Any], conflicts: t.Optional[t.Union["t.Literal['abort', 'proceed']", str]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_docs: t.Optional[int] = None, pretty: t.Optional[bool] = None, @@ -2873,9 +2713,7 @@ def render_search_template( id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, file: t.Optional[str] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, params: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -2933,9 +2771,7 @@ def scripts_painless_execute( context: t.Optional[str] = None, context_setup: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, script: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2983,9 +2819,7 @@ def scroll( *, scroll_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, rest_total_hits_as_int: t.Optional[bool] = None, @@ -3042,7 +2876,7 @@ def scroll( def search( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, allow_no_indices: t.Optional[bool] = None, @@ -3054,51 +2888,29 @@ def search( collapse: t.Optional[t.Mapping[str, t.Any]] = None, default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, df: t.Optional[str] = None, - docvalue_fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docvalue_fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: 
t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, ext: t.Optional[t.Mapping[str, t.Any]] = None, - fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + force_synthetic_source: t.Optional[bool] = None, from_: t.Optional[int] = None, highlight: t.Optional[t.Mapping[str, t.Any]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indices_boost: t.Optional[ - t.Union[t.List[t.Mapping[str, float]], t.Tuple[t.Mapping[str, float], ...]] - ] = None, + indices_boost: t.Optional[t.Sequence[t.Mapping[str, float]]] = None, knn: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, lenient: t.Optional[bool] = None, max_concurrent_shard_requests: t.Optional[int] = None, @@ -3115,12 +2927,7 @@ def search( rank: t.Optional[t.Mapping[str, t.Any]] = None, request_cache: t.Optional[bool] = None, rescore: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, rest_total_hits_as_int: t.Optional[bool] = None, routing: t.Optional[str] = None, @@ -3128,10 +2935,7 @@ def search( script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, scroll: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] ] = None, search_type: t.Optional[ t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] @@ -3141,24 +2945,15 @@ def search( slice: t.Optional[t.Mapping[str, t.Any]] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stats: t.Optional[t.Sequence[str]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, suggest: t.Optional[t.Mapping[str, t.Any]] = None, suggest_field: t.Optional[str] = None, suggest_mode: t.Optional[ @@ -3223,6 +3018,10 @@ def search( :param fields: Array of wildcard (`*`) patterns. The request returns values for field names matching these patterns in the `hits.fields` property of the response. + :param force_synthetic_source: Should this request force synthetic _source? Use + this to test if the mapping supports synthetic _source and to get a sense + of the worst case performance. Fetches with this enabled will be slower the + enabling synthetic source natively in the index. :param from_: Starting document offset. Needs to be non-negative. By default, you cannot page through more than 10,000 hits using the `from` and `size` parameters. To page through more hits, use the `search_after` parameter. 
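A sketch combining the sequence-typed `sort`/`search_after` parameters with the new `force_synthetic_source` flag on `search`; the endpoint, index, field names, and sort values are placeholders:

    from elasticsearch_serverless import Elasticsearch

    # Placeholder endpoint, index, field names, and sort values.
    client = Elasticsearch("https://my-project.es.example.com:443", api_key="<api-key>")

    resp = client.search(
        index="my-index",
        query={"match": {"title": "serverless"}},
        sort=[{"created_at": "desc"}, "_id"],          # any sequence is accepted
        search_after=["2024-04-08T00:00:00Z", "42"],   # sort values from a previous page
        size=50,
        force_synthetic_source=True,
    )
    for hit in resp["hits"]["hits"]:
        print(hit["_id"], hit["sort"])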
@@ -3413,6 +3212,8 @@ def search( __body["fields"] = fields if filter_path is not None: __query["filter_path"] = filter_path + if force_synthetic_source is not None: + __query["force_synthetic_source"] = force_synthetic_source if from_ is not None: __body["from"] = from_ if highlight is not None: @@ -3526,7 +3327,7 @@ def search( def search_mvt( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], field: str, zoom: int, x: int, @@ -3536,12 +3337,8 @@ def search_mvt( error_trace: t.Optional[bool] = None, exact_bounds: t.Optional[bool] = None, extent: t.Optional[int] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, grid_agg: t.Optional[t.Union["t.Literal['geohex', 'geotile']", str]] = None, grid_precision: t.Optional[int] = None, grid_type: t.Optional[ @@ -3554,11 +3351,8 @@ def search_mvt( size: t.Optional[int] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, track_total_hits: t.Optional[t.Union[bool, int]] = None, @@ -3689,32 +3483,20 @@ def search_mvt( def search_template( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, ccs_minimize_roundtrips: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, id: t.Optional[str] = None, ignore_throttled: t.Optional[bool] = None, @@ -3836,9 +3618,7 @@ def terms_enum( field: str, case_insensitive: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, index_filter: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -3920,13 +3700,9 @@ def termvectors( doc: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, field_statistics: t.Optional[bool] = None, - fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = 
None, offsets: t.Optional[bool] = None, payloads: t.Optional[bool] = None, @@ -4043,9 +3819,7 @@ def update( doc: t.Optional[t.Mapping[str, t.Any]] = None, doc_as_upsert: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_primary_term: t.Optional[int] = None, if_seq_no: t.Optional[int] = None, @@ -4060,12 +3834,8 @@ def update( script: t.Optional[t.Mapping[str, t.Any]] = None, scripted_upsert: t.Optional[bool] = None, source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, upsert: t.Optional[t.Mapping[str, t.Any]] = None, wait_for_active_shards: t.Optional[ @@ -4176,7 +3946,7 @@ def update( def update_by_query( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, analyzer: t.Optional[str] = None, @@ -4186,25 +3956,13 @@ def update_by_query( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -4229,8 +3987,8 @@ def update_by_query( ] = None, slice: t.Optional[t.Mapping[str, t.Any]] = None, slices: t.Optional[t.Union[int, t.Union["t.Literal['auto']", str]]] = None, - sort: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + sort: t.Optional[t.Sequence[str]] = None, + stats: t.Optional[t.Sequence[str]] = None, terminate_after: t.Optional[int] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, version: t.Optional[bool] = None, @@ -4241,8 +3999,9 @@ def update_by_query( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs an update on every document in the index without changing the source, - for example to pick up a mapping change. + Updates documents that match the specified query. If no query is specified, performs + an update on every document in the index without changing the source, for example + to pick up a mapping change. 
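A minimal sketch matching the clarified `update_by_query` description; the index, field, and Painless script are placeholders:

    from elasticsearch_serverless import Elasticsearch

    # Placeholder endpoint, index, field, and script.
    client = Elasticsearch("https://my-project.es.example.com:443", api_key="<api-key>")

    # With a query, only matching documents are rewritten; omitting `query`
    # updates every document, e.g. to pick up a mapping change.
    client.update_by_query(
        index="my-index",
        query={"term": {"status": "stale"}},
        script={"source": "ctx._source.status = 'fresh'", "lang": "painless"},
        conflicts="proceed",
    )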
``_ diff --git a/elasticsearch_serverless/_sync/client/async_search.py b/elasticsearch_serverless/_sync/client/async_search.py index 6934e6a..98e6e0a 100644 --- a/elasticsearch_serverless/_sync/client/async_search.py +++ b/elasticsearch_serverless/_sync/client/async_search.py @@ -24,15 +24,14 @@ class AsyncSearchClient(NamespacedClient): + @_rewrite_parameters() def delete( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -67,9 +66,7 @@ def get( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, pretty: t.Optional[bool] = None, @@ -129,9 +126,7 @@ def status( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -172,7 +167,7 @@ def status( def submit( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, allow_no_indices: t.Optional[bool] = None, @@ -184,53 +179,30 @@ def submit( collapse: t.Optional[t.Mapping[str, t.Any]] = None, default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None, df: t.Optional[str] = None, - docvalue_fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docvalue_fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, ext: t.Optional[t.Mapping[str, t.Any]] = None, - fields: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, highlight: t.Optional[t.Mapping[str, t.Any]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indices_boost: t.Optional[ - t.Union[t.List[t.Mapping[str, float]], t.Tuple[t.Mapping[str, float], ...]] - ] = None, + indices_boost: t.Optional[t.Sequence[t.Mapping[str, float]]] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", 
"t.Literal[0]", str]] = None, keep_on_completion: t.Optional[bool] = None, knn: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, lenient: t.Optional[bool] = None, max_concurrent_shard_requests: t.Optional[int] = None, @@ -246,12 +218,7 @@ def submit( query: t.Optional[t.Mapping[str, t.Any]] = None, request_cache: t.Optional[bool] = None, rescore: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, rest_total_hits_as_int: t.Optional[bool] = None, routing: t.Optional[str] = None, @@ -259,10 +226,7 @@ def submit( script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, scroll: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] ] = None, search_type: t.Optional[ t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str] @@ -272,24 +236,15 @@ def submit( slice: t.Optional[t.Mapping[str, t.Any]] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None, - source_excludes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - source_includes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - stored_fields: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + stats: t.Optional[t.Sequence[str]] = None, + stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None, suggest: t.Optional[t.Mapping[str, t.Any]] = None, suggest_field: t.Optional[str] = None, suggest_mode: t.Optional[ diff --git a/elasticsearch_serverless/_sync/client/cat.py b/elasticsearch_serverless/_sync/client/cat.py index 5edb42a..f1a2ef5 100644 --- a/elasticsearch_serverless/_sync/client/cat.py +++ b/elasticsearch_serverless/_sync/client/cat.py @@ -24,35 +24,24 @@ class CatClient(NamespacedClient): + @_rewrite_parameters() def aliases( self, *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: 
t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -60,7 +49,7 @@ def aliases( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ @@ -128,11 +117,9 @@ def component_templates( *, name: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -140,7 +127,7 @@ def component_templates( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ @@ -201,13 +188,11 @@ def component_templates( def count( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -215,7 +200,7 @@ def count( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ @@ -279,11 +264,9 @@ def help( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -291,7 +274,7 @@ def help( t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, pretty: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, v: t.Optional[bool] = None, ) -> TextApiResponse: """ @@ -347,34 +330,22 @@ def help( def indices( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] 
= None, bytes: t.Optional[ t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, - h: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + h: t.Optional[t.Union[str, t.Sequence[str]]] = None, health: t.Optional[t.Union["t.Literal['green', 'red', 'yellow']", str]] = None, help: t.Optional[bool] = None, human: t.Optional[bool] = None, @@ -385,7 +356,7 @@ def indices( ] = None, pretty: t.Optional[bool] = None, pri: t.Optional[bool] = None, - s: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + s: t.Optional[t.Union[str, t.Sequence[str]]] = None, time: t.Optional[ t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str] ] = None, @@ -477,31 +448,20 @@ def ml_data_frame_analytics( t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", + str, + ] + ], t.Union[ "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -513,25 +473,16 @@ def ml_data_frame_analytics( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", + str, + ] + ], t.Union[ "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 
'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", - str, - ], - ..., - ], - ], ] ] = None, time: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -606,31 +557,20 @@ def ml_datafeeds( datafeed_id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", + str, + ] + ], t.Union[ "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -642,25 +582,16 @@ def ml_datafeeds( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", + str, + ] + ], t.Union[ "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", - str, - ], - ..., - ], - ], ] ] = None, time: t.Optional[ @@ -743,31 +674,20 @@ def ml_jobs( t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 
'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", + str, + ] + ], t.Union[ "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 
'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -779,25 +699,16 @@ def ml_jobs( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", + str, + ] + ], t.Union[ "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 
'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", - str, - ], - ..., - ], - ], ] ] = None, time: t.Optional[ @@ 
-884,32 +795,21 @@ def ml_trained_models( t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, from_: t.Optional[int] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", + str, + ] + ], t.Union[ "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -921,25 +821,16 @@ def ml_trained_models( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", + str, + ] + ], t.Union[ "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", - str, - ], - ..., - ], - ], ] ] = None, size: t.Optional[int] = None, @@ -1024,32 +915,21 @@ def transforms( transform_id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, from_: t.Optional[int] = None, h: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 
'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", + str, + ] + ], t.Union[ "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ], - ..., - ], - ], ] ] = None, help: t.Optional[bool] = None, @@ -1061,25 +941,16 @@ def transforms( pretty: t.Optional[bool] = None, s: t.Optional[ t.Union[ + t.Sequence[ + t.Union[ + "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", + str, + ] + ], t.Union[ "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 
'trigger_count', 'version']", str, ], - t.Union[ - t.List[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", - str, - ], - ..., - ], - ], ] ] = None, size: t.Optional[int] = None, diff --git a/elasticsearch_serverless/_sync/client/cluster.py b/elasticsearch_serverless/_sync/client/cluster.py index add324b..1830479 100644 --- a/elasticsearch_serverless/_sync/client/cluster.py +++ b/elasticsearch_serverless/_sync/client/cluster.py @@ -24,15 +24,14 @@ class ClusterClient(NamespacedClient): + @_rewrite_parameters() def delete_component_template( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -78,11 +77,9 @@ def delete_component_template( def exists_component_template( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -131,9 +128,7 @@ def get_component_template( *, name: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, @@ -190,29 +185,17 @@ def info( self, *, target: t.Union[ - t.Union[ - "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", str + t.Sequence[ + t.Union[ + "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", str + ] ], t.Union[ - t.List[ - t.Union[ - "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['_all', 'http', 'ingest', 'script', 'thread_pool']", - str, - ], - ..., - ], + "t.Literal['_all', 'http', 
'ingest', 'script', 'thread_pool']", str ], ], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -253,9 +236,7 @@ def put_component_template( allow_auto_create: t.Optional[bool] = None, create: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] diff --git a/elasticsearch_serverless/_sync/client/enrich.py b/elasticsearch_serverless/_sync/client/enrich.py index 337f2e8..03bc828 100644 --- a/elasticsearch_serverless/_sync/client/enrich.py +++ b/elasticsearch_serverless/_sync/client/enrich.py @@ -24,15 +24,14 @@ class EnrichClient(NamespacedClient): + @_rewrite_parameters() def delete_policy( self, *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -66,9 +65,7 @@ def execute_policy( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, wait_for_completion: t.Optional[bool] = None, @@ -105,11 +102,9 @@ def execute_policy( def get_policy( self, *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -147,9 +142,7 @@ def put_policy( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, geo_match: t.Optional[t.Mapping[str, t.Any]] = None, human: t.Optional[bool] = None, match: t.Optional[t.Mapping[str, t.Any]] = None, @@ -197,9 +190,7 @@ def stats( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_sync/client/eql.py b/elasticsearch_serverless/_sync/client/eql.py index c6b314b..8ecf452 100644 --- a/elasticsearch_serverless/_sync/client/eql.py +++ b/elasticsearch_serverless/_sync/client/eql.py @@ -24,15 +24,14 @@ class EqlClient(NamespacedClient): + @_rewrite_parameters() def delete( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = 
None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -69,9 +68,7 @@ def get( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, pretty: t.Optional[bool] = None, @@ -82,7 +79,7 @@ def get( """ Returns async results from previously executed Event Query Language (EQL) search - `< https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-eql-search-api.html>`_ + ``_ :param id: Identifier for the search. :param keep_alive: Period for which the search and its results are stored on @@ -119,9 +116,7 @@ def get_status( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -129,7 +124,7 @@ def get_status( Returns the status of a previously submitted async or stored Event Query Language (EQL) search - `< https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-eql-status-api.html>`_ + ``_ :param id: Identifier for the search. """ @@ -156,7 +151,7 @@ def get_status( def search( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], query: str, allow_no_indices: t.Optional[bool] = None, case_sensitive: t.Optional[bool] = None, @@ -164,42 +159,20 @@ def search( event_category_field: t.Optional[str] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, fetch_size: t.Optional[int] = None, fields: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], - ] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, filter: t.Optional[ - t.Union[ - t.Mapping[str, t.Any], - t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, diff --git a/elasticsearch_serverless/_sync/client/graph.py b/elasticsearch_serverless/_sync/client/graph.py index b362069..f86353a 100644 --- a/elasticsearch_serverless/_sync/client/graph.py +++ b/elasticsearch_serverless/_sync/client/graph.py @@ -24,27 +24,24 @@ class GraphClient(NamespacedClient): + @_rewrite_parameters( body_fields=True, ) def explore( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], connections: t.Optional[t.Mapping[str, t.Any]] = None, controls: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, routing: t.Optional[str] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - vertices: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + vertices: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, ) -> ObjectApiResponse[t.Any]: """ Explore extracted and summarized information about the documents and terms in diff --git a/elasticsearch_serverless/_sync/client/indices.py b/elasticsearch_serverless/_sync/client/indices.py index f0ebdb2..92fa84e 100644 --- a/elasticsearch_serverless/_sync/client/indices.py +++ b/elasticsearch_serverless/_sync/client/indices.py @@ -24,6 +24,7 @@ class IndicesClient(NamespacedClient): + @_rewrite_parameters() def add_block( self, @@ -34,25 +35,13 @@ def add_block( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -115,29 +104,17 @@ def analyze( *, index: t.Optional[str] = None, analyzer: t.Optional[str] = None, - attributes: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - char_filter: t.Optional[ - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ] - ] = None, + attributes: t.Optional[t.Sequence[str]] = None, + char_filter: t.Optional[t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]] = None, error_trace: t.Optional[bool] = None, explain: t.Optional[bool] = None, field: t.Optional[str] = None, - filter: t.Optional[ - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, 
t.Mapping[str, t.Any]], ...], - ] - ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter: t.Optional[t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, normalizer: t.Optional[str] = None, pretty: t.Optional[bool] = None, - text: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + text: t.Optional[t.Union[str, t.Sequence[str]]] = None, tokenizer: t.Optional[t.Union[str, t.Mapping[str, t.Any]]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -217,9 +194,7 @@ def create( index: str, aliases: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, mappings: t.Optional[t.Mapping[str, t.Any]] = None, master_timeout: t.Optional[ @@ -291,9 +266,7 @@ def create_data_stream( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -333,25 +306,13 @@ def data_streams_stats( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -390,30 +351,18 @@ def data_streams_stats( def delete( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -477,12 +426,10 @@ def delete( def delete_alias( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: 
t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -532,29 +479,17 @@ def delete_alias( def delete_data_lifecycle( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -601,29 +536,17 @@ def delete_data_lifecycle( def delete_data_stream( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -660,11 +583,9 @@ def delete_data_stream( def delete_index_template( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -675,7 +596,7 @@ def delete_index_template( """ Deletes an index template. - ``_ + ``_ :param name: Comma-separated list of index template names used to limit the request. Wildcard (*) expressions are supported. 
@@ -710,30 +631,18 @@ def delete_index_template( def exists( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -795,31 +704,19 @@ def exists( def exists_alias( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Union[str, t.Sequence[str]], + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -882,9 +779,7 @@ def exists_index_template( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -894,7 +789,7 @@ def exists_index_template( """ Returns information about whether a particular index template exists. - ``_ + ``_ :param name: Comma-separated list of index template names used to limit the request. Wildcard (*) expressions are supported. 
@@ -925,11 +820,9 @@ def exists_index_template( def explain_data_lifecycle( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -973,44 +866,26 @@ def explain_data_lifecycle( def get( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, features: t.Optional[ t.Union[ - t.Union["t.Literal['aliases', 'mappings', 'settings']", str], - t.Union[ - t.List[ - t.Union["t.Literal['aliases', 'mappings', 'settings']", str] - ], - t.Tuple[ - t.Union["t.Literal['aliases', 'mappings', 'settings']", str], - ..., - ], + t.Sequence[ + t.Union["t.Literal['aliases', 'mappings', 'settings']", str] ], + t.Union["t.Literal['aliases', 'mappings', 'settings']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -1086,31 +961,19 @@ def get( def get_alias( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -1172,29 +1035,17 @@ def get_alias( def get_data_lifecycle( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - 
t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1237,29 +1088,17 @@ def get_data_lifecycle( def get_data_stream( self, *, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1305,9 +1144,7 @@ def get_index_template( *, name: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, @@ -1320,7 +1157,7 @@ def get_index_template( """ Returns an index template. - ``_ + ``_ :param name: Comma-separated list of index template names used to limit the request. Wildcard (*) expressions are supported. 
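The read-side calls follow suit. A short sketch against the same assumed `client`, with placeholder names; every keyword argument used here appears in the signatures above:

# Wildcards are allowed where the docstrings above say so.
streams = client.indices.get_data_stream(name="logs-*", include_defaults=True)
lifecycle = client.indices.get_data_lifecycle(name=["logs-app", "logs-web"])
template = client.indices.get_index_template(name="logs-template", flat_settings=True)
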
@@ -1364,30 +1201,18 @@ def get_index_template( def get_mapping( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, local: t.Optional[bool] = None, @@ -1451,31 +1276,19 @@ def get_mapping( def get_settings( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, - name: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, + name: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -1556,9 +1369,7 @@ def migrate_to_data_stream( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1592,13 +1403,9 @@ def migrate_to_data_stream( def modify_data_stream( self, *, - actions: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + actions: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1635,13 +1442,11 @@ def modify_data_stream( def put_alias( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], name: str, error_trace: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, index_routing: t.Optional[str] = None, is_write_index: t.Optional[bool] = None, @@ -1728,7 +1533,7 @@ def put_alias( def put_data_lifecycle( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], data_retention: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, @@ -1736,25 +1541,13 @@ def put_data_lifecycle( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -1825,17 +1618,13 @@ def put_index_template( self, *, name: str, - composed_of: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + composed_of: t.Optional[t.Sequence[str]] = None, create: t.Optional[bool] = None, data_stream: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - index_patterns: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + index_patterns: t.Optional[t.Union[str, t.Sequence[str]]] = None, meta: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, priority: t.Optional[int] = None, @@ -1845,7 +1634,7 @@ def put_index_template( """ Creates or updates an index template. - ``_ + ``_ :param name: Index or template name :param composed_of: An ordered list of component template names. 
Component templates @@ -1915,47 +1704,30 @@ def put_index_template( def put_mapping( self, *, - index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + index: t.Union[str, t.Sequence[str]], allow_no_indices: t.Optional[bool] = None, date_detection: t.Optional[bool] = None, dynamic: t.Optional[ t.Union["t.Literal['false', 'runtime', 'strict', 'true']", str] ] = None, - dynamic_date_formats: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, + dynamic_date_formats: t.Optional[t.Sequence[str]] = None, dynamic_templates: t.Optional[ t.Union[ t.Mapping[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Mapping[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Mapping[str, t.Mapping[str, t.Any]], ...], - ], + t.Sequence[t.Mapping[str, t.Mapping[str, t.Any]]], ] ] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, field_names: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, master_timeout: t.Optional[ @@ -2071,30 +1843,18 @@ def put_settings( self, *, settings: t.Mapping[str, t.Any], - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, @@ -2177,14 +1937,10 @@ def put_template( aliases: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, create: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, flat_settings: t.Optional[bool] = None, human: t.Optional[bool] = None, - index_patterns: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + index_patterns: t.Optional[t.Union[str, t.Sequence[str]]] = None, mappings: t.Optional[t.Mapping[str, t.Any]] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -2198,7 +1954,7 @@ def put_template( """ Creates or updates an index template. 
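For the template writers above, a hedged sketch of creating a composable template backing a data stream, using the same assumed `client`; the keywords shown all appear in the `put_index_template` signature in this patch, and the template and component names are invented:

client.indices.put_index_template(
    name="logs-template",
    index_patterns=["logs-*"],
    data_stream={},  # empty object marks the template as a data-stream template
    composed_of=["logs-mappings", "logs-settings"],
    priority=200,
)
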
- ``_ + ``_ :param name: The name of the template :param aliases: Aliases for the index. @@ -2263,30 +2019,18 @@ def put_template( def refresh( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_indices: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2337,29 +2081,17 @@ def refresh( def resolve_index( self, *, - name: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + name: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -2407,9 +2139,7 @@ def rollover( conditions: t.Optional[t.Mapping[str, t.Any]] = None, dry_run: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, mappings: t.Optional[t.Mapping[str, t.Any]] = None, master_timeout: t.Optional[ @@ -2497,90 +2227,36 @@ def rollover( "POST", __path, params=__query, headers=__headers, body=__body ) - @_rewrite_parameters( - body_fields=True, - parameter_aliases={"_meta": "meta"}, - ) + @_rewrite_parameters() def simulate_index_template( self, *, name: str, - allow_auto_create: t.Optional[bool] = None, - composed_of: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - create: t.Optional[bool] = None, - data_stream: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, include_defaults: t.Optional[bool] = None, - index_patterns: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, - meta: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, - priority: 
t.Optional[int] = None, - template: t.Optional[t.Mapping[str, t.Any]] = None, - version: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ Simulate matching the given index name against the index templates in the system - ``_ + ``_ - :param name: Index or template name to simulate - :param allow_auto_create: This setting overrides the value of the `action.auto_create_index` - cluster setting. If set to `true` in a template, then indices can be automatically - created using that template even if auto-creation of indices is disabled - via `actions.auto_create_index`. If set to `false`, then indices or data - streams matching the template must always be explicitly created, and may - never be automatically created. - :param composed_of: An ordered list of component template names. Component templates - are merged in the order specified, meaning that the last component template - specified has the highest precedence. - :param create: If `true`, the template passed in the body is only used if no - existing templates match the same index patterns. If `false`, the simulation - uses the template with the highest priority. Note that the template is not - permanently added or updated in either case; it is only used for the simulation. - :param data_stream: If this object is included, the template is used to create - data streams and their backing indices. Supports an empty object. Data streams - require a matching index template with a `data_stream` object. + :param name: Name of the index to simulate :param include_defaults: If true, returns all relevant default configurations for the index template. - :param index_patterns: Array of wildcard (`*`) expressions used to match the - names of data streams and indices during creation. :param master_timeout: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. - :param meta: Optional user metadata about the index template. May have any contents. - This map is not automatically generated by Elasticsearch. - :param priority: Priority to determine index template precedence when a new data - stream or index is created. The index template with the highest priority - is chosen. If no priority is specified the template is treated as though - it is of priority 0 (lowest priority). This number is not automatically generated - by Elasticsearch. - :param template: Template to be applied. It may optionally include an `aliases`, - `mappings`, or `settings` configuration. - :param version: Version number used to manage index templates externally. This - number is not automatically generated by Elasticsearch. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'name'") __path = f"/_index_template/_simulate_index/{_quote(name)}" - __body: t.Dict[str, t.Any] = {} __query: t.Dict[str, t.Any] = {} - if allow_auto_create is not None: - __body["allow_auto_create"] = allow_auto_create - if composed_of is not None: - __body["composed_of"] = composed_of - if create is not None: - __query["create"] = create - if data_stream is not None: - __body["data_stream"] = data_stream if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -2589,90 +2265,130 @@ def simulate_index_template( __query["human"] = human if include_defaults is not None: __query["include_defaults"] = include_defaults - if index_patterns is not None: - __body["index_patterns"] = index_patterns if master_timeout is not None: __query["master_timeout"] = master_timeout - if meta is not None: - __body["_meta"] = meta if pretty is not None: __query["pretty"] = pretty - if priority is not None: - __body["priority"] = priority - if template is not None: - __body["template"] = template - if version is not None: - __body["version"] = version - if not __body: - __body = None # type: ignore[assignment] __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" return self.perform_request( # type: ignore[return-value] - "POST", __path, params=__query, headers=__headers, body=__body + "POST", __path, params=__query, headers=__headers ) @_rewrite_parameters( - body_name="template", + body_fields=True, + parameter_aliases={"_meta": "meta"}, ) def simulate_template( self, *, name: t.Optional[str] = None, + allow_auto_create: t.Optional[bool] = None, + composed_of: t.Optional[t.Sequence[str]] = None, create: t.Optional[bool] = None, + data_stream: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, + ignore_missing_component_templates: t.Optional[t.Sequence[str]] = None, include_defaults: t.Optional[bool] = None, + index_patterns: t.Optional[t.Union[str, t.Sequence[str]]] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, + meta: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, + priority: t.Optional[int] = None, template: t.Optional[t.Mapping[str, t.Any]] = None, + version: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ Simulate resolving the given template name or body - ``_ + ``_ :param name: Name of the index template to simulate. To test a template configuration before you add it to the cluster, omit this parameter and specify the template configuration in the request body. + :param allow_auto_create: This setting overrides the value of the `action.auto_create_index` + cluster setting. If set to `true` in a template, then indices can be automatically + created using that template even if auto-creation of indices is disabled + via `actions.auto_create_index`. If set to `false`, then indices or data + streams matching the template must always be explicitly created, and may + never be automatically created. + :param composed_of: An ordered list of component template names. Component templates + are merged in the order specified, meaning that the last component template + specified has the highest precedence. 
:param create: If true, the template passed in the body is only used if no existing templates match the same index patterns. If false, the simulation uses the template with the highest priority. Note that the template is not permanently added or updated in either case; it is only used for the simulation. + :param data_stream: If this object is included, the template is used to create + data streams and their backing indices. Supports an empty object. Data streams + require a matching index template with a `data_stream` object. + :param ignore_missing_component_templates: The configuration option ignore_missing_component_templates + can be used when an index template references a component template that might + not exist :param include_defaults: If true, returns all relevant default configurations for the index template. + :param index_patterns: Array of wildcard (`*`) expressions used to match the + names of data streams and indices during creation. :param master_timeout: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. - :param template: + :param meta: Optional user metadata about the index template. May have any contents. + This map is not automatically generated by Elasticsearch. + :param priority: Priority to determine index template precedence when a new data + stream or index is created. The index template with the highest priority + is chosen. If no priority is specified the template is treated as though + it is of priority 0 (lowest priority). This number is not automatically generated + by Elasticsearch. + :param template: Template to be applied. It may optionally include an `aliases`, + `mappings`, or `settings` configuration. + :param version: Version number used to manage index templates externally. This + number is not automatically generated by Elasticsearch. 
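With the two simulate calls split as above (`simulate_index_template` resolves a concrete index name against existing templates, while `simulate_template` accepts the template body), a brief sketch with the assumed `client` and invented names:

# Resolve which existing templates would apply to a concrete index name.
resolved = client.indices.simulate_index_template(name="logs-2024.04.08")

# Dry-run a template definition without storing it.
preview = client.indices.simulate_template(
    index_patterns=["metrics-*"],
    data_stream={},
    priority=150,
    include_defaults=True,
)
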
""" if name not in SKIP_IN_PATH: __path = f"/_index_template/_simulate/{_quote(name)}" else: __path = "/_index_template/_simulate" + __body: t.Dict[str, t.Any] = {} __query: t.Dict[str, t.Any] = {} + if allow_auto_create is not None: + __body["allow_auto_create"] = allow_auto_create + if composed_of is not None: + __body["composed_of"] = composed_of if create is not None: __query["create"] = create + if data_stream is not None: + __body["data_stream"] = data_stream if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: __query["filter_path"] = filter_path if human is not None: __query["human"] = human + if ignore_missing_component_templates is not None: + __body["ignore_missing_component_templates"] = ( + ignore_missing_component_templates + ) if include_defaults is not None: __query["include_defaults"] = include_defaults + if index_patterns is not None: + __body["index_patterns"] = index_patterns if master_timeout is not None: __query["master_timeout"] = master_timeout + if meta is not None: + __body["_meta"] = meta if pretty is not None: __query["pretty"] = pretty - __body = template + if priority is not None: + __body["priority"] = priority + if template is not None: + __body["template"] = template + if version is not None: + __body["version"] = version if not __body: - __body = None + __body = None # type: ignore[assignment] __headers = {"accept": "application/json"} if __body is not None: __headers["content-type"] = "application/json" @@ -2686,13 +2402,9 @@ def simulate_template( def update_aliases( self, *, - actions: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + actions: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -2740,7 +2452,7 @@ def update_aliases( def validate_query( self, *, - index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, all_shards: t.Optional[bool] = None, allow_no_indices: t.Optional[bool] = None, analyze_wildcard: t.Optional[bool] = None, @@ -2750,26 +2462,14 @@ def validate_query( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, explain: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, lenient: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_sync/client/inference.py b/elasticsearch_serverless/_sync/client/inference.py new file mode 100644 index 0000000..fd85a94 --- /dev/null +++ b/elasticsearch_serverless/_sync/client/inference.py @@ -0,0 +1,238 @@ +# Licensed to Elasticsearch B.V. 
under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import typing as t + +from elastic_transport import ObjectApiResponse + +from ._base import NamespacedClient +from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters + + +class InferenceClient(NamespacedClient): + + @_rewrite_parameters() + def delete_model( + self, + *, + inference_id: str, + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Delete model in the Inference API + + ``_ + + :param inference_id: The inference Id + :param task_type: The task type + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise ValueError("Couldn't find a path for the given parameters") + __query: t.Dict[str, t.Any] = {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + __headers = {"accept": "application/json"} + return self.perform_request( # type: ignore[return-value] + "DELETE", __path, params=__query, headers=__headers + ) + + @_rewrite_parameters() + def get_model( + self, + *, + inference_id: str, + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Get a model in the Inference API + + ``_ + + :param inference_id: The inference Id + :param task_type: The task type + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise ValueError("Couldn't find a path for the given parameters") + __query: t.Dict[str, t.Any] = {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + 
__query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + __headers = {"accept": "application/json"} + return self.perform_request( # type: ignore[return-value] + "GET", __path, params=__query, headers=__headers + ) + + @_rewrite_parameters( + body_fields=True, + ) + def inference( + self, + *, + inference_id: str, + input: t.Union[str, t.Sequence[str]], + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + query: t.Optional[str] = None, + task_settings: t.Optional[t.Any] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Perform inference on a model + + ``_ + + :param inference_id: The inference Id + :param input: Text input to the model. Either a string or an array of strings. + :param task_type: The task type + :param query: Query input, required for rerank task. Not required for other tasks. + :param task_settings: Optional task settings + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if input is None: + raise ValueError("Empty value passed for parameter 'input'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise ValueError("Couldn't find a path for the given parameters") + __body: t.Dict[str, t.Any] = {} + __query: t.Dict[str, t.Any] = {} + if input is not None: + __body["input"] = input + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if query is not None: + __body["query"] = query + if task_settings is not None: + __body["task_settings"] = task_settings + if not __body: + __body = None # type: ignore[assignment] + __headers = {"accept": "application/json"} + if __body is not None: + __headers["content-type"] = "application/json" + return self.perform_request( # type: ignore[return-value] + "POST", __path, params=__query, headers=__headers, body=__body + ) + + @_rewrite_parameters( + body_name="model_config", + ) + def put_model( + self, + *, + inference_id: str, + model_config: t.Mapping[str, t.Any], + task_type: t.Optional[ + t.Union[ + "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']", + str, + ] + ] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Configure a model for use in the Inference API + + ``_ + + :param inference_id: The inference Id + :param model_config: + :param task_type: The task type + """ + if inference_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'inference_id'") + if model_config is None: + raise ValueError("Empty value passed for parameter 'model_config'") + if task_type not in SKIP_IN_PATH and inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(task_type)}/{_quote(inference_id)}" + elif inference_id not in SKIP_IN_PATH: + __path = f"/_inference/{_quote(inference_id)}" + else: + raise 
ValueError("Couldn't find a path for the given parameters") + __query: t.Dict[str, t.Any] = {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + __body = model_config + __headers = {"accept": "application/json", "content-type": "application/json"} + return self.perform_request( # type: ignore[return-value] + "PUT", __path, params=__query, headers=__headers, body=__body + ) diff --git a/elasticsearch_serverless/_sync/client/ingest.py b/elasticsearch_serverless/_sync/client/ingest.py index c8255fc..cfcce36 100644 --- a/elasticsearch_serverless/_sync/client/ingest.py +++ b/elasticsearch_serverless/_sync/client/ingest.py @@ -24,15 +24,14 @@ class IngestClient(NamespacedClient): + @_rewrite_parameters() def delete_pipeline( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -80,9 +79,7 @@ def get_pipeline( *, id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] @@ -129,9 +126,7 @@ def processor_grok( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -165,22 +160,16 @@ def put_pipeline( id: str, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, if_version: t.Optional[int] = None, master_timeout: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, meta: t.Optional[t.Mapping[str, t.Any]] = None, - on_failure: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + on_failure: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, pretty: t.Optional[bool] = None, - processors: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + processors: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, version: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: @@ -253,13 +242,9 @@ def simulate( self, *, id: t.Optional[str] = None, - docs: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + docs: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pipeline: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, diff --git 
a/elasticsearch_serverless/_sync/client/license.py b/elasticsearch_serverless/_sync/client/license.py index 3faddf0..7ecca5b 100644 --- a/elasticsearch_serverless/_sync/client/license.py +++ b/elasticsearch_serverless/_sync/client/license.py @@ -24,15 +24,14 @@ class LicenseClient(NamespacedClient): + @_rewrite_parameters() def get( self, *, accept_enterprise: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, local: t.Optional[bool] = None, pretty: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_sync/client/logstash.py b/elasticsearch_serverless/_sync/client/logstash.py index 5726c1f..a51825e 100644 --- a/elasticsearch_serverless/_sync/client/logstash.py +++ b/elasticsearch_serverless/_sync/client/logstash.py @@ -24,15 +24,14 @@ class LogstashClient(NamespacedClient): + @_rewrite_parameters() def delete_pipeline( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -64,11 +63,9 @@ def delete_pipeline( def get_pipeline( self, *, - id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + id: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -79,8 +76,6 @@ def get_pipeline( :param id: Comma-separated list of pipeline identifiers. 
""" - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for parameter 'id'") if id not in SKIP_IN_PATH: __path = f"/_logstash/pipeline/{_quote(id)}" else: @@ -108,9 +103,7 @@ def put_pipeline( id: str, pipeline: t.Mapping[str, t.Any], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_sync/client/ml.py b/elasticsearch_serverless/_sync/client/ml.py index bc184f8..27148a1 100644 --- a/elasticsearch_serverless/_sync/client/ml.py +++ b/elasticsearch_serverless/_sync/client/ml.py @@ -24,6 +24,7 @@ class MlClient(NamespacedClient): + @_rewrite_parameters( body_fields=True, ) @@ -33,9 +34,7 @@ def close_job( job_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -91,9 +90,7 @@ def delete_calendar( *, calendar_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -128,9 +125,7 @@ def delete_calendar_event( calendar_id: str, event_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -167,11 +162,9 @@ def delete_calendar_job( self, *, calendar_id: str, - job_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + job_id: t.Union[str, t.Sequence[str]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -209,9 +202,7 @@ def delete_data_frame_analytics( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -254,9 +245,7 @@ def delete_datafeed( *, datafeed_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -298,9 +287,7 @@ def delete_filter( *, filter_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -335,9 +322,7 @@ def 
delete_job( job_id: str, delete_user_annotations: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -386,9 +371,7 @@ def delete_trained_model( *, model_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -429,9 +412,7 @@ def delete_trained_model_alias( model_id: str, model_alias: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -470,9 +451,7 @@ def estimate_model_memory( *, analysis_config: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_bucket_cardinality: t.Optional[t.Mapping[str, int]] = None, overall_cardinality: t.Optional[t.Mapping[str, int]] = None, @@ -529,9 +508,7 @@ def evaluate_data_frame( evaluation: t.Mapping[str, t.Any], index: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, @@ -583,9 +560,7 @@ def flush_job( calc_interim: t.Optional[bool] = None, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, skip_time: t.Optional[t.Union[str, t.Any]] = None, @@ -644,9 +619,7 @@ def get_calendar_events( calendar_id: str, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, job_id: t.Optional[str] = None, @@ -706,9 +679,7 @@ def get_calendars( *, calendar_id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, page: t.Optional[t.Mapping[str, t.Any]] = None, @@ -769,9 +740,7 @@ def get_data_frame_analytics( allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] 
= None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -834,9 +803,7 @@ def get_data_frame_analytics_stats( id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -893,14 +860,10 @@ def get_data_frame_analytics_stats( def get_datafeed_stats( self, *, - datafeed_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + datafeed_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -944,15 +907,11 @@ def get_datafeed_stats( def get_datafeeds( self, *, - datafeed_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + datafeed_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1003,13 +962,9 @@ def get_datafeeds( def get_filters( self, *, - filter_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1053,9 +1008,7 @@ def get_job_stats( job_id: t.Optional[str] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1100,15 +1053,11 @@ def get_job_stats( def get_jobs( self, *, - job_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + job_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1165,9 +1114,7 @@ def get_overall_buckets( end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, exclude_interim: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: 
t.Optional[bool] = None, overall_score: t.Optional[t.Union[float, str]] = None, pretty: t.Optional[bool] = None, @@ -1242,9 +1189,7 @@ def get_trained_models( decompress_definition: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, include: t.Optional[ @@ -1320,14 +1265,10 @@ def get_trained_models( def get_trained_models_stats( self, *, - model_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + model_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -1379,13 +1320,9 @@ def infer_trained_model( self, *, model_id: str, - docs: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], + docs: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, inference_config: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -1439,9 +1376,7 @@ def open_job( *, job_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -1485,13 +1420,9 @@ def post_calendar_events( self, *, calendar_id: str, - events: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + events: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1536,9 +1467,7 @@ def preview_data_frame_analytics( id: t.Optional[str] = None, config: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1587,9 +1516,7 @@ def preview_datafeed( datafeed_config: t.Optional[t.Mapping[str, t.Any]] = None, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, job_config: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -1655,11 +1582,9 @@ def put_calendar( calendar_id: str, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - job_ids: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + job_ids: t.Optional[t.Sequence[str]] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -1704,9 +1629,7 @@ def put_calendar_job( calendar_id: str, job_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -1753,12 +1676,8 @@ def put_data_frame_analytics( analyzed_fields: t.Optional[t.Mapping[str, t.Any]] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - headers: t.Optional[ - t.Mapping[str, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + headers: t.Optional[t.Mapping[str, t.Union[str, t.Sequence[str]]]] = None, human: t.Optional[bool] = None, max_num_threads: t.Optional[int] = None, model_memory_limit: t.Optional[str] = None, @@ -1884,38 +1803,20 @@ def put_datafeed( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", 
"t.Literal[0]", str]] = None, - headers: t.Optional[ - t.Mapping[str, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] - ] = None, + headers: t.Optional[t.Mapping[str, t.Union[str, t.Sequence[str]]]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indexes: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - indices: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + indexes: t.Optional[t.Union[str, t.Sequence[str]]] = None, + indices: t.Optional[t.Union[str, t.Sequence[str]]] = None, indices_options: t.Optional[t.Mapping[str, t.Any]] = None, job_id: t.Optional[str] = None, max_empty_searches: t.Optional[int] = None, @@ -2065,11 +1966,9 @@ def put_filter( filter_id: str, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - items: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + items: t.Optional[t.Sequence[str]] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -2123,10 +2022,8 @@ def put_job( datafeed_config: t.Optional[t.Mapping[str, t.Any]] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - groups: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + groups: t.Optional[t.Sequence[str]] = None, human: t.Optional[bool] = None, model_plot_config: t.Optional[t.Mapping[str, t.Any]] = None, model_snapshot_retention_days: t.Optional[int] = None, @@ -2279,9 +2176,7 @@ def put_trained_model( definition: t.Optional[t.Mapping[str, t.Any]] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, inference_config: t.Optional[t.Mapping[str, t.Any]] = None, input: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2290,8 +2185,11 @@ def put_trained_model( model_type: t.Optional[ t.Union["t.Literal['lang_ident', 'pytorch', 'tree_ensemble']", str] ] = None, + platform_architecture: t.Optional[str] = None, + prefix_strings: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, - tags: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + tags: t.Optional[t.Sequence[str]] = None, + wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Creates an inference trained model. @@ -2318,7 +2216,18 @@ def put_trained_model( model in memory. This property is supported only if defer_definition_decompression is true or the model definition is not supplied. :param model_type: The model type. + :param platform_architecture: The platform architecture (if applicable) of the + trained mode. If the model only works on one platform, because it is heavily + optimized for a particular processor architecture and OS combination, then + this field specifies which. The format of the string must match the platform + identifiers used by Elasticsearch, so one of, `linux-x86_64`, `linux-aarch64`, + `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. 
For portable models + (those that work independent of processor architecture or OS features), leave + this field unset. + :param prefix_strings: Optional prefix strings applied at inference :param tags: An array of tags to organize the model. + :param wait_for_completion: Whether to wait for all child operations (e.g. model + download) to complete. """ if model_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'model_id'") @@ -2349,10 +2258,16 @@ def put_trained_model( __body["model_size_bytes"] = model_size_bytes if model_type is not None: __body["model_type"] = model_type + if platform_architecture is not None: + __body["platform_architecture"] = platform_architecture + if prefix_strings is not None: + __body["prefix_strings"] = prefix_strings if pretty is not None: __query["pretty"] = pretty if tags is not None: __body["tags"] = tags + if wait_for_completion is not None: + __query["wait_for_completion"] = wait_for_completion __headers = {"accept": "application/json", "content-type": "application/json"} return self.perform_request( # type: ignore[return-value] "PUT", __path, params=__query, headers=__headers, body=__body @@ -2365,9 +2280,7 @@ def put_trained_model_alias( model_id: str, model_alias: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, reassign: t.Optional[bool] = None, @@ -2417,9 +2330,7 @@ def put_trained_model_definition_part( total_definition_length: int, total_parts: int, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -2480,14 +2391,13 @@ def put_trained_model_vocabulary( self, *, model_id: str, - vocabulary: t.Union[t.List[str], t.Tuple[str, ...]], + vocabulary: t.Sequence[str], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - merges: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + merges: t.Optional[t.Sequence[str]] = None, pretty: t.Optional[bool] = None, + scores: t.Optional[t.Sequence[float]] = None, ) -> ObjectApiResponse[t.Any]: """ Creates a trained model vocabulary @@ -2497,6 +2407,7 @@ def put_trained_model_vocabulary( :param model_id: The unique identifier of the trained model. :param vocabulary: The model vocabulary, which must not be empty. :param merges: The optional model merges if required by the tokenizer. + :param scores: The optional vocabulary value scores if required by the tokenizer. 
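# A minimal, hedged sketch of calling `ml.put_trained_model` with the parameters
# introduced above (`platform_architecture`, `prefix_strings`, `wait_for_completion`).
# The endpoint, API key, and model id are placeholders, and the `prefix_strings`
# shape (ingest/search keys) is an assumption on top of the generic Mapping type
# in the signature.
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

client.ml.put_trained_model(
    model_id="my-text-embedding-model",
    model_type="pytorch",
    input={"field_names": ["text_field"]},
    inference_config={"text_embedding": {}},
    # Only set this for models tuned to a single platform; leave it unset for
    # portable models, as the parameter description notes.
    platform_architecture="linux-x86_64",
    prefix_strings={"ingest": "passage: ", "search": "query: "},
    # New query parameter: block until child operations such as the model
    # download have finished.
    wait_for_completion=True,
    # `filter_path` now accepts a plain string or any sequence of strings.
    filter_path=["model_id", "model_type"],
)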
""" if model_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'model_id'") @@ -2517,6 +2428,8 @@ def put_trained_model_vocabulary( __body["merges"] = merges if pretty is not None: __query["pretty"] = pretty + if scores is not None: + __body["scores"] = scores __headers = {"accept": "application/json", "content-type": "application/json"} return self.perform_request( # type: ignore[return-value] "PUT", __path, params=__query, headers=__headers, body=__body @@ -2529,9 +2442,7 @@ def reset_job( job_id: str, delete_user_annotations: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, wait_for_completion: t.Optional[bool] = None, @@ -2575,9 +2486,7 @@ def start_data_frame_analytics( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -2621,9 +2530,7 @@ def start_datafeed( datafeed_id: str, end: t.Optional[t.Union[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, start: t.Optional[t.Union[str, t.Any]] = None, @@ -2676,10 +2583,9 @@ def start_trained_model_deployment( *, model_id: str, cache_size: t.Optional[t.Union[int, str]] = None, + deployment_id: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, number_of_allocations: t.Optional[int] = None, pretty: t.Optional[bool] = None, @@ -2701,6 +2607,7 @@ def start_trained_model_deployment( :param cache_size: The inference cache size (in memory outside the JVM heap) per node for the model. The default value is the same size as the `model_size_bytes`. To disable the cache, `0b` can be provided. + :param deployment_id: A unique identifier for the deployment of the model. :param number_of_allocations: The number of model allocations on each node where the model is deployed. All allocations on a node share the same copy of the model in memory but use a separate set of threads to evaluate the model. 
@@ -2727,6 +2634,8 @@ def start_trained_model_deployment( __query: t.Dict[str, t.Any] = {} if cache_size is not None: __query["cache_size"] = cache_size + if deployment_id is not None: + __query["deployment_id"] = deployment_id if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -2759,9 +2668,7 @@ def stop_data_frame_analytics( id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2819,9 +2726,7 @@ def stop_datafeed( datafeed_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2876,9 +2781,7 @@ def stop_trained_model_deployment( model_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -2930,9 +2833,7 @@ def update_data_frame_analytics( allow_lazy_start: t.Optional[bool] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, max_num_threads: t.Optional[int] = None, model_memory_limit: t.Optional[str] = None, @@ -2999,31 +2900,19 @@ def update_datafeed( error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ - t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str - ], - ..., - ], + t.Sequence[ + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str] ], + t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str], ] ] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, ignore_throttled: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - indexes: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, - indices: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + indexes: t.Optional[t.Sequence[str]] = None, + indices: t.Optional[t.Sequence[str]] = None, indices_options: t.Optional[t.Mapping[str, t.Any]] = None, job_id: t.Optional[str] = None, max_empty_searches: t.Optional[int] = None, @@ -3180,15 +3069,13 @@ def update_filter( self, *, filter_id: str, - add_items: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + add_items: t.Optional[t.Sequence[str]] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: 
t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, - remove_items: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + remove_items: t.Optional[t.Sequence[str]] = None, ) -> ObjectApiResponse[t.Any]: """ Updates the description of a filter, adds items, or removes items. @@ -3236,20 +3123,14 @@ def update_job( background_persist_interval: t.Optional[ t.Union["t.Literal[-1]", "t.Literal[0]", str] ] = None, - categorization_filters: t.Optional[ - t.Union[t.List[str], t.Tuple[str, ...]] - ] = None, + categorization_filters: t.Optional[t.Sequence[str]] = None, custom_settings: t.Optional[t.Mapping[str, t.Any]] = None, daily_model_snapshot_retention_after_days: t.Optional[int] = None, description: t.Optional[str] = None, - detectors: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + detectors: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, - groups: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + groups: t.Optional[t.Sequence[str]] = None, human: t.Optional[bool] = None, model_plot_config: t.Optional[t.Mapping[str, t.Any]] = None, model_prune_window: t.Optional[ diff --git a/elasticsearch_serverless/_sync/client/query_ruleset.py b/elasticsearch_serverless/_sync/client/query_ruleset.py index 6680028..33b09b5 100644 --- a/elasticsearch_serverless/_sync/client/query_ruleset.py +++ b/elasticsearch_serverless/_sync/client/query_ruleset.py @@ -24,15 +24,14 @@ class QueryRulesetClient(NamespacedClient): + @_rewrite_parameters() def delete( self, *, ruleset_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -66,9 +65,7 @@ def get( *, ruleset_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -103,9 +100,7 @@ def list( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -145,13 +140,9 @@ def put( self, *, ruleset_id: str, - rules: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] 
- ], + rules: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_sync/client/search_application.py b/elasticsearch_serverless/_sync/client/search_application.py index 17bfd3b..646b520 100644 --- a/elasticsearch_serverless/_sync/client/search_application.py +++ b/elasticsearch_serverless/_sync/client/search_application.py @@ -24,22 +24,21 @@ class SearchApplicationClient(NamespacedClient): + @_rewrite_parameters() def delete( self, *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Deletes a search application. - ``_ + ``_ :param name: The name of the search application to delete """ @@ -66,9 +65,7 @@ def delete_behavioral_analytics( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -102,9 +99,7 @@ def get( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -136,11 +131,9 @@ def get( def get_behavioral_analytics( self, *, - name: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + name: t.Optional[t.Sequence[str]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -176,9 +169,7 @@ def list( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -225,9 +216,7 @@ def put( search_application: t.Mapping[str, t.Any], create: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -269,9 +258,7 @@ def put_behavioral_analytics( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -308,9 +295,7 @@ def search( *, name: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, 
t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, params: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_sync/client/security.py b/elasticsearch_serverless/_sync/client/security.py index f8b6134..a10b079 100644 --- a/elasticsearch_serverless/_sync/client/security.py +++ b/elasticsearch_serverless/_sync/client/security.py @@ -24,14 +24,13 @@ class SecurityClient(NamespacedClient): + @_rewrite_parameters() def authenticate( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -64,9 +63,7 @@ def create_api_key( *, error_trace: t.Optional[bool] = None, expiration: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, metadata: t.Optional[t.Mapping[str, t.Any]] = None, name: t.Optional[str] = None, @@ -129,10 +126,9 @@ def create_api_key( def get_api_key( self, *, + active_only: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, id: t.Optional[str] = None, name: t.Optional[str] = None, @@ -141,12 +137,18 @@ def get_api_key( realm_name: t.Optional[str] = None, username: t.Optional[str] = None, with_limited_by: t.Optional[bool] = None, + with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Retrieves information for one or more API keys. ``_ + :param active_only: A boolean flag that can be used to query API keys that are + currently active. An API key is considered active if it is neither invalidated, + nor expired at query time. You can specify this together with other parameters + such as `owner` or `name`. If `active_only` is false, the response will include + both active and inactive (expired or invalidated) keys. :param id: An API key id. This parameter cannot be used with any of `name`, `realm_name` or `username`. :param name: An API key name. This parameter cannot be used with any of `id`, @@ -162,9 +164,13 @@ def get_api_key( :param with_limited_by: Return the snapshot of the owner user's role descriptors associated with the API key. An API key's actual permission is the intersection of its assigned role descriptors and the owner user's role descriptors. + :param with_profile_uid: Determines whether to also retrieve the profile uid, + for the API key owner principal, if it exists. 
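# A minimal sketch of the new `get_api_key` flags documented above; all values
# are placeholders.
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# List only the caller's keys that are neither expired nor invalidated, and
# include the owner's profile uid with each key.
resp = client.security.get_api_key(
    owner=True,
    active_only=True,
    with_profile_uid=True,
)
for key in resp["api_keys"]:
    print(key["name"], key.get("profile_uid"))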
""" __path = "/_security/api_key" __query: t.Dict[str, t.Any] = {} + if active_only is not None: + __query["active_only"] = active_only if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -185,6 +191,8 @@ def get_api_key( __query["username"] = username if with_limited_by is not None: __query["with_limited_by"] = with_limited_by + if with_profile_uid is not None: + __query["with_profile_uid"] = with_profile_uid __headers = {"accept": "application/json"} return self.perform_request( # type: ignore[return-value] "GET", __path, params=__query, headers=__headers @@ -197,34 +205,19 @@ def has_privileges( self, *, user: t.Optional[str] = None, - application: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + application: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, cluster: t.Optional[ - t.Union[ - t.List[ - t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'grant_api_key', 'manage', 'manage_api_key', 'manage_ccr', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'read_ccr', 'read_ilm', 'read_pipeline', 'read_slm', 'transport_client']", - str, - ] - ], - t.Tuple[ - t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'grant_api_key', 'manage', 'manage_api_key', 'manage_ccr', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'read_ccr', 'read_ilm', 'read_pipeline', 'read_slm', 'transport_client']", - str, - ], - ..., - ], + t.Sequence[ + t.Union[ + "t.Literal['all', 'cancel_task', 'create_snapshot', 'grant_api_key', 'manage', 'manage_api_key', 'manage_ccr', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'read_ccr', 'read_ilm', 'read_pipeline', 'read_slm', 'transport_client']", + str, + ] ] ] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, - index: t.Optional[ - t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]] - ] = None, + index: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -269,12 +262,10 @@ def 
invalidate_api_key( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, id: t.Optional[str] = None, - ids: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None, + ids: t.Optional[t.Sequence[str]] = None, name: t.Optional[str] = None, owner: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -335,44 +326,55 @@ def invalidate_api_key( def query_api_keys( self, *, + aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, + aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, query: t.Optional[t.Mapping[str, t.Any]] = None, search_after: t.Optional[ - t.Union[ - t.List[t.Union[None, bool, float, int, str, t.Any]], - t.Tuple[t.Union[None, bool, float, int, str, t.Any], ...], - ] + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] ] = None, size: t.Optional[int] = None, sort: t.Optional[ t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]], - t.Union[ - t.List[t.Union[str, t.Mapping[str, t.Any]]], - t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...], - ], ] ] = None, + typed_keys: t.Optional[bool] = None, with_limited_by: t.Optional[bool] = None, + with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ Retrieves information for API keys using a subset of query DSL ``_ + :param aggregations: Any aggregations to run over the corpus of returned API + keys. Aggregations and queries work together. Aggregations are computed only + on the API keys that match the query. This supports only a subset of aggregation + types, namely: `terms`, `range`, `date_range`, `missing`, `cardinality`, + `value_count`, `composite`, `filter`, and `filters`. Additionally, aggregations + only run over the same subset of fields that query works with. + :param aggs: Any aggregations to run over the corpus of returned API keys. Aggregations + and queries work together. Aggregations are computed only on the API keys + that match the query. This supports only a subset of aggregation types, namely: + `terms`, `range`, `date_range`, `missing`, `cardinality`, `value_count`, + `composite`, `filter`, and `filters`. Additionally, aggregations only run + over the same subset of fields that query works with. :param from_: Starting document offset. By default, you cannot page through more than 10,000 hits using the from and size parameters. To page through more hits, use the `search_after` parameter. - :param query: A query to filter which API keys to return. The query supports - a subset of query types, including `match_all`, `bool`, `term`, `terms`, - `ids`, `prefix`, `wildcard`, and `range`. You can query all public information - associated with an API key. + :param query: A query to filter which API keys to return. If the query parameter + is missing, it is equivalent to a `match_all` query. The query supports a + subset of query types, including `match_all`, `bool`, `term`, `terms`, `match`, + `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. 
+ You can query the following public information associated with an API key: + `id`, `type`, `name`, `creation`, `expiration`, `invalidated`, `invalidation`, + `username`, `realm`, and `metadata`. :param search_after: Search after definition :param size: The number of hits to return. By default, you cannot page through more than 10,000 hits using the `from` and `size` parameters. To page through @@ -380,13 +382,17 @@ def query_api_keys( :param sort: Other than `id`, all public fields of an API key are eligible for sorting. In addition, sort can also be applied to the `_doc` field to sort by index order. + :param typed_keys: Determines whether aggregation names are prefixed by their + respective types in the response. :param with_limited_by: Return the snapshot of the owner user's role descriptors associated with the API key. An API key's actual permission is the intersection of its assigned role descriptors and the owner user's role descriptors. + :param with_profile_uid: Determines whether to also retrieve the profile uid, + for the API key owner principal, if it exists. """ __path = "/_security/_query/api_key" - __query: t.Dict[str, t.Any] = {} __body: t.Dict[str, t.Any] = {} + __query: t.Dict[str, t.Any] = {} # The 'sort' parameter with a colon can't be encoded to the body. if sort is not None and ( (isinstance(sort, str) and ":" in sort) @@ -398,6 +404,10 @@ def query_api_keys( ): __query["sort"] = sort sort = None + if aggregations is not None: + __body["aggregations"] = aggregations + if aggs is not None: + __body["aggs"] = aggs if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -416,8 +426,12 @@ def query_api_keys( __body["size"] = size if sort is not None: __body["sort"] = sort + if typed_keys is not None: + __query["typed_keys"] = typed_keys if with_limited_by is not None: __query["with_limited_by"] = with_limited_by + if with_profile_uid is not None: + __query["with_profile_uid"] = with_profile_uid if not __body: __body = None # type: ignore[assignment] __headers = {"accept": "application/json"} @@ -435,9 +449,8 @@ def update_api_key( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + expiration: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, metadata: t.Optional[t.Mapping[str, t.Any]] = None, pretty: t.Optional[bool] = None, @@ -449,6 +462,7 @@ def update_api_key( ``_ :param id: The ID of the API key to update. + :param expiration: Expiration time for the API key. :param metadata: Arbitrary metadata that you want to associate with the API key. It supports nested data structure. Within the metadata object, keys beginning with _ are reserved for system usage. 
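# Hedged sketches of the aggregation support added to `query_api_keys` and the
# new `expiration` field on `update_api_key`, both described above. The key id
# and names are placeholders.
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# Bucket matching keys by owner username; only the aggregation types listed
# above (terms, range, filters, ...) are accepted, and `typed_keys` prefixes
# each aggregation name with its type in the response.
resp = client.security.query_api_keys(
    query={"prefix": {"name": "ingest-"}},
    aggs={"by_user": {"terms": {"field": "username"}}},
    typed_keys=True,
    with_profile_uid=True,
    size=0,
)
print(resp["aggregations"])

# Set or extend an existing key's expiration; metadata keys starting with an
# underscore remain reserved for system use.
client.security.update_api_key(
    id="my-api-key-id",
    expiration="30d",
    metadata={"environment": "production"},
)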
@@ -468,6 +482,8 @@ def update_api_key( __body: t.Dict[str, t.Any] = {} if error_trace is not None: __query["error_trace"] = error_trace + if expiration is not None: + __body["expiration"] = expiration if filter_path is not None: __query["filter_path"] = filter_path if human is not None: diff --git a/elasticsearch_serverless/_sync/client/sql.py b/elasticsearch_serverless/_sync/client/sql.py index 4f8b37e..8ef750a 100644 --- a/elasticsearch_serverless/_sync/client/sql.py +++ b/elasticsearch_serverless/_sync/client/sql.py @@ -24,6 +24,7 @@ class SqlClient(NamespacedClient): + @_rewrite_parameters( body_fields=True, ) @@ -32,9 +33,7 @@ def clear_cursor( *, cursor: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -71,9 +70,7 @@ def delete_async( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -109,9 +106,7 @@ def get_async( id: str, delimiter: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, keep_alive: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -168,9 +163,7 @@ def get_async_status( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -213,9 +206,7 @@ def query( fetch_size: t.Optional[int] = None, field_multi_value_leniency: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, index_using_frozen: t.Optional[bool] = None, @@ -330,9 +321,7 @@ def translate( error_trace: t.Optional[bool] = None, fetch_size: t.Optional[int] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, time_zone: t.Optional[str] = None, diff --git a/elasticsearch_serverless/_sync/client/synonyms.py b/elasticsearch_serverless/_sync/client/synonyms.py index 6fcec2e..07f0812 100644 --- a/elasticsearch_serverless/_sync/client/synonyms.py +++ b/elasticsearch_serverless/_sync/client/synonyms.py @@ -24,15 +24,14 @@ class SynonymsClient(NamespacedClient): + @_rewrite_parameters() def delete_synonym( self, *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: 
t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -67,9 +66,7 @@ def delete_synonym_rule( set_id: str, rule_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -108,9 +105,7 @@ def get_synonym( *, id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -153,9 +148,7 @@ def get_synonym_rule( set_id: str, rule_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -193,9 +186,7 @@ def get_synonyms_sets( self, *, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -235,13 +226,9 @@ def put_synonym( self, *, id: str, - synonyms_set: t.Union[ - t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...] - ], + synonyms_set: t.Sequence[t.Mapping[str, t.Any]], error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: @@ -283,11 +270,9 @@ def put_synonym_rule( *, set_id: str, rule_id: str, - synonyms: t.Union[t.List[str], t.Tuple[str, ...]], + synonyms: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: diff --git a/elasticsearch_serverless/_sync/client/tasks.py b/elasticsearch_serverless/_sync/client/tasks.py index dd166b7..078e850 100644 --- a/elasticsearch_serverless/_sync/client/tasks.py +++ b/elasticsearch_serverless/_sync/client/tasks.py @@ -24,15 +24,14 @@ class TasksClient(NamespacedClient): + @_rewrite_parameters() def get( self, *, task_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, diff --git a/elasticsearch_serverless/_sync/client/transform.py b/elasticsearch_serverless/_sync/client/transform.py index a5be968..ac0776f 100644 --- a/elasticsearch_serverless/_sync/client/transform.py +++ b/elasticsearch_serverless/_sync/client/transform.py @@ -24,15 +24,15 @@ class TransformClient(NamespacedClient): + @_rewrite_parameters() def delete_transform( self, *, transform_id: str, + delete_dest_index: t.Optional[bool] = None, 
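# A short sketch of the synonyms API as typed above: `put_synonym` takes a
# sequence of rule objects, while `put_synonym_rule` now takes the rule's
# synonyms as a single comma-separated string rather than a list of strings.
# Set and rule ids are placeholders.
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

client.synonyms.put_synonym(
    id="my-synonyms-set",
    synonyms_set=[
        {"id": "rule-1", "synonyms": "hello, hi, howdy"},
    ],
)

client.synonyms.put_synonym_rule(
    set_id="my-synonyms-set",
    rule_id="rule-2",
    synonyms="jacket, coat",
)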
error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -44,6 +44,9 @@ def delete_transform( ``_ :param transform_id: Identifier for the transform. + :param delete_dest_index: If this value is true, the destination index is deleted + together with the transform. If false, the destination index will not be + deleted :param force: If this value is false, the transform must be stopped before it can be deleted. If true, the transform is deleted regardless of its current state. @@ -54,6 +57,8 @@ def delete_transform( raise ValueError("Empty value passed for parameter 'transform_id'") __path = f"/_transform/{_quote(transform_id)}" __query: t.Dict[str, t.Any] = {} + if delete_dest_index is not None: + __query["delete_dest_index"] = delete_dest_index if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -77,15 +82,11 @@ def delete_transform( def get_transform( self, *, - transform_id: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + transform_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, exclude_generated: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -142,12 +143,10 @@ def get_transform( def get_transform_stats( self, *, - transform_id: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]], + transform_id: t.Union[str, t.Sequence[str]], allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[int] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -206,9 +205,7 @@ def preview_transform( description: t.Optional[str] = None, dest: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, latest: t.Optional[t.Mapping[str, t.Any]] = None, @@ -303,9 +300,7 @@ def put_transform( defer_validation: t.Optional[bool] = None, description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, latest: t.Optional[t.Mapping[str, t.Any]] = None, @@ -404,9 +399,7 @@ def reset_transform( *, transform_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ 
-448,9 +441,7 @@ def schedule_now_transform( *, transform_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, @@ -490,9 +481,7 @@ def start_transform( *, transform_id: str, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, from_: t.Optional[str] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -538,9 +527,7 @@ def stop_transform( transform_id: str, allow_no_match: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, force: t.Optional[bool] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, @@ -614,9 +601,7 @@ def update_transform( description: t.Optional[str] = None, dest: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, - filter_path: t.Optional[ - t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]] - ] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, frequency: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, human: t.Optional[bool] = None, meta: t.Optional[t.Mapping[str, t.Any]] = None, diff --git a/elasticsearch_serverless/client.py b/elasticsearch_serverless/client.py index 802268b..80e1667 100644 --- a/elasticsearch_serverless/client.py +++ b/elasticsearch_serverless/client.py @@ -27,6 +27,7 @@ from ._sync.client.eql import EqlClient as EqlClient # noqa: F401 from ._sync.client.graph import GraphClient as GraphClient # noqa: F401 from ._sync.client.indices import IndicesClient as IndicesClient # noqa: F401 +from ._sync.client.inference import InferenceClient as InferenceClient # noqa: F401 from ._sync.client.ingest import IngestClient as IngestClient # noqa: F401 from ._sync.client.license import LicenseClient as LicenseClient # noqa: F401 from ._sync.client.logstash import LogstashClient as LogstashClient # noqa: F401 @@ -58,6 +59,7 @@ "EqlClient", "GraphClient", "IndicesClient", + "InferenceClient", "IngestClient", "LicenseClient", "LogstashClient",
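# A hedged sketch of the `delete_dest_index` flag added to `delete_transform`
# above; the transform id is a placeholder.
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# Remove a stopped transform and drop its destination index in one call.
client.transform.delete_transform(
    transform_id="ecommerce-sales-by-country",
    delete_dest_index=True,
)
# (The newly exported InferenceClient is reachable as `client.inference`.)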