From 332b761b3bda81a03f6526bec076dc9972e2cbbf Mon Sep 17 00:00:00 2001 From: Elastic Machine Date: Mon, 21 Oct 2024 06:03:34 +0000 Subject: [PATCH 1/3] Auto-generated API code --- .../_async/client/async_search.py | 36 +++++++++---------- .../_async/client/indices.py | 3 +- .../_sync/client/async_search.py | 36 +++++++++---------- .../_sync/client/indices.py | 3 +- 4 files changed, 40 insertions(+), 38 deletions(-) diff --git a/elasticsearch_serverless/_async/client/async_search.py b/elasticsearch_serverless/_async/client/async_search.py index 74bfdc0..98bb435 100644 --- a/elasticsearch_serverless/_async/client/async_search.py +++ b/elasticsearch_serverless/_async/client/async_search.py @@ -36,11 +36,11 @@ async def delete( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an async search by identifier. If the search is still running, the search - request will be cancelled. Otherwise, the saved search results are deleted. If - the Elasticsearch security features are enabled, the deletion of a specific async - search is restricted to: the authenticated user that submitted the original search - request; users that have the `cancel_task` cluster privilege. + Delete an async search. If the asynchronous search is still running, it is cancelled. + Otherwise, the saved search results are deleted. If the Elasticsearch security + features are enabled, the deletion of a specific async search is restricted to: + the authenticated user that submitted the original search request; users that + have the `cancel_task` cluster privilege. ``_ @@ -85,9 +85,9 @@ async def get( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves the results of a previously submitted async search request given its - identifier. If the Elasticsearch security features are enabled, access to the - results of a specific async search is restricted to the user or API key that + Get async search results. Retrieve the results of a previously submitted asynchronous + search request. If the Elasticsearch security features are enabled, access to + the results of a specific async search is restricted to the user or API key that submitted it. ``_ @@ -148,7 +148,7 @@ async def status( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Get async search status Retrieves the status of a previously submitted async + Get async search status. Retrieve the status of a previously submitted async search request given its identifier, without retrieving search results. If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role. @@ -323,15 +323,15 @@ async def submit( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a search request asynchronously. When the primary sort of the results is - an indexed field, shards get sorted based on minimum and maximum value that they - hold for that field, hence partial results become available following the sort - criteria that was requested. Warning: Async search does not support scroll nor - search requests that only include the suggest section. By default, Elasticsearch - doesn’t allow you to store an async search response larger than 10Mb and an attempt - to do this results in an error. The maximum allowed size for a stored async search - response can be set by changing the `search.max_async_search_response_size` cluster - level setting. + Run an async search. 
When the primary sort of the results is an indexed field, + shards get sorted based on minimum and maximum value that they hold for that + field. Partial results become available following the sort criteria that was + requested. Warning: Asynchronous search does not support scroll or search requests + that include only the suggest section. By default, Elasticsearch does not allow + you to store an async search response larger than 10Mb and an attempt to do this + results in an error. The maximum allowed size for a stored async search response + can be set by changing the `search.max_async_search_response_size` cluster level + setting. ``_ diff --git a/elasticsearch_serverless/_async/client/indices.py b/elasticsearch_serverless/_async/client/indices.py index 4562271..b867ffa 100644 --- a/elasticsearch_serverless/_async/client/indices.py +++ b/elasticsearch_serverless/_async/client/indices.py @@ -137,7 +137,8 @@ async def analyze( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs analysis on a text string and returns the resulting tokens. + Get tokens from text analysis. The analyze API performs [analysis](https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis.html) + on a text string and returns the resulting tokens. ``_ diff --git a/elasticsearch_serverless/_sync/client/async_search.py b/elasticsearch_serverless/_sync/client/async_search.py index 8fbf318..167fece 100644 --- a/elasticsearch_serverless/_sync/client/async_search.py +++ b/elasticsearch_serverless/_sync/client/async_search.py @@ -36,11 +36,11 @@ def delete( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an async search by identifier. If the search is still running, the search - request will be cancelled. Otherwise, the saved search results are deleted. If - the Elasticsearch security features are enabled, the deletion of a specific async - search is restricted to: the authenticated user that submitted the original search - request; users that have the `cancel_task` cluster privilege. + Delete an async search. If the asynchronous search is still running, it is cancelled. + Otherwise, the saved search results are deleted. If the Elasticsearch security + features are enabled, the deletion of a specific async search is restricted to: + the authenticated user that submitted the original search request; users that + have the `cancel_task` cluster privilege. ``_ @@ -85,9 +85,9 @@ def get( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves the results of a previously submitted async search request given its - identifier. If the Elasticsearch security features are enabled, access to the - results of a specific async search is restricted to the user or API key that + Get async search results. Retrieve the results of a previously submitted asynchronous + search request. If the Elasticsearch security features are enabled, access to + the results of a specific async search is restricted to the user or API key that submitted it. ``_ @@ -148,7 +148,7 @@ def status( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Get async search status Retrieves the status of a previously submitted async + Get async search status. Retrieve the status of a previously submitted async search request given its identifier, without retrieving search results. If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role. 
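
For reviewers, the lifecycle these async_search docstrings describe reads roughly as follows. This is a sketch only: the endpoint URL, API key, and `my-index` are illustrative placeholders, not part of the generated code.

```python
import asyncio

from elasticsearch_serverless import AsyncElasticsearch


async def main() -> None:
    # Placeholder endpoint and API key -- substitute your own deployment.
    client = AsyncElasticsearch("https://my-deployment.example.com:443", api_key="...")

    # Submit the search. keep_on_completion=True stores the results even if
    # the search finishes within wait_for_completion_timeout, so an id is
    # returned for the follow-up calls below.
    submitted = await client.async_search.submit(
        index="my-index",
        query={"match_all": {}},
        wait_for_completion_timeout="2s",
        keep_on_completion=True,
    )
    search_id = submitted["id"]

    # Poll the status without fetching hits; the monitoring_user role suffices.
    while (await client.async_search.status(id=search_id))["is_running"]:
        await asyncio.sleep(1)

    results = await client.async_search.get(id=search_id)
    print(results["response"]["hits"]["total"])

    # Deleting removes the stored results; a still-running search is cancelled.
    await client.async_search.delete(id=search_id)
    await client.close()


asyncio.run(main())
```
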
@@ -323,15 +323,15 @@ def submit( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a search request asynchronously. When the primary sort of the results is - an indexed field, shards get sorted based on minimum and maximum value that they - hold for that field, hence partial results become available following the sort - criteria that was requested. Warning: Async search does not support scroll nor - search requests that only include the suggest section. By default, Elasticsearch - doesn’t allow you to store an async search response larger than 10Mb and an attempt - to do this results in an error. The maximum allowed size for a stored async search - response can be set by changing the `search.max_async_search_response_size` cluster - level setting. + Run an async search. When the primary sort of the results is an indexed field, + shards get sorted based on minimum and maximum value that they hold for that + field. Partial results become available following the sort criteria that was + requested. Warning: Asynchronous search does not support scroll or search requests + that include only the suggest section. By default, Elasticsearch does not allow + you to store an async search response larger than 10Mb and an attempt to do this + results in an error. The maximum allowed size for a stored async search response + can be set by changing the `search.max_async_search_response_size` cluster level + setting. ``_ diff --git a/elasticsearch_serverless/_sync/client/indices.py b/elasticsearch_serverless/_sync/client/indices.py index e3d63bc..1171daa 100644 --- a/elasticsearch_serverless/_sync/client/indices.py +++ b/elasticsearch_serverless/_sync/client/indices.py @@ -137,7 +137,8 @@ def analyze( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs analysis on a text string and returns the resulting tokens. + Get tokens from text analysis. The analyze API performs [analysis](https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis.html) + on a text string and returns the resulting tokens. ``_ From 9fc9277efef6eebf640c9294e16242dc00a06f8b Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Wed, 6 Nov 2024 11:00:20 +0400 Subject: [PATCH 2/3] Auto-generated API code --- .../_async/client/__init__.py | 104 ++++++++++++------ .../_async/client/async_search.py | 8 +- .../_async/client/connector.py | 77 +++++++++---- elasticsearch_serverless/_async/client/eql.py | 9 ++ .../_async/client/indices.py | 44 ++------ elasticsearch_serverless/_async/client/ml.py | 2 +- .../_async/client/query_rules.py | 53 +++++++++ .../_async/client/security.py | 43 ++++---- .../_sync/client/__init__.py | 104 ++++++++++++------ .../_sync/client/async_search.py | 8 +- .../_sync/client/connector.py | 77 +++++++++---- elasticsearch_serverless/_sync/client/eql.py | 9 ++ .../_sync/client/indices.py | 44 ++------ elasticsearch_serverless/_sync/client/ml.py | 2 +- .../_sync/client/query_rules.py | 53 +++++++++ .../_sync/client/security.py | 43 ++++---- 16 files changed, 456 insertions(+), 224 deletions(-) diff --git a/elasticsearch_serverless/_async/client/__init__.py b/elasticsearch_serverless/_async/client/__init__.py index 1bcbe9f..127bf8a 100644 --- a/elasticsearch_serverless/_async/client/__init__.py +++ b/elasticsearch_serverless/_async/client/__init__.py @@ -571,7 +571,8 @@ async def clear_scroll( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Clears the search context and results for a scrolling search. 
+ Clear a scrolling search. Clear the search context and results for a scrolling + search. ``_ @@ -621,7 +622,11 @@ async def close_point_in_time( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Closes a point-in-time. + Close a point in time. A point in time must be opened explicitly before being + used in search requests. The `keep_alive` parameter tells Elasticsearch how long + it should persist. A point in time is automatically closed when the `keep_alive` + period has elapsed. However, keeping points in time has a cost; close them as + soon as they are no longer required for search requests. ``_ @@ -1624,10 +1629,11 @@ async def field_caps( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - The field capabilities API returns the information about the capabilities of - fields among multiple indices. The field capabilities API returns runtime fields - like any other field. For example, a runtime field with a type of keyword is - returned as any other field that belongs to the `keyword` family. + Get the field capabilities. Get information about the capabilities of fields + among multiple indices. For data streams, the API returns field capabilities + among the stream’s backing indices. It returns runtime fields like any other + field. For example, a runtime field with a type of keyword is returned the same + as any other field that belongs to the `keyword` family. ``_ @@ -2163,7 +2169,10 @@ async def mget( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to get multiple documents in one request. + Get multiple documents. Get multiple JSON documents by ID from one or more indices. + If you specify an index in the request URI, you only need to specify the document + IDs in the request body. To ensure fast responses, this multi get (mget) API + responds with partial results if one or more shards fail. ``_ @@ -2284,7 +2293,13 @@ async def msearch( typed_keys: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to execute several search operations in one request. + Run multiple searches. The format of the request is similar to the bulk API format + and makes use of the newline delimited JSON (NDJSON) format. The structure is + as follows: ``` header\\n body\\n header\\n body\\n ``` This structure is specifically + optimized to reduce parsing if a specific search ends up redirected to another + node. IMPORTANT: The final line of data must end with a newline character `\\n`. + Each newline character may be preceded by a carriage return `\\r`. When sending + requests to this endpoint the `Content-Type` header should be set to `application/x-ndjson`. ``_ @@ -2416,7 +2431,7 @@ async def msearch_template( typed_keys: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs multiple templated searches with a single request. + Run multiple templated searches. ``_ @@ -2511,7 +2526,11 @@ async def mtermvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns multiple termvectors in one request. + Get multiple term vectors. You can specify existing documents by index and ID + or provide artificial documents in the body of the request. You can specify the + index in the request body or request URI. The response contains a `docs` array + with all the fetched termvectors. Each element has the structure provided by + the termvectors API. 
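
A minimal sketch of the multi term vectors call described above, assuming an illustrative `my-index` containing documents `1` and `2` (placeholder endpoint and credentials):

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-deployment.example.com:443", api_key="...")

resp = client.mtermvectors(
    index="my-index",
    ids=["1", "2"],       # fetch term vectors for existing documents by ID
    fields=["title"],
    term_statistics=True,
)
for doc in resp["docs"]:
    # Each element mirrors a single termvectors API response.
    print(doc["_id"], list(doc.get("term_vectors", {})))
```
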
``_

@@ -2622,13 +2641,15 @@ async def open_point_in_time(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        A search request by default executes against the most recent visible data of
-        the target indices, which is called point in time. Elasticsearch pit (point in
-        time) is a lightweight view into the state of the data as it existed when initiated.
-        In some cases, it’s preferred to perform multiple search requests using the same
-        point in time. For example, if refreshes happen between `search_after` requests,
-        then the results of those requests might not be consistent as changes happening
-        between searches are only visible to the more recent point in time.
+        Open a point in time. A search request by default runs against the most recent
+        visible data of the target indices, which is called point in time. Elasticsearch
+        pit (point in time) is a lightweight view into the state of the data as it existed
+        when initiated. In some cases, it’s preferred to perform multiple search requests
+        using the same point in time. For example, if refreshes happen between `search_after`
+        requests, then the results of those requests might not be consistent as changes
+        happening between searches are only visible to the more recent point in time.
+        A point in time must be opened explicitly before being used in search requests.
+        The `keep_alive` parameter tells Elasticsearch how long it should persist.

         ``_

@@ -2795,8 +2816,8 @@ async def rank_eval(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Enables you to evaluate the quality of ranked search results over a set of typical
-        search queries.
+        Evaluate ranked search results. Evaluate the quality of ranked search results
+        over a set of typical search queries.

         ``_

@@ -2994,7 +3015,7 @@ async def render_search_template(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Renders a search template as a search request body.
+        Render a search template. Render a search template as a search request body.

         ``_

@@ -3120,7 +3141,22 @@ async def scroll(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Allows to retrieve a large numbers of results from a single search request.
+        Run a scrolling search. IMPORTANT: The scroll API is no longer recommended for
+        deep pagination. If you need to preserve the index state while paging through
+        more than 10,000 hits, use the `search_after` parameter with a point in time
+        (PIT). The scroll API gets large sets of results from a single scrolling search
+        request. To get the necessary scroll ID, submit a search API request that includes
+        an argument for the `scroll` query parameter. The `scroll` parameter indicates
+        how long Elasticsearch should retain the search context for the request. The
+        search response returns a scroll ID in the `_scroll_id` response body parameter.
+        You can then use the scroll ID with the scroll API to retrieve the next batch
+        of results for the request. If the Elasticsearch security features are enabled,
+        access to the results of a specific scroll ID is restricted to the user or
+        API key that submitted the search. You can also use the scroll API to specify
+        a new scroll parameter that extends or shortens the retention period for the
+        search context. IMPORTANT: Results from a scrolling search reflect the state
+        of the index at the time of the initial search request. Subsequent indexing or
+        document changes only affect later search and scroll requests.
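
The scrolling flow in the docstring above corresponds roughly to this client-side loop (shown with the sync client for brevity; the connection details and index name are placeholders):

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-deployment.example.com:443", api_key="...")

# The initial search sets the retention window via the `scroll` parameter.
resp = client.search(index="my-index", scroll="2m", size=1000, query={"match_all": {}})
scroll_id = resp["_scroll_id"]
try:
    while resp["hits"]["hits"]:
        for hit in resp["hits"]["hits"]:
            print(hit["_id"])
        # Each scroll call returns the next batch and refreshes the window.
        resp = client.scroll(scroll_id=scroll_id, scroll="2m")
        scroll_id = resp["_scroll_id"]
finally:
    # Free the search context as soon as paging is done.
    client.clear_scroll(scroll_id=scroll_id)
```
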
``_ @@ -3310,9 +3346,9 @@ async def search( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns search hits that match the query defined in the request. You can provide - search queries using the `q` query string parameter or the request body. If both - are specified, only the query parameter is used. + Run a search. Get search hits that match the query defined in the request. You + can provide search queries using the `q` query string parameter or the request + body. If both are specified, only the query parameter is used. ``_ @@ -3742,7 +3778,7 @@ async def search_mvt( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> BinaryApiResponse: """ - Search a vector tile. Searches a vector tile for geospatial values. + Search a vector tile. Search a vector tile for geospatial values. ``_ @@ -3912,7 +3948,7 @@ async def search_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a search with a search template. + Run a search with a search template. ``_ @@ -4044,9 +4080,15 @@ async def terms_enum( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - The terms enum API can be used to discover terms in the index that begin with - the provided string. It is designed for low-latency look-ups used in auto-complete - scenarios. + Get terms in an index. Discover terms that match a partial string in an index. + This "terms enum" API is designed for low-latency look-ups used in auto-complete + scenarios. If the `complete` property in the response is false, the returned + terms set may be incomplete and should be treated as approximate. This can occur + due to a few reasons, such as a request timeout or a node error. NOTE: The terms + enum API may return terms from deleted documents. Deleted documents are initially + only marked as deleted. It is not until their segments are merged that documents + are actually deleted. Until that happens, the terms enum API will return terms + from these documents. ``_ @@ -4144,8 +4186,8 @@ async def termvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Get term vector information. Returns information and statistics about terms in - the fields of a particular document. + Get term vector information. Get information and statistics about terms in the + fields of a particular document. ``_ diff --git a/elasticsearch_serverless/_async/client/async_search.py b/elasticsearch_serverless/_async/client/async_search.py index 98bb435..1f556c4 100644 --- a/elasticsearch_serverless/_async/client/async_search.py +++ b/elasticsearch_serverless/_async/client/async_search.py @@ -148,10 +148,10 @@ async def status( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Get async search status. Retrieve the status of a previously submitted async - search request given its identifier, without retrieving search results. If the - Elasticsearch security features are enabled, use of this API is restricted to - the `monitoring_user` role. + Get the async search status. Get the status of a previously submitted async search + request given its identifier, without retrieving search results. If the Elasticsearch + security features are enabled, use of this API is restricted to the `monitoring_user` + role. 
``_ diff --git a/elasticsearch_serverless/_async/client/connector.py b/elasticsearch_serverless/_async/client/connector.py index 9164776..28186bc 100644 --- a/elasticsearch_serverless/_async/client/connector.py +++ b/elasticsearch_serverless/_async/client/connector.py @@ -36,7 +36,8 @@ async def check_in( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the last_seen field in the connector, and sets it to current timestamp + Check in a connector. Update the `last_seen` field in the connector and set it + to the current timestamp. ``_ @@ -77,7 +78,10 @@ async def delete( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a connector. + Delete a connector. Removes a connector and associated sync jobs. This is a destructive + action that is not recoverable. NOTE: This action doesn’t delete any API keys, + ingest pipelines, or data indices associated with the connector. These need to + be removed manually. ``_ @@ -121,7 +125,7 @@ async def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a connector. + Get a connector. Get the details about a connector. ``_ @@ -168,7 +172,7 @@ async def list( size: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns existing connectors. + Get all connectors. Get information about all connectors. ``_ @@ -242,7 +246,11 @@ async def post( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a connector. + Create a connector. Connectors are Elasticsearch integrations that bring content + from third-party data sources, which can be deployed on Elastic Cloud or hosted + on your own infrastructure. Elastic managed connectors (Native connectors) are + a managed service on Elastic Cloud. Self-managed connectors (Connector clients) + are self-managed on your infrastructure. ``_ @@ -320,7 +328,7 @@ async def put( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a connector. + Create or update a connector. ``_ @@ -389,7 +397,10 @@ async def sync_job_cancel( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Cancels a connector sync job. + Cancel a connector sync job. Cancel a connector sync job, which sets the status + to cancelling and updates `cancellation_requested_at` to the current time. The + connector service is then responsible for setting the status of connector sync + jobs to cancelled. ``_ @@ -433,7 +444,8 @@ async def sync_job_delete( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a connector sync job. + Delete a connector sync job. Remove a connector sync job and its associated data. + This is a destructive action that is not recoverable. ``_ @@ -476,7 +488,7 @@ async def sync_job_get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a connector sync job. + Get a connector sync job. ``_ @@ -544,7 +556,8 @@ async def sync_job_list( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Lists connector sync jobs. + Get all connector sync jobs. Get information about all stored connector sync + jobs listed by their creation date in ascending order. ``_ @@ -605,7 +618,8 @@ async def sync_job_post( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a connector sync job. + Create a connector sync job. Create a connector sync job document in the internal + index and initialize its counters and timestamps with default values. 
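
A hedged sketch of creating a sync job as documented above; `my-connector` and the connection details are placeholders:

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-deployment.example.com:443", api_key="...")

# Creates the sync job document with default counters and timestamps.
job = client.connector.sync_job_post(id="my-connector", job_type="full")
print(client.connector.sync_job_get(connector_sync_job_id=job["id"]))
```
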
``_ @@ -656,7 +670,8 @@ async def update_active_filtering( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Activates the valid draft filtering for a connector. + Activate the connector draft filter. Activates the valid draft filtering for + a connector. ``_ @@ -701,7 +716,11 @@ async def update_api_key_id( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the API key id in the connector document + Update the connector API key ID. Update the `api_key_id` and `api_key_secret_id` + fields of a connector. You can specify the ID of the API key used for authorization + and the ID of the connector secret where the API key is stored. The connector + secret ID is required only for Elastic managed (native) connectors. Self-managed + connectors (connector clients) do not use this field. ``_ @@ -755,7 +774,8 @@ async def update_configuration( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the configuration field in the connector document + Update the connector configuration. Update the configuration field in the connector + document. ``_ @@ -808,7 +828,10 @@ async def update_error( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the filtering field in the connector document + Update the connector error field. Set the error field for the connector. If the + error provided in the request body is non-null, the connector’s status is updated + to error. Otherwise, if the error is reset to null, the connector status is updated + to connected. ``_ @@ -862,7 +885,10 @@ async def update_filtering( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the filtering field in the connector document + Update the connector filtering. Update the draft filtering configuration of a + connector and marks the draft validation state as edited. The filtering draft + is activated once validated by the running Elastic connector service. The filtering + property is used to configure sync rules (both basic and advanced) for a connector. ``_ @@ -918,7 +944,8 @@ async def update_filtering_validation( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the draft filtering validation info for a connector. + Update the connector draft filtering validation. Update the draft filtering validation + info for a connector. ``_ @@ -970,7 +997,8 @@ async def update_index_name( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the index_name in the connector document + Update the connector index name. Update the `index_name` field of a connector, + specifying the index where the data ingested by the connector is stored. ``_ @@ -1023,7 +1051,7 @@ async def update_name( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the name and description fields in the connector document + Update the connector name and description. ``_ @@ -1076,7 +1104,7 @@ async def update_native( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the is_native flag in the connector document + Update the connector is_native flag. ``_ @@ -1128,7 +1156,8 @@ async def update_pipeline( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the pipeline field in the connector document + Update the connector pipeline. 
When you create a new connector, the configuration + of an ingest pipeline is populated with default settings. ``_ @@ -1180,7 +1209,7 @@ async def update_scheduling( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the scheduling field in the connector document + Update the connector scheduling. ``_ @@ -1232,7 +1261,7 @@ async def update_service_type( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the service type of the connector + Update the connector service type. ``_ @@ -1291,7 +1320,7 @@ async def update_status( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the status of the connector + Update the connector status. ``_ diff --git a/elasticsearch_serverless/_async/client/eql.py b/elasticsearch_serverless/_async/client/eql.py index 3944130..ed21ddb 100644 --- a/elasticsearch_serverless/_async/client/eql.py +++ b/elasticsearch_serverless/_async/client/eql.py @@ -174,6 +174,7 @@ async def get_status( "filter", "keep_alive", "keep_on_completion", + "max_samples_per_key", "result_position", "runtime_mappings", "size", @@ -211,6 +212,7 @@ async def search( ignore_unavailable: t.Optional[bool] = None, keep_alive: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None, keep_on_completion: t.Optional[bool] = None, + max_samples_per_key: t.Optional[int] = None, pretty: t.Optional[bool] = None, result_position: t.Optional[t.Union[str, t.Literal["head", "tail"]]] = None, runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, @@ -244,6 +246,11 @@ async def search( in the response. :param keep_alive: :param keep_on_completion: + :param max_samples_per_key: By default, the response of a sample query contains + up to `10` samples, with one sample per unique set of join keys. Use the + `size` parameter to get a smaller or larger set of samples. To retrieve more + than one sample per set of join keys, use the `max_samples_per_key` parameter. + Pipes are not supported for sample queries. :param result_position: :param runtime_mappings: :param size: For basic queries, the maximum number of matching events to return. @@ -292,6 +299,8 @@ async def search( __body["keep_alive"] = keep_alive if keep_on_completion is not None: __body["keep_on_completion"] = keep_on_completion + if max_samples_per_key is not None: + __body["max_samples_per_key"] = max_samples_per_key if result_position is not None: __body["result_position"] = result_position if runtime_mappings is not None: diff --git a/elasticsearch_serverless/_async/client/indices.py b/elasticsearch_serverless/_async/client/indices.py index b867ffa..bd4c29d 100644 --- a/elasticsearch_serverless/_async/client/indices.py +++ b/elasticsearch_serverless/_async/client/indices.py @@ -701,7 +701,6 @@ async def exists_alias( filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - local: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ @@ -722,8 +721,6 @@ async def exists_alias( as `open,hidden`. Valid values are: `all`, `open`, `closed`, `hidden`, `none`. :param ignore_unavailable: If `false`, requests that include a missing data stream or index in the target indices or data streams return an error. - :param local: If `true`, the request retrieves information from the local node - only. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'name'") @@ -749,8 +746,6 @@ async def exists_alias( __query["human"] = human if ignore_unavailable is not None: __query["ignore_unavailable"] = ignore_unavailable - if local is not None: - __query["local"] = local if pretty is not None: __query["pretty"] = pretty __headers = {"accept": "application/json"} @@ -979,7 +974,6 @@ async def get_alias( filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, ignore_unavailable: t.Optional[bool] = None, - local: t.Optional[bool] = None, pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -1001,8 +995,6 @@ async def get_alias( as `open,hidden`. Valid values are: `all`, `open`, `closed`, `hidden`, `none`. :param ignore_unavailable: If `false`, the request returns an error if it targets a missing or closed index. - :param local: If `true`, the request retrieves information from the local node - only. """ __path_parts: t.Dict[str, str] if index not in SKIP_IN_PATH and name not in SKIP_IN_PATH: @@ -1030,8 +1022,6 @@ async def get_alias( __query["human"] = human if ignore_unavailable is not None: __query["ignore_unavailable"] = ignore_unavailable - if local is not None: - __query["local"] = local if pretty is not None: __query["pretty"] = pretty __headers = {"accept": "application/json"} @@ -1642,14 +1632,14 @@ async def put_alias( ) @_rewrite_parameters( - body_fields=("data_retention", "downsampling"), + body_name="lifecycle", ) async def put_data_lifecycle( self, *, name: t.Union[str, t.Sequence[str]], - data_retention: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None, - downsampling: t.Optional[t.Mapping[str, t.Any]] = None, + lifecycle: t.Optional[t.Mapping[str, t.Any]] = None, + body: t.Optional[t.Mapping[str, t.Any]] = None, error_trace: t.Optional[bool] = None, expand_wildcards: t.Optional[ t.Union[ @@ -1664,7 +1654,6 @@ async def put_data_lifecycle( master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None, pretty: t.Optional[bool] = None, timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None, - body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ Update data stream lifecycles. Update the data stream lifecycle of the specified @@ -1674,13 +1663,7 @@ async def put_data_lifecycle( :param name: Comma-separated list of data streams used to limit the request. Supports wildcards (`*`). To target all data streams use `*` or `_all`. - :param data_retention: If defined, every document added to this data stream will - be stored at least for this time frame. Any time after this duration the - document could be deleted. When empty, every document in this data stream - will be stored indefinitely. - :param downsampling: If defined, every backing index will execute the configured - downsampling configuration after the backing index is not the data stream - write index anymore. + :param lifecycle: :param expand_wildcards: Type of data stream that wildcard patterns can match. Supports comma-separated values, such as `open,hidden`. Valid values are: `all`, `hidden`, `open`, `closed`, `none`. @@ -1692,10 +1675,15 @@ async def put_data_lifecycle( """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'name'") + if lifecycle is None and body is None: + raise ValueError( + "Empty value passed for parameters 'lifecycle' and 'body', one of them should be set." 
+ ) + elif lifecycle is not None and body is not None: + raise ValueError("Cannot set both 'lifecycle' and 'body'") __path_parts: t.Dict[str, str] = {"name": _quote(name)} __path = f'/_data_stream/{__path_parts["name"]}/_lifecycle' __query: t.Dict[str, t.Any] = {} - __body: t.Dict[str, t.Any] = body if body is not None else {} if error_trace is not None: __query["error_trace"] = error_trace if expand_wildcards is not None: @@ -1710,16 +1698,8 @@ async def put_data_lifecycle( __query["pretty"] = pretty if timeout is not None: __query["timeout"] = timeout - if not __body: - if data_retention is not None: - __body["data_retention"] = data_retention - if downsampling is not None: - __body["downsampling"] = downsampling - if not __body: - __body = None # type: ignore[assignment] - __headers = {"accept": "application/json"} - if __body is not None: - __headers["content-type"] = "application/json" + __body = lifecycle if lifecycle is not None else body + __headers = {"accept": "application/json", "content-type": "application/json"} return await self.perform_request( # type: ignore[return-value] "PUT", __path, diff --git a/elasticsearch_serverless/_async/client/ml.py b/elasticsearch_serverless/_async/client/ml.py index e8dd9be..cc36142 100644 --- a/elasticsearch_serverless/_async/client/ml.py +++ b/elasticsearch_serverless/_async/client/ml.py @@ -2231,7 +2231,7 @@ async def put_datafeed( Create a datafeed. Datafeeds retrieve data from Elasticsearch for analysis by an anomaly detection job. You can associate only one datafeed with each anomaly detection job. The datafeed contains a query that runs at a defined interval - (`frequency`). If you are concerned about delayed data, you can add a delay (`query_delay') + (`frequency`). If you are concerned about delayed data, you can add a delay (`query_delay`) at each interval. When Elasticsearch security features are enabled, your datafeed remembers which roles the user who created it had at the time of creation and runs the query using those same roles. If you provide secondary authorization diff --git a/elasticsearch_serverless/_async/client/query_rules.py b/elasticsearch_serverless/_async/client/query_rules.py index f37fbf4..390e61c 100644 --- a/elasticsearch_serverless/_async/client/query_rules.py +++ b/elasticsearch_serverless/_async/client/query_rules.py @@ -383,3 +383,56 @@ async def put_ruleset( endpoint_id="query_rules.put_ruleset", path_parts=__path_parts, ) + + @_rewrite_parameters( + body_fields=("match_criteria",), + ) + async def test( + self, + *, + ruleset_id: str, + match_criteria: t.Optional[t.Mapping[str, t.Any]] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Creates or updates a query ruleset. 
+ + ``_ + + :param ruleset_id: The unique identifier of the query ruleset to be created or + updated + :param match_criteria: + """ + if ruleset_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for parameter 'ruleset_id'") + if match_criteria is None and body is None: + raise ValueError("Empty value passed for parameter 'match_criteria'") + __path_parts: t.Dict[str, str] = {"ruleset_id": _quote(ruleset_id)} + __path = f'/_query_rules/{__path_parts["ruleset_id"]}/_test' + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if not __body: + if match_criteria is not None: + __body["match_criteria"] = match_criteria + __headers = {"accept": "application/json", "content-type": "application/json"} + return await self.perform_request( # type: ignore[return-value] + "POST", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="query_rules.test", + path_parts=__path_parts, + ) diff --git a/elasticsearch_serverless/_async/client/security.py b/elasticsearch_serverless/_async/client/security.py index 7d48c06..931258b 100644 --- a/elasticsearch_serverless/_async/client/security.py +++ b/elasticsearch_serverless/_async/client/security.py @@ -85,7 +85,7 @@ async def create_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Create an API key. Creates an API key for access without requiring basic authentication. + Create an API key. Create an API key for access without requiring basic authentication. A successful request returns a JSON structure that contains the API key, its unique id, and its name. If applicable, it also returns expiration information for the API key in milliseconds. NOTE: By default, API keys never expire. You @@ -159,7 +159,7 @@ async def delete_role( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes roles in the native realm. + Delete roles. Delete roles in the native realm. ``_ @@ -289,8 +289,8 @@ async def get_builtin_privileges( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves the list of cluster privileges and index privileges that are available - in this version of Elasticsearch. + Get builtin privileges. Get the list of cluster privileges and index privileges + that are available in this version of Elasticsearch. ``_ """ @@ -326,9 +326,7 @@ async def get_role( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - The role management APIs are generally the preferred way to manage roles, rather - than using file-based role management. The get roles API cannot retrieve roles - that are defined in roles files. + Get roles. Get roles in the native realm. ``_ @@ -445,8 +443,8 @@ async def has_privileges( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Check user privileges. Determines whether the specified user has a specified - list of privileges. + Check user privileges. Determine whether the specified user has a specified list + of privileges. ``_ @@ -509,13 +507,17 @@ async def invalidate_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Invalidate API keys. Invalidates one or more API keys. The `manage_api_key` privilege - allows deleting any API keys. 
The `manage_own_api_key` only allows deleting API - keys that are owned by the user. In addition, with the `manage_own_api_key` privilege, - an invalidation request must be issued in one of the three formats: - Set the - parameter `owner=true`. - Or, set both `username` and `realm_name` to match the - user’s identity. - Or, if the request is issued by an API key, i.e. an API key - invalidates itself, specify its ID in the `ids` field. + Invalidate API keys. This API invalidates API keys created by the create API + key or grant API key APIs. Invalidated API keys fail authentication, but they + can still be viewed using the get API key information and query API key information + APIs, for at least the configured retention period, until they are automatically + deleted. The `manage_api_key` privilege allows deleting any API keys. The `manage_own_api_key` + only allows deleting API keys that are owned by the user. In addition, with the + `manage_own_api_key` privilege, an invalidation request must be issued in one + of the three formats: - Set the parameter `owner=true`. - Or, set both `username` + and `realm_name` to match the user’s identity. - Or, if the request is issued + by an API key, that is to say an API key invalidates itself, specify its ID in + the `ids` field. ``_ @@ -672,9 +674,10 @@ async def put_role( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - The role management APIs are generally the preferred way to manage roles, rather - than using file-based role management. The create or update roles API cannot - update roles that are defined in roles files. + Create or update roles. The role management APIs are generally the preferred + way to manage roles in the native realm, rather than using file-based role management. + The create or update roles API cannot update roles that are defined in roles + files. File-based role management is not available in Elastic Serverless. ``_ @@ -792,7 +795,7 @@ async def query_api_keys( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Query API keys. Retrieves a paginated list of API keys and their information. + Find API keys with a query. Get a paginated list of API keys and their information. You can optionally filter the results with a query. ``_ diff --git a/elasticsearch_serverless/_sync/client/__init__.py b/elasticsearch_serverless/_sync/client/__init__.py index 8065532..46d8d1d 100644 --- a/elasticsearch_serverless/_sync/client/__init__.py +++ b/elasticsearch_serverless/_sync/client/__init__.py @@ -569,7 +569,8 @@ def clear_scroll( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Clears the search context and results for a scrolling search. + Clear a scrolling search. Clear the search context and results for a scrolling + search. ``_ @@ -619,7 +620,11 @@ def close_point_in_time( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Closes a point-in-time. + Close a point in time. A point in time must be opened explicitly before being + used in search requests. The `keep_alive` parameter tells Elasticsearch how long + it should persist. A point in time is automatically closed when the `keep_alive` + period has elapsed. However, keeping points in time has a cost; close them as + soon as they are no longer required for search requests. 
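
The point-in-time lifecycle described above looks roughly like this in client code (placeholder connection details; `my-index` is illustrative):

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-deployment.example.com:443", api_key="...")

pit = client.open_point_in_time(index="my-index", keep_alive="1m")
try:
    # A search that uses a PIT must not also name an index.
    resp = client.search(
        pit={"id": pit["id"], "keep_alive": "1m"},
        query={"match_all": {}},
        sort=[{"_shard_doc": "asc"}],
    )
    print(resp["hits"]["total"])
finally:
    # Close explicitly rather than waiting for keep_alive to lapse.
    client.close_point_in_time(id=pit["id"])
```
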
``_ @@ -1622,10 +1627,11 @@ def field_caps( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - The field capabilities API returns the information about the capabilities of - fields among multiple indices. The field capabilities API returns runtime fields - like any other field. For example, a runtime field with a type of keyword is - returned as any other field that belongs to the `keyword` family. + Get the field capabilities. Get information about the capabilities of fields + among multiple indices. For data streams, the API returns field capabilities + among the stream’s backing indices. It returns runtime fields like any other + field. For example, a runtime field with a type of keyword is returned the same + as any other field that belongs to the `keyword` family. ``_ @@ -2161,7 +2167,10 @@ def mget( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to get multiple documents in one request. + Get multiple documents. Get multiple JSON documents by ID from one or more indices. + If you specify an index in the request URI, you only need to specify the document + IDs in the request body. To ensure fast responses, this multi get (mget) API + responds with partial results if one or more shards fail. ``_ @@ -2282,7 +2291,13 @@ def msearch( typed_keys: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to execute several search operations in one request. + Run multiple searches. The format of the request is similar to the bulk API format + and makes use of the newline delimited JSON (NDJSON) format. The structure is + as follows: ``` header\\n body\\n header\\n body\\n ``` This structure is specifically + optimized to reduce parsing if a specific search ends up redirected to another + node. IMPORTANT: The final line of data must end with a newline character `\\n`. + Each newline character may be preceded by a carriage return `\\r`. When sending + requests to this endpoint the `Content-Type` header should be set to `application/x-ndjson`. ``_ @@ -2414,7 +2429,7 @@ def msearch_template( typed_keys: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs multiple templated searches with a single request. + Run multiple templated searches. ``_ @@ -2509,7 +2524,11 @@ def mtermvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns multiple termvectors in one request. + Get multiple term vectors. You can specify existing documents by index and ID + or provide artificial documents in the body of the request. You can specify the + index in the request body or request URI. The response contains a `docs` array + with all the fetched termvectors. Each element has the structure provided by + the termvectors API. ``_ @@ -2620,13 +2639,15 @@ def open_point_in_time( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - A search request by default executes against the most recent visible data of - the target indices, which is called point in time. Elasticsearch pit (point in - time) is a lightweight view into the state of the data as it existed when initiated. - In some cases, it’s preferred to perform multiple search requests using the same - point in time. For example, if refreshes happen between `search_after` requests, - then the results of those requests might not be consistent as changes happening - between searches are only visible to the more recent point in time. + Open a point in time. 
A search request by default runs against the most recent
+        visible data of the target indices, which is called point in time. Elasticsearch
+        pit (point in time) is a lightweight view into the state of the data as it existed
+        when initiated. In some cases, it’s preferred to perform multiple search requests
+        using the same point in time. For example, if refreshes happen between `search_after`
+        requests, then the results of those requests might not be consistent as changes
+        happening between searches are only visible to the more recent point in time.
+        A point in time must be opened explicitly before being used in search requests.
+        The `keep_alive` parameter tells Elasticsearch how long it should persist.

         ``_

@@ -2793,8 +2814,8 @@ def rank_eval(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Enables you to evaluate the quality of ranked search results over a set of typical
-        search queries.
+        Evaluate ranked search results. Evaluate the quality of ranked search results
+        over a set of typical search queries.

         ``_

@@ -2992,7 +3013,7 @@ def render_search_template(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Renders a search template as a search request body.
+        Render a search template. Render a search template as a search request body.

         ``_

@@ -3118,7 +3139,22 @@ def scroll(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Allows to retrieve a large numbers of results from a single search request.
+        Run a scrolling search. IMPORTANT: The scroll API is no longer recommended for
+        deep pagination. If you need to preserve the index state while paging through
+        more than 10,000 hits, use the `search_after` parameter with a point in time
+        (PIT). The scroll API gets large sets of results from a single scrolling search
+        request. To get the necessary scroll ID, submit a search API request that includes
+        an argument for the `scroll` query parameter. The `scroll` parameter indicates
+        how long Elasticsearch should retain the search context for the request. The
+        search response returns a scroll ID in the `_scroll_id` response body parameter.
+        You can then use the scroll ID with the scroll API to retrieve the next batch
+        of results for the request. If the Elasticsearch security features are enabled,
+        access to the results of a specific scroll ID is restricted to the user or
+        API key that submitted the search. You can also use the scroll API to specify
+        a new scroll parameter that extends or shortens the retention period for the
+        search context. IMPORTANT: Results from a scrolling search reflect the state
+        of the index at the time of the initial search request. Subsequent indexing or
+        document changes only affect later search and scroll requests.

         ``_

@@ -3308,9 +3344,9 @@ def search(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Returns search hits that match the query defined in the request. You can provide
-        search queries using the `q` query string parameter or the request body. If both
-        are specified, only the query parameter is used.
+        Run a search. Get search hits that match the query defined in the request. You
+        can provide search queries using the `q` query string parameter or the request
+        body. If both are specified, only the query parameter is used.

         ``_

@@ -3740,7 +3776,7 @@ def search_mvt(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> BinaryApiResponse:
         """
-        Search a vector tile. Searches a vector tile for geospatial values.
+ Search a vector tile. Search a vector tile for geospatial values. ``_ @@ -3910,7 +3946,7 @@ def search_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a search with a search template. + Run a search with a search template. ``_ @@ -4042,9 +4078,15 @@ def terms_enum( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - The terms enum API can be used to discover terms in the index that begin with - the provided string. It is designed for low-latency look-ups used in auto-complete - scenarios. + Get terms in an index. Discover terms that match a partial string in an index. + This "terms enum" API is designed for low-latency look-ups used in auto-complete + scenarios. If the `complete` property in the response is false, the returned + terms set may be incomplete and should be treated as approximate. This can occur + due to a few reasons, such as a request timeout or a node error. NOTE: The terms + enum API may return terms from deleted documents. Deleted documents are initially + only marked as deleted. It is not until their segments are merged that documents + are actually deleted. Until that happens, the terms enum API will return terms + from these documents. ``_ @@ -4142,8 +4184,8 @@ def termvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Get term vector information. Returns information and statistics about terms in - the fields of a particular document. + Get term vector information. Get information and statistics about terms in the + fields of a particular document. ``_ diff --git a/elasticsearch_serverless/_sync/client/async_search.py b/elasticsearch_serverless/_sync/client/async_search.py index 167fece..427db59 100644 --- a/elasticsearch_serverless/_sync/client/async_search.py +++ b/elasticsearch_serverless/_sync/client/async_search.py @@ -148,10 +148,10 @@ def status( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Get async search status. Retrieve the status of a previously submitted async - search request given its identifier, without retrieving search results. If the - Elasticsearch security features are enabled, use of this API is restricted to - the `monitoring_user` role. + Get the async search status. Get the status of a previously submitted async search + request given its identifier, without retrieving search results. If the Elasticsearch + security features are enabled, use of this API is restricted to the `monitoring_user` + role. ``_ diff --git a/elasticsearch_serverless/_sync/client/connector.py b/elasticsearch_serverless/_sync/client/connector.py index fa78997..1ade6ac 100644 --- a/elasticsearch_serverless/_sync/client/connector.py +++ b/elasticsearch_serverless/_sync/client/connector.py @@ -36,7 +36,8 @@ def check_in( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the last_seen field in the connector, and sets it to current timestamp + Check in a connector. Update the `last_seen` field in the connector and set it + to the current timestamp. ``_ @@ -77,7 +78,10 @@ def delete( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a connector. + Delete a connector. Removes a connector and associated sync jobs. This is a destructive + action that is not recoverable. NOTE: This action doesn’t delete any API keys, + ingest pipelines, or data indices associated with the connector. These need to + be removed manually. 
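
As a usage sketch of the destructive delete documented above (placeholder connector ID; associated API keys, ingest pipelines, and data indices must be cleaned up separately):

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-deployment.example.com:443", api_key="...")

# Irreversibly removes the connector and its sync jobs; API keys, ingest
# pipelines, and data indices are left in place.
client.connector.delete(connector_id="my-connector")
```
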
``_ @@ -121,7 +125,7 @@ def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a connector. + Get a connector. Get the details about a connector. ``_ @@ -168,7 +172,7 @@ def list( size: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns existing connectors. + Get all connectors. Get information about all connectors. ``_ @@ -242,7 +246,11 @@ def post( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a connector. + Create a connector. Connectors are Elasticsearch integrations that bring content + from third-party data sources, which can be deployed on Elastic Cloud or hosted + on your own infrastructure. Elastic managed connectors (Native connectors) are + a managed service on Elastic Cloud. Self-managed connectors (Connector clients) + are self-managed on your infrastructure. ``_ @@ -320,7 +328,7 @@ def put( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a connector. + Create or update a connector. ``_ @@ -389,7 +397,10 @@ def sync_job_cancel( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Cancels a connector sync job. + Cancel a connector sync job. Cancel a connector sync job, which sets the status + to cancelling and updates `cancellation_requested_at` to the current time. The + connector service is then responsible for setting the status of connector sync + jobs to cancelled. ``_ @@ -433,7 +444,8 @@ def sync_job_delete( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a connector sync job. + Delete a connector sync job. Remove a connector sync job and its associated data. + This is a destructive action that is not recoverable. ``_ @@ -476,7 +488,7 @@ def sync_job_get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a connector sync job. + Get a connector sync job. ``_ @@ -544,7 +556,8 @@ def sync_job_list( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Lists connector sync jobs. + Get all connector sync jobs. Get information about all stored connector sync + jobs listed by their creation date in ascending order. ``_ @@ -605,7 +618,8 @@ def sync_job_post( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a connector sync job. + Create a connector sync job. Create a connector sync job document in the internal + index and initialize its counters and timestamps with default values. ``_ @@ -656,7 +670,8 @@ def update_active_filtering( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Activates the valid draft filtering for a connector. + Activate the connector draft filter. Activates the valid draft filtering for + a connector. ``_ @@ -701,7 +716,11 @@ def update_api_key_id( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the API key id in the connector document + Update the connector API key ID. Update the `api_key_id` and `api_key_secret_id` + fields of a connector. You can specify the ID of the API key used for authorization + and the ID of the connector secret where the API key is stored. The connector + secret ID is required only for Elastic managed (native) connectors. Self-managed + connectors (connector clients) do not use this field. ``_ @@ -755,7 +774,8 @@ def update_configuration( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates the configuration field in the connector document + Update the connector configuration. 
@@ -808,7 +828,10 @@ def update_error(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the filtering field in the connector document
+        Update the connector error field. Set the error field for the connector. If the
+        error provided in the request body is non-null, the connector’s status is updated
+        to error. Otherwise, if the error is reset to null, the connector status is updated
+        to connected.

         ``_

@@ -862,7 +885,10 @@ def update_filtering(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the filtering field in the connector document
+        Update the connector filtering. Update the draft filtering configuration of a
+        connector and mark the draft validation state as edited. The filtering draft
+        is activated once validated by the running Elastic connector service. The filtering
+        property is used to configure sync rules (both basic and advanced) for a connector.

         ``_

@@ -918,7 +944,8 @@ def update_filtering_validation(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the draft filtering validation info for a connector.
+        Update the connector draft filtering validation. Update the draft filtering validation
+        info for a connector.

         ``_

@@ -970,7 +997,8 @@ def update_index_name(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the index_name in the connector document
+        Update the connector index name. Update the `index_name` field of a connector,
+        specifying the index where the data ingested by the connector is stored.

         ``_

@@ -1023,7 +1051,7 @@ def update_name(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the name and description fields in the connector document
+        Update the connector name and description.

         ``_

@@ -1076,7 +1104,7 @@ def update_native(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the is_native flag in the connector document
+        Update the connector is_native flag.

         ``_

@@ -1128,7 +1156,8 @@ def update_pipeline(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the pipeline field in the connector document
+        Update the connector pipeline. When you create a new connector, the configuration
+        of an ingest pipeline is populated with default settings.

         ``_

@@ -1180,7 +1209,7 @@ def update_scheduling(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the scheduling field in the connector document
+        Update the connector scheduling.

         ``_

@@ -1232,7 +1261,7 @@ def update_service_type(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the service type of the connector
+        Update the connector service type.

         ``_

@@ -1291,7 +1320,7 @@ def update_status(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Updates the status of the connector
+        Update the connector status.

        ``_
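The sync job APIs above compose naturally: `sync_job_post` returns the ID of the job document it creates, which `sync_job_cancel` then accepts. A short sketch, again with a placeholder endpoint, key, and connector ID:

from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# Request a full sync for a hypothetical connector, then cancel it; the
# connector service is responsible for moving the job to cancelled.
job = client.connector.sync_job_post(id="my-connector", job_type="full")
client.connector.sync_job_cancel(connector_sync_job_id=job["id"])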
diff --git a/elasticsearch_serverless/_sync/client/eql.py b/elasticsearch_serverless/_sync/client/eql.py
index ce3e515..63ef319 100644
--- a/elasticsearch_serverless/_sync/client/eql.py
+++ b/elasticsearch_serverless/_sync/client/eql.py
@@ -174,6 +174,7 @@ def get_status(
         "filter",
         "keep_alive",
         "keep_on_completion",
+        "max_samples_per_key",
         "result_position",
         "runtime_mappings",
         "size",
@@ -211,6 +212,7 @@ def search(
         ignore_unavailable: t.Optional[bool] = None,
         keep_alive: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         keep_on_completion: t.Optional[bool] = None,
+        max_samples_per_key: t.Optional[int] = None,
         pretty: t.Optional[bool] = None,
         result_position: t.Optional[t.Union[str, t.Literal["head", "tail"]]] = None,
         runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
@@ -244,6 +246,11 @@ def search(
             in the response.
         :param keep_alive:
         :param keep_on_completion:
+        :param max_samples_per_key: By default, the response of a sample query contains
+            up to `10` samples, with one sample per unique set of join keys. Use the
+            `size` parameter to get a smaller or larger set of samples. To retrieve more
+            than one sample per set of join keys, use the `max_samples_per_key` parameter.
+            Pipes are not supported for sample queries.
         :param result_position:
         :param runtime_mappings:
         :param size: For basic queries, the maximum number of matching events to return.
@@ -292,6 +299,8 @@ def search(
             __body["keep_alive"] = keep_alive
         if keep_on_completion is not None:
             __body["keep_on_completion"] = keep_on_completion
+        if max_samples_per_key is not None:
+            __body["max_samples_per_key"] = max_samples_per_key
         if result_position is not None:
             __body["result_position"] = result_position
         if runtime_mappings is not None:
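To exercise the new `max_samples_per_key` parameter, a sample-query sketch; the index pattern and field names are illustrative, and the EQL `sample` syntax is summarized from the Elasticsearch EQL documentation:

from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# Sample queries return one sample per unique set of join keys by default;
# max_samples_per_key raises that to three samples per host.name here.
resp = client.eql.search(
    index="my-logs-*",
    query=(
        'sample by host.name '
        '[any where event.category == "process"] '
        '[any where event.category == "network"]'
    ),
    max_samples_per_key=3,
)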
diff --git a/elasticsearch_serverless/_sync/client/indices.py b/elasticsearch_serverless/_sync/client/indices.py
index 1171daa..65910c1 100644
--- a/elasticsearch_serverless/_sync/client/indices.py
+++ b/elasticsearch_serverless/_sync/client/indices.py
@@ -701,7 +701,6 @@ def exists_alias(
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
         ignore_unavailable: t.Optional[bool] = None,
-        local: t.Optional[bool] = None,
         pretty: t.Optional[bool] = None,
     ) -> HeadApiResponse:
         """
@@ -722,8 +721,6 @@
             as `open,hidden`. Valid values are: `all`, `open`, `closed`, `hidden`, `none`.
         :param ignore_unavailable: If `false`, requests that include a missing data stream
             or index in the target indices or data streams return an error.
-        :param local: If `true`, the request retrieves information from the local node
-            only.
         """
         if name in SKIP_IN_PATH:
             raise ValueError("Empty value passed for parameter 'name'")
@@ -749,8 +746,6 @@
             __query["human"] = human
         if ignore_unavailable is not None:
             __query["ignore_unavailable"] = ignore_unavailable
-        if local is not None:
-            __query["local"] = local
         if pretty is not None:
             __query["pretty"] = pretty
         __headers = {"accept": "application/json"}
@@ -979,7 +974,6 @@ def get_alias(
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
         ignore_unavailable: t.Optional[bool] = None,
-        local: t.Optional[bool] = None,
         pretty: t.Optional[bool] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
@@ -1001,8 +995,6 @@
             as `open,hidden`. Valid values are: `all`, `open`, `closed`, `hidden`, `none`.
         :param ignore_unavailable: If `false`, the request returns an error if it targets
             a missing or closed index.
-        :param local: If `true`, the request retrieves information from the local node
-            only.
         """
         __path_parts: t.Dict[str, str]
         if index not in SKIP_IN_PATH and name not in SKIP_IN_PATH:
@@ -1030,8 +1022,6 @@
             __query["human"] = human
         if ignore_unavailable is not None:
             __query["ignore_unavailable"] = ignore_unavailable
-        if local is not None:
-            __query["local"] = local
         if pretty is not None:
             __query["pretty"] = pretty
         __headers = {"accept": "application/json"}
@@ -1642,14 +1632,14 @@ def put_alias(
         )

     @_rewrite_parameters(
-        body_fields=("data_retention", "downsampling"),
+        body_name="lifecycle",
     )
     def put_data_lifecycle(
         self,
         *,
         name: t.Union[str, t.Sequence[str]],
-        data_retention: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
-        downsampling: t.Optional[t.Mapping[str, t.Any]] = None,
+        lifecycle: t.Optional[t.Mapping[str, t.Any]] = None,
+        body: t.Optional[t.Mapping[str, t.Any]] = None,
         error_trace: t.Optional[bool] = None,
         expand_wildcards: t.Optional[
             t.Union[
@@ -1664,7 +1654,6 @@
         master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         pretty: t.Optional[bool] = None,
         timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
-        body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
         Update data stream lifecycles. Update the data stream lifecycle of the specified
@@ -1674,13 +1663,7 @@
         :param name: Comma-separated list of data streams used to limit the request.
             Supports wildcards (`*`). To target all data streams use `*` or `_all`.
-        :param data_retention: If defined, every document added to this data stream will
-            be stored at least for this time frame. Any time after this duration the
-            document could be deleted. When empty, every document in this data stream
-            will be stored indefinitely.
-        :param downsampling: If defined, every backing index will execute the configured
-            downsampling configuration after the backing index is not the data stream
-            write index anymore.
+        :param lifecycle:
         :param expand_wildcards: Type of data stream that wildcard patterns can match.
             Supports comma-separated values, such as `open,hidden`. Valid values are:
             `all`, `hidden`, `open`, `closed`, `none`.
         """
         if name in SKIP_IN_PATH:
             raise ValueError("Empty value passed for parameter 'name'")
+        if lifecycle is None and body is None:
+            raise ValueError(
+                "Empty value passed for parameters 'lifecycle' and 'body', one of them should be set."
+            )
+        elif lifecycle is not None and body is not None:
+            raise ValueError("Cannot set both 'lifecycle' and 'body'")
         __path_parts: t.Dict[str, str] = {"name": _quote(name)}
         __path = f'/_data_stream/{__path_parts["name"]}/_lifecycle'
         __query: t.Dict[str, t.Any] = {}
-        __body: t.Dict[str, t.Any] = body if body is not None else {}
         if error_trace is not None:
             __query["error_trace"] = error_trace
         if expand_wildcards is not None:
@@ -1710,16 +1698,8 @@
             __query["pretty"] = pretty
         if timeout is not None:
             __query["timeout"] = timeout
-        if not __body:
-            if data_retention is not None:
-                __body["data_retention"] = data_retention
-            if downsampling is not None:
-                __body["downsampling"] = downsampling
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __body = lifecycle if lifecycle is not None else body
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
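With `body_name="lifecycle"`, the mapping passed as `lifecycle` is sent unchanged as the request body, so retention now lives inside that object instead of being a top-level keyword argument. A sketch against the new signature; the data stream name and retention period are placeholders:

from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# Equivalent to the old data_retention="7d" keyword argument, now nested in
# the lifecycle object that becomes the PUT body.
client.indices.put_data_lifecycle(
    name="my-data-stream",
    lifecycle={"data_retention": "7d"},
)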
diff --git a/elasticsearch_serverless/_sync/client/ml.py b/elasticsearch_serverless/_sync/client/ml.py
index 1e4afac..7db7748 100644
--- a/elasticsearch_serverless/_sync/client/ml.py
+++ b/elasticsearch_serverless/_sync/client/ml.py
@@ -2231,7 +2231,7 @@ def put_datafeed(
         Create a datafeed. Datafeeds retrieve data from Elasticsearch for analysis by
         an anomaly detection job. You can associate only one datafeed with each anomaly
         detection job. The datafeed contains a query that runs at a defined interval
-        (`frequency`). If you are concerned about delayed data, you can add a delay (`query_delay')
+        (`frequency`). If you are concerned about delayed data, you can add a delay (`query_delay`)
         at each interval. When Elasticsearch security features are enabled, your datafeed
         remembers which roles the user who created it had at the time of creation and
         runs the query using those same roles. If you provide secondary authorization
diff --git a/elasticsearch_serverless/_sync/client/query_rules.py b/elasticsearch_serverless/_sync/client/query_rules.py
index 72150e9..1a2cc84 100644
--- a/elasticsearch_serverless/_sync/client/query_rules.py
+++ b/elasticsearch_serverless/_sync/client/query_rules.py
@@ -383,3 +383,56 @@ def put_ruleset(
             endpoint_id="query_rules.put_ruleset",
             path_parts=__path_parts,
         )
+
+    @_rewrite_parameters(
+        body_fields=("match_criteria",),
+    )
+    def test(
+        self,
+        *,
+        ruleset_id: str,
+        match_criteria: t.Optional[t.Mapping[str, t.Any]] = None,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+        body: t.Optional[t.Dict[str, t.Any]] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        Test a query ruleset.
+
+        ``_
+
+        :param ruleset_id: The unique identifier of the query ruleset to be
+            tested
+        :param match_criteria:
+        """
+        if ruleset_id in SKIP_IN_PATH:
+            raise ValueError("Empty value passed for parameter 'ruleset_id'")
+        if match_criteria is None and body is None:
+            raise ValueError("Empty value passed for parameter 'match_criteria'")
+        __path_parts: t.Dict[str, str] = {"ruleset_id": _quote(ruleset_id)}
+        __path = f'/_query_rules/{__path_parts["ruleset_id"]}/_test'
+        __query: t.Dict[str, t.Any] = {}
+        __body: t.Dict[str, t.Any] = body if body is not None else {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if not __body:
+            if match_criteria is not None:
+                __body["match_criteria"] = match_criteria
+        __headers = {"accept": "application/json", "content-type": "application/json"}
+        return self.perform_request(  # type: ignore[return-value]
+            "POST",
+            __path,
+            params=__query,
+            headers=__headers,
+            body=__body,
+            endpoint_id="query_rules.test",
+            path_parts=__path_parts,
+        )
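A usage sketch for the new `test` endpoint; the ruleset ID is a placeholder, and the `query_string` criteria key mirrors the example in the query rules documentation (the keys must match the criteria metadata defined by the rules in the ruleset):

from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# Dry-run the ruleset against some criteria to see which rules would fire.
resp = client.query_rules.test(
    ruleset_id="my-ruleset",
    match_criteria={"query_string": "puggles"},
)
print(resp["total_matched_rules"])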
diff --git a/elasticsearch_serverless/_sync/client/security.py b/elasticsearch_serverless/_sync/client/security.py
index b21d795..f13e024 100644
--- a/elasticsearch_serverless/_sync/client/security.py
+++ b/elasticsearch_serverless/_sync/client/security.py
@@ -85,7 +85,7 @@ def create_api_key(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Create an API key. Creates an API key for access without requiring basic authentication.
+        Create an API key. Create an API key for access without requiring basic authentication.
         A successful request returns a JSON structure that contains the API key, its
         unique id, and its name. If applicable, it also returns expiration information
         for the API key in milliseconds. NOTE: By default, API keys never expire. You
@@ -159,7 +159,7 @@ def delete_role(
         ] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Removes roles in the native realm.
+        Delete roles. Delete roles in the native realm.

         ``_

@@ -289,8 +289,8 @@ def get_builtin_privileges(
         pretty: t.Optional[bool] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Retrieves the list of cluster privileges and index privileges that are available
-        in this version of Elasticsearch.
+        Get builtin privileges. Get the list of cluster privileges and index privileges
+        that are available in this version of Elasticsearch.

         ``_
         """
@@ -326,9 +326,7 @@ def get_role(
         pretty: t.Optional[bool] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        The role management APIs are generally the preferred way to manage roles, rather
-        than using file-based role management. The get roles API cannot retrieve roles
-        that are defined in roles files.
+        Get roles. Get roles in the native realm.

         ``_

@@ -445,8 +443,8 @@ def has_privileges(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Check user privileges. Determines whether the specified user has a specified
-        list of privileges.
+        Check user privileges. Determine whether the specified user has a specified list
+        of privileges.

         ``_
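A sketch of the privilege check described above, run against the calling identity; the index name and privilege list are illustrative:

from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# has_all_requested is True only if every listed privilege is held.
resp = client.security.has_privileges(
    cluster=["monitor"],
    index=[{"names": ["my-index"], "privileges": ["read", "view_index_metadata"]}],
)
print(resp["has_all_requested"])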
@@ -509,13 +507,17 @@ def invalidate_api_key(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Invalidate API keys. Invalidates one or more API keys. The `manage_api_key` privilege
-        allows deleting any API keys. The `manage_own_api_key` only allows deleting API
-        keys that are owned by the user. In addition, with the `manage_own_api_key` privilege,
-        an invalidation request must be issued in one of the three formats: - Set the
-        parameter `owner=true`. - Or, set both `username` and `realm_name` to match the
-        user’s identity. - Or, if the request is issued by an API key, i.e. an API key
-        invalidates itself, specify its ID in the `ids` field.
+        Invalidate API keys. This API invalidates API keys created by the create API
+        key or grant API key APIs. Invalidated API keys fail authentication, but they
+        can still be viewed using the get API key information and query API key information
+        APIs, for at least the configured retention period, until they are automatically
+        deleted. The `manage_api_key` privilege allows deleting any API keys. The `manage_own_api_key`
+        only allows deleting API keys that are owned by the user. In addition, with the
+        `manage_own_api_key` privilege, an invalidation request must be issued in one
+        of the three formats: - Set the parameter `owner=true`. - Or, set both `username`
+        and `realm_name` to match the user’s identity. - Or, if the request is issued
+        by an API key, that is to say an API key invalidates itself, specify its ID in
+        the `ids` field.

         ``_

@@ -672,9 +674,10 @@ def put_role(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        The role management APIs are generally the preferred way to manage roles, rather
-        than using file-based role management. The create or update roles API cannot
-        update roles that are defined in roles files.
+        Create or update roles. The role management APIs are generally the preferred
+        way to manage roles in the native realm, rather than using file-based role management.
+        The create or update roles API cannot update roles that are defined in roles
+        files. File-based role management is not available in Elastic Serverless.

         ``_

@@ -792,7 +795,7 @@ def query_api_keys(
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
-        Query API keys. Retrieves a paginated list of API keys and their information.
+        Find API keys with a query. Get a paginated list of API keys and their information.
         You can optionally filter the results with a query.

         ``_
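Of the three invalidation formats listed above, the `owner=true` form is the simplest for self-service cleanup. A sketch, assuming the caller holds the `manage_own_api_key` privilege:

from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.io:443", api_key="<api-key>")

# Invalidate every API key owned by the authenticated user making this call.
client.security.invalidate_api_key(owner=True)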
From caa295b8adfe2501a9e43f97850474376fe80f2f Mon Sep 17 00:00:00 2001
From: Quentin Pradet
Date: Wed, 6 Nov 2024 11:11:25 +0400
Subject: [PATCH 3/3] Fix is_xpack_template

---
 test_elasticsearch_serverless/utils.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/test_elasticsearch_serverless/utils.py b/test_elasticsearch_serverless/utils.py
index 49f7ba9..99dfa83 100644
--- a/test_elasticsearch_serverless/utils.py
+++ b/test_elasticsearch_serverless/utils.py
@@ -123,11 +123,7 @@ def wipe_transforms(client: Elasticsearch, timeout=30):


 def is_xpack_template(name):
-    if name.startswith(".alerts-"):
-        return True
-    elif name.startswith(".kibana-data-quality-dashboard-"):
-        return True
-    elif name.startswith(".kibana-elastic-ai-assistant-component-template-"):
+    if name.startswith("."):
         return True
     elif name.startswith("behavioral_analytics-events"):
         return True
@@ -153,6 +149,7 @@ def is_xpack_template(name):
         "logs@mappings",
         "logs-settings",
         "logs@settings",
+        "logs-fleet_server.output_health@package",
         "metrics-apm@mappings",
         "metrics-apm.service_destination@mappings",
         "metrics-apm.service_summary@mappings",
@@ -165,6 +162,8 @@ def is_xpack_template(name):
         "metrics@settings",
         "metrics-tsdb-settings",
         "metrics@tsdb-settings",
+        "metrics-fleet_server.agent_status@package",
+        "metrics-fleet_server.agent_versions@package",
         "synthetics-mappings",
         "synthetics@mappings",
         "synthetics-settings",