diff --git a/elasticsearch_serverless/_async/client/__init__.py b/elasticsearch_serverless/_async/client/__init__.py index c952305..6bfab02 100644 --- a/elasticsearch_serverless/_async/client/__init__.py +++ b/elasticsearch_serverless/_async/client/__init__.py @@ -468,8 +468,9 @@ async def bulk( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs multiple indexing or delete operations in a single API call. This reduces - overhead and can greatly increase indexing speed. + Bulk index or delete documents. Performs multiple indexing or delete operations + in a single API call. This reduces overhead and can greatly increase indexing + speed. ``_ @@ -825,9 +826,9 @@ async def create( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. - If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. ``_ @@ -931,7 +932,7 @@ async def delete( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a JSON document from the specified index. + Delete a document. Removes a JSON document from the specified index. ``_ @@ -1055,7 +1056,7 @@ async def delete_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes documents that match the specified query. + Delete documents. Deletes documents that match the specified query. ``_ @@ -1236,7 +1237,7 @@ async def delete_script( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a stored script or search template. + Delete a script or search template. Deletes a stored script or search template. ``_ @@ -1304,7 +1305,7 @@ async def exists( ] = None, ) -> HeadApiResponse: """ - Checks if a document in an index exists. + Check a document. Checks if a specified document exists. ``_ @@ -1405,7 +1406,7 @@ async def exists_source( ] = None, ) -> HeadApiResponse: """ - Checks if a document's `_source` is stored. + Check for a document source. Checks if a document's `_source` is stored. ``_ @@ -1505,8 +1506,8 @@ async def explain( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about why a specific document matches (or doesn’t match) - a query. + Explain a document match result. Returns information about why a specific document + matches, or doesn’t match, a query. ``_ @@ -1744,7 +1745,8 @@ async def get( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns a document. + Get a document by its ID. Retrieves the document with the specified ID from an + index. ``_ @@ -1835,7 +1837,7 @@ async def get_script( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a stored script or search template. + Get a script or search template. Retrieves a stored script or search template. ``_ @@ -1897,7 +1899,7 @@ async def get_source( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns the source of a document. + Get a document's source. Returns the source of a document. ``_ @@ -1999,9 +2001,9 @@ async def index( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. 
- If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. ``_ @@ -2110,7 +2112,7 @@ async def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns basic information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ """ @@ -2688,7 +2690,8 @@ async def put_script( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a stored script or search template. + Create or update a script or search template. Creates or updates a stored script + or search template. ``_ @@ -2868,9 +2871,9 @@ async def reindex( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to copy documents from one index to another, optionally filtering the - source documents by a query, changing the destination index settings, or fetching - the documents from a remote cluster. + Reindex documents. Copies documents from a source to a destination. The source + can be any existing index, alias, or data stream. The destination must differ + from the source. For example, you cannot reindex a data stream into itself. ``_ @@ -3040,7 +3043,7 @@ async def scripts_painless_execute( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a script and returns a result. + Run a script. Runs a script and returns a result. ``_ @@ -3710,8 +3713,7 @@ async def search_mvt( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> BinaryApiResponse: """ - Searches a vector tile for geospatial values. Returns results as a binary Mapbox - vector tile. + Search a vector tile. Searches a vector tile for geospatial values. ``_ @@ -4113,8 +4115,8 @@ async def termvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information and statistics about terms in the fields of a particular - document. + Get term vector information. Returns information and statistics about terms in + the fields of a particular document. ``_ @@ -4256,7 +4258,8 @@ async def update( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates a document with a script or partial document. + Update a document. Updates a document by running a script or passing a partial + document. ``_ @@ -4421,9 +4424,9 @@ async def update_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates documents that match the specified query. If no query is specified, performs - an update on every document in the data stream or index without modifying the - source, which is useful for picking up mapping changes. + Update documents. Updates documents that match the specified query. If no query + is specified, performs an update on every document in the data stream or index + without modifying the source, which is useful for picking up mapping changes. 
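The document APIs retitled above all live on the client root. A minimal sketch of how they compose, assuming a Serverless project endpoint and API key (both placeholders here), with illustrative index and field names:

    import asyncio

    from elasticsearch_serverless import AsyncElasticsearch


    async def main() -> None:
        # Placeholder endpoint and credentials; substitute your project's values.
        es = AsyncElasticsearch("https://my-project.es.example.io:443", api_key="...")
        # Index a document (replaces an existing one and increments its version).
        await es.index(index="my-index", id="1", document={"user": "alice", "views": 0})
        # Get the document by its ID.
        doc = await es.get(index="my-index", id="1")
        print(doc["_source"])
        # Update it by passing a partial document.
        await es.update(index="my-index", id="1", doc={"views": 1})
        # Delete documents that match a query.
        await es.delete_by_query(index="my-index", query={"term": {"user": "alice"}})
        await es.close()


    asyncio.run(main())

The remaining sketches in this review reuse the same `es` client inside an async function.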
``_ diff --git a/elasticsearch_serverless/_async/client/cat.py b/elasticsearch_serverless/_async/client/cat.py index a789ca4..fa9b927 100644 --- a/elasticsearch_serverless/_async/client/cat.py +++ b/elasticsearch_serverless/_async/client/cat.py @@ -53,11 +53,11 @@ async def aliases( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Retrieves the cluster’s index aliases, including filter and routing information. - The API does not return data stream aliases. IMPORTANT: cat APIs are only intended + Get aliases. Retrieves the cluster’s index aliases, including filter and routing + information. The API does not return data stream aliases. CAT APIs are only intended for human consumption using the command line or the Kibana console. They are - not intended for use by applications. For application consumption, use the aliases - API. + not intended for use by applications. For application consumption, use the /_alias + endpoints. ``_ @@ -142,11 +142,12 @@ async def component_templates( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns information about component templates in a cluster. Component templates - are building blocks for constructing index templates that specify index mappings, - settings, and aliases. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get component template API. + Get component templates. Returns information about component templates in a cluster. + Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. CAT APIs are only intended for + human consumption using the command line or Kibana console. They are not intended + for use by applications. For application consumption, use the /_component_template + endpoints. ``_ @@ -227,12 +228,12 @@ async def count( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Provides quick access to a document count for a data stream, an index, or an - entire cluster. NOTE: The document count only includes live documents, not deleted - documents which have not yet been removed by the merge process. IMPORTANT: cat + Get a document count. Provides quick access to a document count for a data stream, + an index, or an entire cluster. NOTE: The document count only includes live documents, + not deleted documents which have not yet been removed by the merge process. CAT APIs are only intended for human consumption using the command line or Kibana console. They are not intended for use by applications. For application consumption, - use the count API. + use /_count endpoints. ``_ @@ -313,7 +314,7 @@ async def help( v: t.Optional[bool] = None, ) -> TextApiResponse: """ - Returns help for the Cat APIs. + Get CAT help. Returns help for the CAT APIs. ``_ @@ -404,16 +405,16 @@ async def indices( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns high-level information about indices in a cluster, including backing - indices for data streams. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get index API. 
Use the cat indices API to - get the following information for each index in a cluster: shard count; document - count; deleted document count; primary store size; total store size of all shards, - including shard replicas. These metrics are retrieved directly from Lucene, which - Elasticsearch uses internally to power indexing and search. As a result, all - document counts include hidden nested documents. To get an accurate count of - Elasticsearch documents, use the cat count or count APIs. + Get index information. Returns high-level information about indices in a cluster, + including backing indices for data streams. Use this request to get the following + information for each index in a cluster: - shard count - document count - deleted + document count - primary store size - total store size of all shards, including + shard replicas. These metrics are retrieved directly from Lucene, which Elasticsearch + uses internally to power indexing and search. As a result, all document counts + include hidden nested documents. To get an accurate count of Elasticsearch documents, + use the /_cat/count or /_count endpoints. CAT APIs are only intended for human + consumption using the command line or Kibana console. They are not intended for + use by applications. For application consumption, use an index endpoint. ``_ @@ -546,10 +547,10 @@ async def ml_data_frame_analytics( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about data frame analytics jobs. - IMPORTANT: cat APIs are only intended for human consumption using the Kibana - console or command line. They are not intended for use by applications. For application - consumption, use the get data frame analytics jobs statistics API. + Get data frame analytics jobs. Returns configuration and usage information about + data frame analytics jobs. CAT APIs are only intended for human consumption using + the Kibana console or command line. They are not intended for use by applications. + For application consumption, use the /_ml/data_frame/analytics endpoints. ``_ @@ -668,12 +669,12 @@ async def ml_datafeeds( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about datafeeds. This API returns - a maximum of 10,000 datafeeds. If the Elasticsearch security features are enabled, - you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges - to use this API. IMPORTANT: cat APIs are only intended for human consumption + Get datafeeds. Returns configuration and usage information about datafeeds. This + API returns a maximum of 10,000 datafeeds. If the Elasticsearch security features + are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` + cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. - For application consumption, use the get datafeed statistics API. + For application consumption, use the /_ml/datafeeds endpoints. ``_ @@ -798,13 +799,13 @@ async def ml_jobs( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information for anomaly detection jobs. This - API returns a maximum of 10,000 jobs. If the Elasticsearch security features - are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` - cluster privileges to use this API. 
IMPORTANT: cat APIs are only intended for + Get anomaly detection jobs. Returns configuration and usage information for anomaly + detection jobs. This API returns a maximum of 10,000 jobs. If the Elasticsearch + security features are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, + or `manage` cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended - for use by applications. For application consumption, use the get anomaly detection - job statistics API. + for use by applications. For application consumption, use the /_ml/anomaly_detectors + endpoints. ``_ @@ -932,10 +933,10 @@ async def ml_trained_models( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about inference trained models. IMPORTANT: - cat APIs are only intended for human consumption using the Kibana console or - command line. They are not intended for use by applications. For application - consumption, use the get trained models statistics API. + Get trained models. Returns configuration and usage information about inference + trained models. CAT APIs are only intended for human consumption using the Kibana + console or command line. They are not intended for use by applications. For application + consumption, use the /_ml/trained_models endpoints. ``_ @@ -1066,10 +1067,10 @@ async def transforms( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about transforms. IMPORTANT: cat - APIs are only intended for human consumption using the Kibana console or command - line. They are not intended for use by applications. For application consumption, - use the get transform statistics API. + Get transforms. Returns configuration and usage information about transforms. + CAT APIs are only intended for human consumption using the Kibana console or + command line. They are not intended for use by applications. For application + consumption, use the /_transform endpoints. ``_ diff --git a/elasticsearch_serverless/_async/client/cluster.py b/elasticsearch_serverless/_async/client/cluster.py index 1b77504..2f97214 100644 --- a/elasticsearch_serverless/_async/client/cluster.py +++ b/elasticsearch_serverless/_async/client/cluster.py @@ -40,8 +40,9 @@ async def delete_component_template( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes component templates. Component templates are building blocks for constructing - index templates that specify index mappings, settings, and aliases. + Delete component templates. Deletes component templates. Component templates + are building blocks for constructing index templates that specify index mappings, + settings, and aliases. ``_ @@ -95,7 +96,8 @@ async def exists_component_template( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Returns information about whether a particular component template exist + Check component templates. Returns information about whether a particular component + template exists. ``_ @@ -152,7 +154,7 @@ async def get_component_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about component templates. + Get component templates. Retrieves information about component templates. 
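The component-template endpoints in `cluster.py` pair naturally. A short sketch, with `es` as in the earlier example and an illustrative template name and settings:

    # Create or update a component template, then read it back.
    await es.cluster.put_component_template(
        name="my-settings",
        template={"settings": {"index": {"number_of_shards": 1}}},
    )
    if await es.cluster.exists_component_template(name="my-settings"):
        resp = await es.cluster.get_component_template(name="my-settings")
        print(resp["component_templates"][0]["component_template"])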
``_ @@ -221,7 +223,7 @@ async def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns different information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ @@ -274,17 +276,18 @@ async def put_component_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a component template. Component templates are building blocks - for constructing index templates that specify index mappings, settings, and aliases. - An index template can be composed of multiple component templates. To use a component - template, specify it in an index template’s `composed_of` list. Component templates - are only applied to new data streams and indices as part of a matching index - template. Settings and mappings specified directly in the index template or the - create index request override any settings or mappings specified in a component - template. Component templates are only used during index creation. For data streams, - this includes data stream creation and the creation of a stream’s backing indices. - Changes to component templates do not affect existing indices, including a stream’s - backing indices. You can use C-style `/* *\\/` block comments in component templates. + Create or update a component template. Creates or updates a component template. + Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. An index template can be composed + of multiple component templates. To use a component template, specify it in an + index template’s `composed_of` list. Component templates are only applied to + new data streams and indices as part of a matching index template. Settings and + mappings specified directly in the index template or the create index request + override any settings or mappings specified in a component template. Component + templates are only used during index creation. For data streams, this includes + data stream creation and the creation of a stream’s backing indices. Changes + to component templates do not affect existing indices, including a stream’s backing + indices. You can use C-style `/* *\\/` block comments in component templates. You can include comments anywhere in the request body except before the opening curly bracket. diff --git a/elasticsearch_serverless/_async/client/enrich.py b/elasticsearch_serverless/_async/client/enrich.py index 63c7e8e..6663826 100644 --- a/elasticsearch_serverless/_async/client/enrich.py +++ b/elasticsearch_serverless/_async/client/enrich.py @@ -36,7 +36,7 @@ async def delete_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing enrich policy and its enrich index. + Delete an enrich policy. Deletes an existing enrich policy and its enrich index. ``_ @@ -121,7 +121,7 @@ async def get_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about an enrich policy. + Get an enrich policy. Returns information about an enrich policy. ``_ @@ -171,7 +171,7 @@ async def put_policy( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an enrich policy. + Create an enrich policy. Creates an enrich policy. ``_ @@ -224,8 +224,8 @@ async def stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns enrich coordinator statistics and information about enrich policies that - are currently executing. + Get enrich stats. 
Returns enrich coordinator statistics and information about + enrich policies that are currently executing. ``_ """ diff --git a/elasticsearch_serverless/_async/client/esql.py b/elasticsearch_serverless/_async/client/esql.py index 9973903..d084964 100644 --- a/elasticsearch_serverless/_async/client/esql.py +++ b/elasticsearch_serverless/_async/client/esql.py @@ -26,7 +26,15 @@ class EsqlClient(NamespacedClient): @_rewrite_parameters( - body_fields=("query", "columnar", "filter", "locale", "params"), + body_fields=( + "query", + "columnar", + "filter", + "locale", + "params", + "profile", + "tables", + ), ignore_deprecated_options={"params"}, ) async def query( @@ -35,14 +43,21 @@ async def query( query: t.Optional[str] = None, columnar: t.Optional[bool] = None, delimiter: t.Optional[str] = None, + drop_null_columns: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, locale: t.Optional[str] = None, - params: t.Optional[t.Sequence[t.Union[None, bool, float, int, str]]] = None, + params: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, pretty: t.Optional[bool] = None, + profile: t.Optional[bool] = None, + tables: t.Optional[ + t.Mapping[str, t.Mapping[str, t.Mapping[str, t.Any]]] + ] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -58,6 +73,10 @@ async def query( row represents all the values of a certain column in the results. :param delimiter: The character to use between values within a CSV row. Only valid for the CSV format. + :param drop_null_columns: Should columns that are entirely `null` be removed + from the `columns` and `values` portion of the results? Defaults to `false`. + If `true` then the response will include an extra section under the name + `all_columns` which has the name of all columns. :param filter: Specify a Query DSL query in the filter parameter to filter the set of documents that an ES|QL query runs on. :param format: A short version of the Accept header, e.g. json, yaml. @@ -65,6 +84,12 @@ async def query( :param params: To avoid any attempts of hacking or code injection, extract the values in a separate list of parameters. Use question mark placeholders (?) in the query string for each of the parameters. + :param profile: If provided and `true` the response will include an extra `profile` + object with information on how the query was executed. This information is + for human debugging and its format can change at any time but it can give + some insight into the performance of each part of the query. + :param tables: Tables to use with the LOOKUP operation. The top level key is + the table name and the next level key is the column name. 
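The new `drop_null_columns`, `profile`, and `params` options documented above can be exercised together. A hedged sketch (index and field names are illustrative, `es` as before):

    # Positional `?` placeholders are filled from `params`; all-null columns are dropped.
    resp = await es.esql.query(
        query="FROM my-index | WHERE user == ? | LIMIT 10",
        params=["alice"],
        drop_null_columns=True,
        profile=True,  # response gains a `profile` object for human debugging
    )
    print(resp["columns"])  # [{"name": ..., "type": ...}, ...]
    print(resp["values"])   # row-oriented by default; columnar=True transposes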
""" if query is None and body is None: raise ValueError("Empty value passed for parameter 'query'") @@ -74,6 +99,8 @@ async def query( __body: t.Dict[str, t.Any] = body if body is not None else {} if delimiter is not None: __query["delimiter"] = delimiter + if drop_null_columns is not None: + __query["drop_null_columns"] = drop_null_columns if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -95,6 +122,10 @@ async def query( __body["locale"] = locale if params is not None: __body["params"] = params + if profile is not None: + __body["profile"] = profile + if tables is not None: + __body["tables"] = tables __headers = {"accept": "application/json", "content-type": "application/json"} return await self.perform_request( # type: ignore[return-value] "POST", diff --git a/elasticsearch_serverless/_async/client/indices.py b/elasticsearch_serverless/_async/client/indices.py index c372441..befc983 100644 --- a/elasticsearch_serverless/_async/client/indices.py +++ b/elasticsearch_serverless/_async/client/indices.py @@ -51,7 +51,8 @@ async def add_block( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a block to an index. + Add an index block. Limits the operations allowed on an index by blocking specific + operation types. ``_ @@ -238,7 +239,7 @@ async def create( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index. + Create an index. Creates a new index. ``_ @@ -309,8 +310,8 @@ async def create_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a data stream. You must have a matching index template with data stream - enabled. + Create a data stream. Creates a data stream. You must have a matching index template + with data stream enabled. ``_ @@ -362,7 +363,7 @@ async def data_streams_stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves statistics for one or more data streams. + Get data stream stats. Retrieves statistics for one or more data streams. ``_ @@ -425,7 +426,7 @@ async def delete( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more indices. + Delete indices. Deletes one or more indices. ``_ @@ -497,7 +498,7 @@ async def delete_alias( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a data stream or index from an alias. + Delete an alias. Removes a data stream or index from an alias. ``_ @@ -563,8 +564,8 @@ async def delete_data_lifecycle( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes the data lifecycle from a data stream rendering it not managed by the - data stream lifecycle + Delete data stream lifecycles. Removes the data stream lifecycle from a data + stream, rendering it not managed by the data stream lifecycle. ``_ @@ -623,7 +624,7 @@ async def delete_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more data streams and their backing indices. + Delete data streams. Deletes one or more data streams and their backing indices. ``_ @@ -738,7 +739,8 @@ async def exists( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if a data stream, index, or alias exists. + Check indices. Checks if one or more indices, index aliases, or data streams + exist. 
``_ @@ -816,7 +818,7 @@ async def exists_alias( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if an alias exists. + Check aliases. Checks if one or more data stream or index aliases exist. ``_ @@ -938,8 +940,10 @@ async def explain_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about the index's current data stream lifecycle, such as - any potential encountered error, time since creation etc. + Get the status for a data stream lifecycle. Retrieves information about an index + or data stream’s current data stream lifecycle status, such as time since index + creation, time since rollover, the lifecycle configuration managing the index, + or any errors encountered during lifecycle execution. ``_ @@ -1010,8 +1014,8 @@ async def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about one or more indices. For data streams, the API returns - information about the stream’s backing indices. + Get index information. Returns information about one or more indices. For data + streams, the API returns information about the stream’s backing indices. ``_ @@ -1100,7 +1104,7 @@ async def get_alias( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more aliases. + Get aliases. Retrieves information for one or more data stream or index aliases. ``_ @@ -1181,7 +1185,8 @@ async def get_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves the data stream lifecycle configuration of one or more data streams. + Get data stream lifecycles. Retrieves the data stream lifecycle configuration + of one or more data streams. ``_ @@ -1240,7 +1245,7 @@ async def get_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about one or more data streams. + Get data streams. Retrieves information about one or more data streams. ``_ @@ -1374,8 +1379,8 @@ async def get_mapping( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves mapping definitions for one or more indices. For data streams, the - API retrieves mappings for the stream’s backing indices. + Get mapping definitions. Retrieves mapping definitions for one or more indices. + For data streams, the API retrieves mappings for the stream’s backing indices. ``_ @@ -1461,8 +1466,8 @@ async def get_settings( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns setting information for one or more indices. For data streams, returns - setting information for the stream’s backing indices. + Get index settings. Returns setting information for one or more indices. For + data streams, returns setting information for the stream’s backing indices. ``_ @@ -1546,14 +1551,14 @@ async def migrate_to_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Converts an index alias to a data stream. You must have a matching index template - that is data stream enabled. The alias must meet the following criteria: The - alias must have a write index; All indices for the alias must have a `@timestamp` - field mapping of a `date` or `date_nanos` field type; The alias must not have - any filters; The alias must not use custom routing. If successful, the request - removes the alias and creates a data stream with the same name. The indices for - the alias become hidden backing indices for the stream. 
The write index for the - alias becomes the write index for the stream. + Convert an index alias to a data stream. Converts an index alias to a data stream. + You must have a matching index template that is data stream enabled. The alias + must meet the following criteria: The alias must have a write index; All indices + for the alias must have a `@timestamp` field mapping of a `date` or `date_nanos` + field type; The alias must not have any filters; The alias must not use custom + routing. If successful, the request removes the alias and creates a data stream + with the same name. The indices for the alias become hidden backing indices for + the stream. The write index for the alias becomes the write index for the stream. ``_ @@ -1596,7 +1601,8 @@ async def modify_data_stream( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs one or more data stream modification actions in a single atomic operation. + Update data streams. Performs one or more data stream modification actions in + a single atomic operation. ``_ @@ -1660,7 +1666,7 @@ async def put_alias( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. ``_ @@ -1766,7 +1772,8 @@ async def put_data_lifecycle( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Update the data lifecycle of the specified data streams. + Update data stream lifecycles. Update the data stream lifecycle of the specified + data streams. ``_ @@ -2031,9 +2038,9 @@ async def put_mapping( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds new fields to an existing data stream or index. You can also use this API - to change the search settings of existing fields. For data streams, these changes - are applied to all backing indices by default. + Update field mappings. Adds new fields to an existing data stream or index. You + can also use this API to change the search settings of existing fields. For data + streams, these changes are applied to all backing indices by default. ``_ @@ -2164,8 +2171,8 @@ async def put_settings( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Changes a dynamic index setting in real time. For data streams, index setting - changes are applied to all backing indices by default. + Update index settings. Changes dynamic index settings in real time. For data + streams, index setting changes are applied to all backing indices by default. ``_ @@ -2360,9 +2367,9 @@ async def refresh( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - A refresh makes recent operations performed on one or more indices available - for search. For data streams, the API runs the refresh operation on the stream’s - backing indices. + Refresh an index. A refresh makes recent operations performed on one or more + indices available for search. For data streams, the API runs the refresh operation + on the stream’s backing indices. ``_ @@ -2495,7 +2502,7 @@ async def rollover( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index for a data stream or index alias. + Roll over to a new index. Creates a new index for a data stream or index alias. 
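The data-stream endpoints above chain together in practice. A sketch, assuming a matching data-stream-enabled index template already exists and that `put_data_lifecycle` accepts a `data_retention` body field as in the stateful client (names are illustrative, `es` as before):

    # Requires a matching index template with data streams enabled.
    await es.indices.create_data_stream(name="logs-myapp-default")
    # Attach a lifecycle; `data_retention` is assumed per the stateful client's API.
    await es.indices.put_data_lifecycle(name="logs-myapp-default", data_retention="7d")
    # Roll the stream over to a fresh backing index.
    await es.indices.rollover(alias="logs-myapp-default")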
``_ @@ -2801,7 +2808,7 @@ async def update_aliases( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. ``_ @@ -2876,7 +2883,7 @@ async def validate_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Validates a potentially expensive query without executing it. + Validate a query. Validates a query without running it. ``_ diff --git a/elasticsearch_serverless/_async/client/license.py b/elasticsearch_serverless/_async/client/license.py index cadc727..70bb65c 100644 --- a/elasticsearch_serverless/_async/client/license.py +++ b/elasticsearch_serverless/_async/client/license.py @@ -37,9 +37,9 @@ async def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - This API returns information about the type of license, when it was issued, and - when it expires, for example. For more information about the different types - of licenses, see https://www.elastic.co/subscriptions. + Get license information. Returns information about your Elastic license, including + its type, its status, when it was issued, and when it expires. For more information + about the different types of licenses, refer to [Elastic Stack subscriptions](https://www.elastic.co/subscriptions). ``_ diff --git a/elasticsearch_serverless/_async/client/ml.py b/elasticsearch_serverless/_async/client/ml.py index a2eb508..2f7d82f 100644 --- a/elasticsearch_serverless/_async/client/ml.py +++ b/elasticsearch_serverless/_async/client/ml.py @@ -42,7 +42,7 @@ async def close_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Close anomaly detection jobs A job can be opened and closed multiple times throughout + Close anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating @@ -115,7 +115,8 @@ async def delete_calendar( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes all scheduled events from a calendar, then deletes it. + Delete a calendar. Removes all scheduled events from a calendar, then deletes + it. ``_ @@ -156,7 +157,7 @@ async def delete_calendar_event( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes scheduled events from a calendar. + Delete events from a calendar. ``_ @@ -204,7 +205,7 @@ async def delete_calendar_job( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes anomaly detection jobs from a calendar. + Delete anomaly jobs from a calendar. ``_ @@ -253,7 +254,7 @@ async def delete_data_frame_analytics( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a data frame analytics job. + Delete a data frame analytics job. ``_ @@ -301,7 +302,7 @@ async def delete_datafeed( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing datafeed. + Delete a datafeed. ``_ @@ -348,7 +349,7 @@ async def delete_filter( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a filter. If an anomaly detection job references the filter, you cannot + Delete a filter. If an anomaly detection job references the filter, you cannot delete the filter. 
You must update or delete the job before you can delete the filter. @@ -393,7 +394,7 @@ async def delete_job( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an anomaly detection job. All job configuration, model state and results + Delete an anomaly detection job. All job configuration, model state and results are deleted. It is not currently possible to delete multiple jobs using wildcards or a comma separated list. If you delete a job that has a datafeed, the request first tries to delete the datafeed. This behavior is equivalent to calling the @@ -452,8 +453,8 @@ async def delete_trained_model( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing trained inference model that is currently not referenced - by an ingest pipeline. + Delete an unreferenced trained model. The request deletes a trained inference + model that is not referenced by an ingest pipeline. ``_ @@ -498,9 +499,9 @@ async def delete_trained_model_alias( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a trained model alias. This API deletes an existing model alias that - refers to a trained model. If the model alias is missing or refers to a model - other than the one identified by the `model_id`, this API returns an error. + Delete a trained model alias. This API deletes an existing model alias that refers + to a trained model. If the model alias is missing or refers to a model other + than the one identified by the `model_id`, this API returns an error. ``_ @@ -555,9 +556,9 @@ async def estimate_model_memory( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Makes an estimation of the memory usage for an anomaly detection job model. It - is based on analysis configuration details for the job and cardinality estimates - for the fields it references. + Estimate job model memory usage. Makes an estimation of the memory usage for + an anomaly detection job model. It is based on analysis configuration details + for the job and cardinality estimates for the fields it references. ``_ @@ -622,10 +623,10 @@ async def evaluate_data_frame( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Evaluates the data frame analytics for an annotated index. The API packages together - commonly used evaluation metrics for various types of machine learning features. - This has been designed for use on indexes created by data frame analytics. Evaluation - requires both a ground truth field and an analytics result field to be present. + Evaluate data frame analytics. The API packages together commonly used evaluation + metrics for various types of machine learning features. This has been designed + for use on indexes created by data frame analytics. Evaluation requires both + a ground truth field and an analytics result field to be present. ``_ @@ -1665,12 +1666,12 @@ async def open_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Opens one or more anomaly detection jobs. An anomaly detection job must be opened - in order for it to be ready to receive and analyze data. It can be opened and - closed multiple times throughout its lifecycle. When you open a new job, it starts - with an empty model. When you open an existing job, the most recent model state - is automatically loaded. The job is ready to resume its analysis from where it - left off, once new data is received. + Open anomaly detection jobs. 
An anomaly detection job must be opened in order + for it to be ready to receive and analyze data. It can be opened and closed multiple + times throughout its lifecycle. When you open a new job, it starts with an empty + model. When you open an existing job, the most recent model state is automatically + loaded. The job is ready to resume its analysis from where it left off, once + new data is received. ``_ @@ -2471,8 +2472,8 @@ async def put_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Instantiates an anomaly detection job. If you include a `datafeed_config`, you - must have read index privileges on the source index. + Create an anomaly detection job. If you include a `datafeed_config`, you must + have read index privileges on the source index. ``_ diff --git a/elasticsearch_serverless/_async/client/query_rules.py b/elasticsearch_serverless/_async/client/query_rules.py index 203313e..9fc6ce5 100644 --- a/elasticsearch_serverless/_async/client/query_rules.py +++ b/elasticsearch_serverless/_async/client/query_rules.py @@ -251,7 +251,7 @@ async def list_rulesets( ) @_rewrite_parameters( - body_fields=("actions", "criteria", "type"), + body_fields=("actions", "criteria", "type", "priority"), ) async def put_rule( self, @@ -259,12 +259,15 @@ async def put_rule( ruleset_id: str, rule_id: str, actions: t.Optional[t.Mapping[str, t.Any]] = None, - criteria: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + criteria: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, type: t.Optional[t.Union["t.Literal['pinned']", str]] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, + priority: t.Optional[int] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -279,6 +282,7 @@ async def put_rule( :param actions: :param criteria: :param type: + :param priority: """ if ruleset_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'ruleset_id'") @@ -312,6 +316,8 @@ async def put_rule( __body["criteria"] = criteria if type is not None: __body["type"] = type + if priority is not None: + __body["priority"] = priority __headers = {"accept": "application/json", "content-type": "application/json"} return await self.perform_request( # type: ignore[return-value] "PUT", @@ -330,7 +336,9 @@ async def put_ruleset( self, *, ruleset_id: str, - rules: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + rules: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_async/client/security.py b/elasticsearch_serverless/_async/client/security.py index 9291e6c..31d98b3 100644 --- a/elasticsearch_serverless/_async/client/security.py +++ b/elasticsearch_serverless/_async/client/security.py @@ -35,12 +35,12 @@ async def authenticate( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Enables you to submit a request with a basic auth header to authenticate a user - and retrieve information about the authenticated user. 
A successful call returns - a JSON structure that shows user information such as their username, the roles - that are assigned to the user, any assigned metadata, and information about the - realms that authenticated and authorized the user. If the user cannot be authenticated, - this API returns a 401 status code. + Authenticate a user. Authenticates a user and returns information about the authenticated + user. Include the user information in a [basic auth header](https://en.wikipedia.org/wiki/Basic_access_authentication). + A successful call returns a JSON structure that shows user information such as + their username, the roles that are assigned to the user, any assigned metadata, + and information about the realms that authenticated and authorized the user. + If the user cannot be authenticated, this API returns a 401 status code. ``_ """ @@ -85,11 +85,11 @@ async def create_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an API key for access without requiring basic authentication. A successful - request returns a JSON structure that contains the API key, its unique id, and - its name. If applicable, it also returns expiration information for the API key - in milliseconds. NOTE: By default, API keys never expire. You can specify expiration - information when you create the API keys. + Create an API key. Creates an API key for access without requiring basic authentication. + A successful request returns a JSON structure that contains the API key, its + unique id, and its name. If applicable, it also returns expiration information + for the API key in milliseconds. NOTE: By default, API keys never expire. You + can specify expiration information when you create the API keys. ``_ @@ -163,10 +163,11 @@ async def get_api_key( with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more API keys. NOTE: If you have only the `manage_own_api_key` - privilege, this API returns only the API keys that you own. If you have `read_security`, - `manage_api_key` or greater privileges (including `manage_security`), this API - returns all API keys regardless of ownership. + Get API key information. Retrieves information for one or more API keys. NOTE: + If you have only the `manage_own_api_key` privilege, this API returns only the + API keys that you own. If you have `read_security`, `manage_api_key` or greater + privileges (including `manage_security`), this API returns all API keys regardless + of ownership. 
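A sketch of the API-key round trip these docstrings describe (`es` as before; the key name and expiration are illustrative):

    # Keys never expire unless `expiration` is set at creation time.
    created = await es.security.create_api_key(name="ci-key", expiration="7d")
    info = await es.security.get_api_key(id=created["id"])
    print(info["api_keys"][0]["name"])
    # Invalidate by ID once the key is no longer needed.
    await es.security.invalidate_api_key(ids=[created["id"]])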
``_ @@ -241,7 +242,7 @@ async def has_privileges( cluster: t.Optional[ t.Sequence[ t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_connector_secrets', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", + "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", str, ] ] @@ -254,7 +255,8 @@ async def has_privileges( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Determines whether the specified user has a specified list of privileges. + Check user privileges. Determines whether the specified user has a specified + list of privileges. ``_ @@ -317,13 +319,13 @@ async def invalidate_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Invalidates one or more API keys. The `manage_api_key` privilege allows deleting - any API keys. The `manage_own_api_key` only allows deleting API keys that are - owned by the user. In addition, with the `manage_own_api_key` privilege, an invalidation - request must be issued in one of the three formats: - Set the parameter `owner=true`. - - Or, set both `username` and `realm_name` to match the user’s identity. - Or, - if the request is issued by an API key, i.e. an API key invalidates itself, specify - its ID in the `ids` field. + Invalidate API keys. 
Invalidates one or more API keys. The `manage_api_key` privilege + allows deleting any API keys. The `manage_own_api_key` only allows deleting API + keys that are owned by the user. In addition, with the `manage_own_api_key` privilege, + an invalidation request must be issued in one of the three formats: - Set the + parameter `owner=true`. - Or, set both `username` and `realm_name` to match the + user’s identity. - Or, if the request is issued by an API key, i.e. an API key + invalidates itself, specify its ID in the `ids` field. ``_ @@ -416,8 +418,8 @@ async def query_api_keys( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for API keys in a paginated manner. You can optionally - filter the results with a query. + Query API keys. Retrieves a paginated list of API keys and their information. + You can optionally filter the results with a query. ``_ @@ -534,22 +536,22 @@ async def update_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates attributes of an existing API key. Users can only update API keys that - they created or that were granted to them. Use this API to update API keys created - by the create API Key or grant API Key APIs. If you need to apply the same update - to many API keys, you can use bulk update API Keys to reduce overhead. It’s not - possible to update expired API keys, or API keys that have been invalidated by - invalidate API Key. This API supports updates to an API key’s access scope and - metadata. The access scope of an API key is derived from the `role_descriptors` - you specify in the request, and a snapshot of the owner user’s permissions at - the time of the request. The snapshot of the owner’s permissions is updated automatically - on every call. If you don’t specify `role_descriptors` in the request, a call - to this API might still change the API key’s access scope. This change can occur - if the owner user’s permissions have changed since the API key was created or - last modified. To update another user’s API key, use the `run_as` feature to - submit a request on behalf of another user. IMPORTANT: It’s not possible to use - an API key as the authentication credential for this API. To update an API key, - the owner user’s credentials are required. + Update an API key. Updates attributes of an existing API key. Users can only + update API keys that they created or that were granted to them. Use this API + to update API keys created by the create API Key or grant API Key APIs. If you + need to apply the same update to many API keys, you can use bulk update API Keys + to reduce overhead. It’s not possible to update expired API keys, or API keys + that have been invalidated by invalidate API Key. This API supports updates to + an API key’s access scope and metadata. The access scope of an API key is derived + from the `role_descriptors` you specify in the request, and a snapshot of the + owner user’s permissions at the time of the request. The snapshot of the owner’s + permissions is updated automatically on every call. If you don’t specify `role_descriptors` + in the request, a call to this API might still change the API key’s access scope. + This change can occur if the owner user’s permissions have changed since the + API key was created or last modified. To update another user’s API key, use the + `run_as` feature to submit a request on behalf of another user. 
IMPORTANT: It’s + not possible to use an API key as the authentication credential for this API. + To update an API key, the owner user’s credentials are required. ``_ diff --git a/elasticsearch_serverless/_async/client/tasks.py b/elasticsearch_serverless/_async/client/tasks.py index 9306a84..648d288 100644 --- a/elasticsearch_serverless/_async/client/tasks.py +++ b/elasticsearch_serverless/_async/client/tasks.py @@ -38,7 +38,8 @@ async def get( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about a task. + Get task information. Returns information about the tasks currently executing + in the cluster. ``_ diff --git a/elasticsearch_serverless/_async/client/transform.py b/elasticsearch_serverless/_async/client/transform.py index faa6557..e70dde2 100644 --- a/elasticsearch_serverless/_async/client/transform.py +++ b/elasticsearch_serverless/_async/client/transform.py @@ -39,7 +39,7 @@ async def delete_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a transform. + Delete a transform. Deletes a transform. ``_ @@ -99,7 +99,7 @@ async def get_transform( size: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves configuration information for transforms. + Get transforms. Retrieves configuration information for transforms. ``_ @@ -168,7 +168,7 @@ async def get_transform_stats( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves usage information for transforms. + Get transform stats. Retrieves usage information for transforms. ``_ @@ -249,10 +249,12 @@ async def preview_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Previews a transform. It returns a maximum of 100 results. The calculations are - based on all the current data in the source index. It also generates a list of - mappings and settings for the destination index. These values are determined - based on the field types of the source index and the transform aggregations. + Preview a transform. Generates a preview of the results that you will get when + you create a transform with the same configuration. It returns a maximum of 100 + results. The calculations are based on all the current data in the source index. + It also generates a list of mappings and settings for the destination index. + These values are determined based on the field types of the source index and + the transform aggregations. ``_ @@ -369,26 +371,27 @@ async def put_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a transform. A transform copies data from source indices, transforms - it, and persists it into an entity-centric destination index. You can also think - of the destination index as a two-dimensional tabular data structure (known as - a data frame). The ID for each document in the data frame is generated from a - hash of the entity, so there is a unique row per entity. You must choose either - the latest or pivot method for your transform; you cannot use both in a single - transform. If you choose to use the pivot method for your transform, the entities - are defined by the set of `group_by` fields in the pivot object. If you choose - to use the latest method, the entities are defined by the `unique_key` field - values in the latest object. 
You must have `create_index`, `index`, and `read` - privileges on the destination index and `read` and `view_index_metadata` privileges - on the source indices. When Elasticsearch security features are enabled, the - transform remembers which roles the user that created it had at the time of creation - and uses those same roles. If those roles do not have the required privileges - on the source and destination indices, the transform fails when it attempts unauthorized - operations. NOTE: You must use Kibana or this API to create a transform. Do not - add a transform directly into any `.transform-internal*` indices using the Elasticsearch - index API. If Elasticsearch security features are enabled, do not give users - any privileges on `.transform-internal*` indices. If you used transforms prior - to 7.5, also do not give users any privileges on `.data-frame-internal*` indices. + Create a transform. Creates a transform. A transform copies data from source + indices, transforms it, and persists it into an entity-centric destination index. + You can also think of the destination index as a two-dimensional tabular data + structure (known as a data frame). The ID for each document in the data frame + is generated from a hash of the entity, so there is a unique row per entity. + You must choose either the latest or pivot method for your transform; you cannot + use both in a single transform. If you choose to use the pivot method for your + transform, the entities are defined by the set of `group_by` fields in the pivot + object. If you choose to use the latest method, the entities are defined by the + `unique_key` field values in the latest object. You must have `create_index`, + `index`, and `read` privileges on the destination index and `read` and `view_index_metadata` + privileges on the source indices. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. NOTE: You must use Kibana or this API to create + a transform. Do not add a transform directly into any `.transform-internal*` + indices using the Elasticsearch index API. If Elasticsearch security features + are enabled, do not give users any privileges on `.transform-internal*` indices. + If you used transforms prior to 7.5, also do not give users any privileges on + `.data-frame-internal*` indices. ``_ @@ -488,9 +491,9 @@ async def reset_transform( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Resets a transform. Before you can reset it, you must stop it; alternatively, - use the `force` query parameter. If the destination index was created by the - transform, it is deleted. + Reset a transform. Resets a transform. Before you can reset it, you must stop + it; alternatively, use the `force` query parameter. If the destination index + was created by the transform, it is deleted. ``_ @@ -538,10 +541,11 @@ async def schedule_now_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Schedules now a transform. If you _schedule_now a transform, it will process - the new data instantly, without waiting for the configured frequency interval. 
- After _schedule_now API is called, the transform will be processed again at now - + frequency unless _schedule_now API is called again in the meantime. + Schedule a transform to start now. Instantly runs a transform to process data. + If you _schedule_now a transform, it will process the new data instantly, without + waiting for the configured frequency interval. After _schedule_now API is called, + the transform will be processed again at now + frequency unless _schedule_now + API is called again in the meantime. ``_ @@ -588,23 +592,24 @@ async def start_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Starts a transform. When you start a transform, it creates the destination index - if it does not already exist. The `number_of_shards` is set to `1` and the `auto_expand_replicas` - is set to `0-1`. If it is a pivot transform, it deduces the mapping definitions - for the destination index from the source indices and the transform aggregations. - If fields in the destination index are derived from scripts (as in the case of - `scripted_metric` or `bucket_script` aggregations), the transform uses dynamic - mappings unless an index template exists. If it is a latest transform, it does - not deduce mapping definitions; it uses dynamic mappings. To use explicit mappings, - create the destination index before you start the transform. Alternatively, you - can create an index template, though it does not affect the deduced mappings - in a pivot transform. When the transform starts, a series of validations occur - to ensure its success. If you deferred validation when you created the transform, - they occur when you start the transform—​with the exception of privilege checks. - When Elasticsearch security features are enabled, the transform remembers which - roles the user that created it had at the time of creation and uses those same - roles. If those roles do not have the required privileges on the source and destination - indices, the transform fails when it attempts unauthorized operations. + Start a transform. Starts a transform. When you start a transform, it creates + the destination index if it does not already exist. The `number_of_shards` is + set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot transform, + it deduces the mapping definitions for the destination index from the source + indices and the transform aggregations. If fields in the destination index are + derived from scripts (as in the case of `scripted_metric` or `bucket_script` + aggregations), the transform uses dynamic mappings unless an index template exists. + If it is a latest transform, it does not deduce mapping definitions; it uses + dynamic mappings. To use explicit mappings, create the destination index before + you start the transform. Alternatively, you can create an index template, though + it does not affect the deduced mappings in a pivot transform. When the transform + starts, a series of validations occur to ensure its success. If you deferred + validation when you created the transform, they occur when you start the transform—​with + the exception of privilege checks. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. 
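Since `put_transform`, `start_transform`, and `schedule_now_transform` travel together in practice, here is a minimal pivot-transform sketch under stated assumptions: the endpoint, credentials, index names, and field names are invented for illustration, and the source index is assumed to already exist.

```python
from elasticsearch_serverless import Elasticsearch

# Placeholder endpoint and credentials, for illustration only.
client = Elasticsearch("https://my-project.es.example.io", api_key="<api-key>")

# A pivot transform: entities are defined by the `group_by` fields.
client.transform.put_transform(
    transform_id="customer-spend",
    source={"index": "ecommerce-orders"},  # assumed existing source index
    dest={"index": "customer-spend"},
    pivot={
        "group_by": {"customer_id": {"terms": {"field": "customer_id"}}},
        "aggregations": {"total_spend": {"sum": {"field": "order_total"}}},
    },
    frequency="5m",
)

# Starting the transform creates the destination index if needed.
client.transform.start_transform(transform_id="customer-spend")

# Process new data immediately instead of waiting for `frequency`.
client.transform.schedule_now_transform(transform_id="customer-spend")
```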
``_ @@ -658,7 +663,7 @@ async def stop_transform( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Stops one or more transforms. + Stop transforms. Stops one or more transforms. ``_ @@ -751,13 +756,14 @@ async def update_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates certain properties of a transform. All updated properties except `description` - do not take effect until after the transform starts the next checkpoint, thus - there is data consistency in each checkpoint. To use this API, you must have - `read` and `view_index_metadata` privileges for the source indices. You must - also have `index` and `read` privileges for the destination index. When Elasticsearch - security features are enabled, the transform remembers which roles the user who - updated it had at the time of update and runs with those privileges. + Update a transform. Updates certain properties of a transform. All updated properties + except `description` do not take effect until after the transform starts the + next checkpoint, thus there is data consistency in each checkpoint. To use this + API, you must have `read` and `view_index_metadata` privileges for the source + indices. You must also have `index` and `read` privileges for the destination + index. When Elasticsearch security features are enabled, the transform remembers + which roles the user who updated it had at the time of update and runs with those + privileges. ``_ diff --git a/elasticsearch_serverless/_sync/client/__init__.py b/elasticsearch_serverless/_sync/client/__init__.py index 916fa5e..54bf5b5 100644 --- a/elasticsearch_serverless/_sync/client/__init__.py +++ b/elasticsearch_serverless/_sync/client/__init__.py @@ -466,8 +466,9 @@ def bulk( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs multiple indexing or delete operations in a single API call. This reduces - overhead and can greatly increase indexing speed. + Bulk index or delete documents. Performs multiple indexing or delete operations + in a single API call. This reduces overhead and can greatly increase indexing + speed. ``_ @@ -823,9 +824,9 @@ def create( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. - If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. ``_ @@ -929,7 +930,7 @@ def delete( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a JSON document from the specified index. + Delete a document. Removes a JSON document from the specified index. ``_ @@ -1053,7 +1054,7 @@ def delete_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes documents that match the specified query. + Delete documents. Deletes documents that match the specified query. ``_ @@ -1234,7 +1235,7 @@ def delete_script( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a stored script or search template. + Delete a script or search template. Deletes a stored script or search template. ``_ @@ -1302,7 +1303,7 @@ def exists( ] = None, ) -> HeadApiResponse: """ - Checks if a document in an index exists. + Check a document. 
Checks if a specified document exists. ``_ @@ -1403,7 +1404,7 @@ def exists_source( ] = None, ) -> HeadApiResponse: """ - Checks if a document's `_source` is stored. + Check for a document source. Checks if a document's `_source` is stored. ``_ @@ -1503,8 +1504,8 @@ def explain( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about why a specific document matches (or doesn’t match) - a query. + Explain a document match result. Returns information about why a specific document + matches, or doesn’t match, a query. ``_ @@ -1742,7 +1743,8 @@ def get( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns a document. + Get a document by its ID. Retrieves the document with the specified ID from an + index. ``_ @@ -1833,7 +1835,7 @@ def get_script( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a stored script or search template. + Get a script or search template. Retrieves a stored script or search template. ``_ @@ -1895,7 +1897,7 @@ def get_source( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns the source of a document. + Get a document's source. Returns the source of a document. ``_ @@ -1997,9 +1999,9 @@ def index( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. - If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. ``_ @@ -2108,7 +2110,7 @@ def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns basic information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ """ @@ -2686,7 +2688,8 @@ def put_script( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a stored script or search template. + Create or update a script or search template. Creates or updates a stored script + or search template. ``_ @@ -2866,9 +2869,9 @@ def reindex( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to copy documents from one index to another, optionally filtering the - source documents by a query, changing the destination index settings, or fetching - the documents from a remote cluster. + Reindex documents. Copies documents from a source to a destination. The source + can be any existing index, alias, or data stream. The destination must differ + from the source. For example, you cannot reindex a data stream into itself. ``_ @@ -3038,7 +3041,7 @@ def scripts_painless_execute( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a script and returns a result. + Run a script. Runs a script and returns a result. ``_ @@ -3708,8 +3711,7 @@ def search_mvt( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> BinaryApiResponse: """ - Searches a vector tile for geospatial values. Returns results as a binary Mapbox - vector tile. + Search a vector tile. Searches a vector tile for geospatial values. ``_ @@ -4111,8 +4113,8 @@ def termvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information and statistics about terms in the fields of a particular - document. + Get term vector information. 
Returns information and statistics about terms in + the fields of a particular document. ``_ @@ -4254,7 +4256,8 @@ def update( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates a document with a script or partial document. + Update a document. Updates a document by running a script or passing a partial + document. ``_ @@ -4419,9 +4422,9 @@ def update_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates documents that match the specified query. If no query is specified, performs - an update on every document in the data stream or index without modifying the - source, which is useful for picking up mapping changes. + Update documents. Updates documents that match the specified query. If no query + is specified, performs an update on every document in the data stream or index + without modifying the source, which is useful for picking up mapping changes. ``_ diff --git a/elasticsearch_serverless/_sync/client/cat.py b/elasticsearch_serverless/_sync/client/cat.py index 5c55a17..38d6922 100644 --- a/elasticsearch_serverless/_sync/client/cat.py +++ b/elasticsearch_serverless/_sync/client/cat.py @@ -53,11 +53,11 @@ def aliases( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Retrieves the cluster’s index aliases, including filter and routing information. - The API does not return data stream aliases. IMPORTANT: cat APIs are only intended + Get aliases. Retrieves the cluster’s index aliases, including filter and routing + information. The API does not return data stream aliases. CAT APIs are only intended for human consumption using the command line or the Kibana console. They are - not intended for use by applications. For application consumption, use the aliases - API. + not intended for use by applications. For application consumption, use the /_alias + endpoints. ``_ @@ -142,11 +142,12 @@ def component_templates( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns information about component templates in a cluster. Component templates - are building blocks for constructing index templates that specify index mappings, - settings, and aliases. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get component template API. + Get component templates. Returns information about component templates in a cluster. + Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. CAT APIs are only intended for + human consumption using the command line or Kibana console. They are not intended + for use by applications. For application consumption, use the /_component_template + endpoints. ``_ @@ -227,12 +228,12 @@ def count( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Provides quick access to a document count for a data stream, an index, or an - entire cluster. NOTE: The document count only includes live documents, not deleted - documents which have not yet been removed by the merge process. IMPORTANT: cat + Get a document count. Provides quick access to a document count for a data stream, + an index, or an entire cluster. The document count only includes live documents, + not deleted documents which have not yet been removed by the merge process.
CAT APIs are only intended for human consumption using the command line or Kibana console. They are not intended for use by applications. For application consumption, - use the count API. + use /_count endpoints. ``_ @@ -313,7 +314,7 @@ def help( v: t.Optional[bool] = None, ) -> TextApiResponse: """ - Returns help for the Cat APIs. + Get CAT help. Returns help for the CAT APIs. ``_ @@ -404,16 +405,16 @@ def indices( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns high-level information about indices in a cluster, including backing - indices for data streams. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get index API. Use the cat indices API to - get the following information for each index in a cluster: shard count; document - count; deleted document count; primary store size; total store size of all shards, - including shard replicas. These metrics are retrieved directly from Lucene, which - Elasticsearch uses internally to power indexing and search. As a result, all - document counts include hidden nested documents. To get an accurate count of - Elasticsearch documents, use the cat count or count APIs. + Get index information. Returns high-level information about indices in a cluster, + including backing indices for data streams. Use this request to get the following + information for each index in a cluster: - shard count - document count - deleted + document count - primary store size - total store size of all shards, including + shard replicas These metrics are retrieved directly from Lucene, which Elasticsearch + uses internally to power indexing and search. As a result, all document counts + include hidden nested documents. To get an accurate count of Elasticsearch documents, + use the /_cat/count or _count endpoints. CAT APIs are only intended for human + consumption using the command line or Kibana console. They are not intended for + use by applications. For application consumption, use an index endpoint. ``_ @@ -546,10 +547,10 @@ def ml_data_frame_analytics( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about data frame analytics jobs. - IMPORTANT: cat APIs are only intended for human consumption using the Kibana - console or command line. They are not intended for use by applications. For application - consumption, use the get data frame analytics jobs statistics API. + Get data frame analytics jobs. Returns configuration and usage information about + data frame analytics jobs. CAT APIs are only intended for human consumption using + the Kibana console or command line. They are not intended for use by applications. + For application consumption, use the /_ml/data_frame/analytics endpoints. ``_ @@ -668,12 +669,12 @@ def ml_datafeeds( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about datafeeds. This API returns - a maximum of 10,000 datafeeds. If the Elasticsearch security features are enabled, - you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges - to use this API. IMPORTANT: cat APIs are only intended for human consumption + Get datafeeds. Returns configuration and usage information about datafeeds. This + API returns a maximum of 10,000 datafeeds. 
If the Elasticsearch security features + are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` + cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. - For application consumption, use the get datafeed statistics API. + For application consumption, use the /_ml/datafeeds endpoints. ``_ @@ -798,13 +799,13 @@ def ml_jobs( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information for anomaly detection jobs. This - API returns a maximum of 10,000 jobs. If the Elasticsearch security features - are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` - cluster privileges to use this API. IMPORTANT: cat APIs are only intended for + Get anomaly detection jobs. Returns configuration and usage information for anomaly + detection jobs. This API returns a maximum of 10,000 jobs. If the Elasticsearch + security features are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, + or `manage` cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended - for use by applications. For application consumption, use the get anomaly detection - job statistics API. + for use by applications. For application consumption, use the /_ml/anomaly_detectors + endpoints. ``_ @@ -932,10 +933,10 @@ def ml_trained_models( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about inference trained models. IMPORTANT: - cat APIs are only intended for human consumption using the Kibana console or - command line. They are not intended for use by applications. For application - consumption, use the get trained models statistics API. + Get trained models. Returns configuration and usage information about inference + trained models. CAT APIs are only intended for human consumption using the Kibana + console or command line. They are not intended for use by applications. For application + consumption, use the /_ml/trained_models endpoints. ``_ @@ -1066,10 +1067,10 @@ def transforms( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about transforms. IMPORTANT: cat - APIs are only intended for human consumption using the Kibana console or command - line. They are not intended for use by applications. For application consumption, - use the get transform statistics API. + Get transforms. Returns configuration and usage information about transforms. + CAT APIs are only intended for human consumption using the Kibana console or + command line. They are not intended for use by applications. For application + consumption, use the /_transform endpoints. ``_ diff --git a/elasticsearch_serverless/_sync/client/cluster.py b/elasticsearch_serverless/_sync/client/cluster.py index b9ccbf3..3997f8d 100644 --- a/elasticsearch_serverless/_sync/client/cluster.py +++ b/elasticsearch_serverless/_sync/client/cluster.py @@ -40,8 +40,9 @@ def delete_component_template( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes component templates. Component templates are building blocks for constructing - index templates that specify index mappings, settings, and aliases. 
+ Delete component templates. Deletes component templates. Component templates + are building blocks for constructing index templates that specify index mappings, + settings, and aliases. ``_ @@ -95,7 +96,8 @@ def exists_component_template( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Returns information about whether a particular component template exist + Check component templates. Returns information about whether a particular component + template exists. ``_ @@ -152,7 +154,7 @@ def get_component_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about component templates. + Get component templates. Retrieves information about component templates. ``_ @@ -221,7 +223,7 @@ def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns different information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ @@ -274,17 +276,18 @@ def put_component_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a component template. Component templates are building blocks - for constructing index templates that specify index mappings, settings, and aliases. - An index template can be composed of multiple component templates. To use a component - template, specify it in an index template’s `composed_of` list. Component templates - are only applied to new data streams and indices as part of a matching index - template. Settings and mappings specified directly in the index template or the - create index request override any settings or mappings specified in a component - template. Component templates are only used during index creation. For data streams, - this includes data stream creation and the creation of a stream’s backing indices. - Changes to component templates do not affect existing indices, including a stream’s - backing indices. You can use C-style `/* *\\/` block comments in component templates. + Create or update a component template. Creates or updates a component template. + Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. An index template can be composed + of multiple component templates. To use a component template, specify it in an + index template’s `composed_of` list. Component templates are only applied to + new data streams and indices as part of a matching index template. Settings and + mappings specified directly in the index template or the create index request + override any settings or mappings specified in a component template. Component + templates are only used during index creation. For data streams, this includes + data stream creation and the creation of a stream’s backing indices. Changes + to component templates do not affect existing indices, including a stream’s backing + indices. You can use C-style `/* *\\/` block comments in component templates. You can include comments anywhere in the request body except before the opening curly bracket. diff --git a/elasticsearch_serverless/_sync/client/enrich.py b/elasticsearch_serverless/_sync/client/enrich.py index c6bf5f2..6a855c4 100644 --- a/elasticsearch_serverless/_sync/client/enrich.py +++ b/elasticsearch_serverless/_sync/client/enrich.py @@ -36,7 +36,7 @@ def delete_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing enrich policy and its enrich index. + Delete an enrich policy. 
Deletes an existing enrich policy and its enrich index. ``_ @@ -121,7 +121,7 @@ def get_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about an enrich policy. + Get an enrich policy. Returns information about an enrich policy. ``_ @@ -171,7 +171,7 @@ def put_policy( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an enrich policy. + Create an enrich policy. Creates an enrich policy. ``_ @@ -224,8 +224,8 @@ def stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns enrich coordinator statistics and information about enrich policies that - are currently executing. + Get enrich stats. Returns enrich coordinator statistics and information about + enrich policies that are currently executing. ``_ """ diff --git a/elasticsearch_serverless/_sync/client/esql.py b/elasticsearch_serverless/_sync/client/esql.py index f3acd38..8442238 100644 --- a/elasticsearch_serverless/_sync/client/esql.py +++ b/elasticsearch_serverless/_sync/client/esql.py @@ -26,7 +26,15 @@ class EsqlClient(NamespacedClient): @_rewrite_parameters( - body_fields=("query", "columnar", "filter", "locale", "params"), + body_fields=( + "query", + "columnar", + "filter", + "locale", + "params", + "profile", + "tables", + ), ignore_deprecated_options={"params"}, ) def query( @@ -35,14 +43,21 @@ def query( query: t.Optional[str] = None, columnar: t.Optional[bool] = None, delimiter: t.Optional[str] = None, + drop_null_columns: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, locale: t.Optional[str] = None, - params: t.Optional[t.Sequence[t.Union[None, bool, float, int, str]]] = None, + params: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, pretty: t.Optional[bool] = None, + profile: t.Optional[bool] = None, + tables: t.Optional[ + t.Mapping[str, t.Mapping[str, t.Mapping[str, t.Any]]] + ] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -58,6 +73,10 @@ def query( row represents all the values of a certain column in the results. :param delimiter: The character to use between values within a CSV row. Only valid for the CSV format. + :param drop_null_columns: Should columns that are entirely `null` be removed + from the `columns` and `values` portion of the results? Defaults to `false`. + If `true` then the response will include an extra section under the name + `all_columns` which has the name of all columns. :param filter: Specify a Query DSL query in the filter parameter to filter the set of documents that an ES|QL query runs on. :param format: A short version of the Accept header, e.g. json, yaml. @@ -65,6 +84,12 @@ def query( :param params: To avoid any attempts of hacking or code injection, extract the values in a separate list of parameters. Use question mark placeholders (?) in the query string for each of the parameters. + :param profile: If provided and `true` the response will include an extra `profile` + object with information on how the query was executed. This information is + for human debugging and its format can change at any time but it can give + some insight into the performance of each part of the query. + :param tables: Tables to use with the LOOKUP operation. 
The top level key is + the table name and the next level key is the column name. """ if query is None and body is None: raise ValueError("Empty value passed for parameter 'query'") @@ -74,6 +99,8 @@ def query( __body: t.Dict[str, t.Any] = body if body is not None else {} if delimiter is not None: __query["delimiter"] = delimiter + if drop_null_columns is not None: + __query["drop_null_columns"] = drop_null_columns if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -95,6 +122,10 @@ def query( __body["locale"] = locale if params is not None: __body["params"] = params + if profile is not None: + __body["profile"] = profile + if tables is not None: + __body["tables"] = tables __headers = {"accept": "application/json", "content-type": "application/json"} return self.perform_request( # type: ignore[return-value] "POST", diff --git a/elasticsearch_serverless/_sync/client/indices.py b/elasticsearch_serverless/_sync/client/indices.py index 30ee227..2dce9cc 100644 --- a/elasticsearch_serverless/_sync/client/indices.py +++ b/elasticsearch_serverless/_sync/client/indices.py @@ -51,7 +51,8 @@ def add_block( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a block to an index. + Add an index block. Limits the operations allowed on an index by blocking specific + operation types. ``_ @@ -238,7 +239,7 @@ def create( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index. + Create an index. Creates a new index. ``_ @@ -309,8 +310,8 @@ def create_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a data stream. You must have a matching index template with data stream - enabled. + Create a data stream. Creates a data stream. You must have a matching index template + with data stream enabled. ``_ @@ -362,7 +363,7 @@ def data_streams_stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves statistics for one or more data streams. + Get data stream stats. Retrieves statistics for one or more data streams. ``_ @@ -425,7 +426,7 @@ def delete( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more indices. + Delete indices. Deletes one or more indices. ``_ @@ -497,7 +498,7 @@ def delete_alias( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a data stream or index from an alias. + Delete an alias. Removes a data stream or index from an alias. ``_ @@ -563,8 +564,8 @@ def delete_data_lifecycle( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes the data lifecycle from a data stream rendering it not managed by the - data stream lifecycle + Delete data stream lifecycles. Removes the data stream lifecycle from a data + stream, rendering it not managed by the data stream lifecycle. ``_ @@ -623,7 +624,7 @@ def delete_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more data streams and their backing indices. + Delete data streams. Deletes one or more data streams and their backing indices. ``_ @@ -738,7 +739,8 @@ def exists( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if a data stream, index, or alias exists. + Check indices. Checks if one or more indices, index aliases, or data streams + exist. 
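Circling back to the ES|QL hunk above, a short sketch of the new parameters in action. The endpoint, credentials, index name, and field names are placeholders; `drop_null_columns`, `profile`, and `params` are the parameters this diff documents.

```python
from elasticsearch_serverless import Elasticsearch

# Placeholder endpoint and credentials, for illustration only.
client = Elasticsearch("https://my-project.es.example.io", api_key="<api-key>")

resp = client.esql.query(
    query="FROM web-logs | WHERE status == ? | LIMIT 10",
    params=[200],            # values for the `?` placeholders in the query
    drop_null_columns=True,  # new query parameter: omit all-null columns
    profile=True,            # new body field: include a `profile` object
)
print(resp["columns"])
print(resp["values"])
```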
``_ @@ -816,7 +818,7 @@ def exists_alias( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if an alias exists. + Check aliases. Checks if one or more data stream or index aliases exist. ``_ @@ -938,8 +940,10 @@ def explain_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about the index's current data stream lifecycle, such as - any potential encountered error, time since creation etc. + Get the status for a data stream lifecycle. Retrieves information about an index + or data stream’s current data stream lifecycle status, such as time since index + creation, time since rollover, the lifecycle configuration managing the index, + or any errors encountered during lifecycle execution. ``_ @@ -1010,8 +1014,8 @@ def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about one or more indices. For data streams, the API returns - information about the stream’s backing indices. + Get index information. Returns information about one or more indices. For data + streams, the API returns information about the stream’s backing indices. ``_ @@ -1100,7 +1104,7 @@ def get_alias( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more aliases. + Get aliases. Retrieves information for one or more data stream or index aliases. ``_ @@ -1181,7 +1185,8 @@ def get_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves the data stream lifecycle configuration of one or more data streams. + Get data stream lifecycles. Retrieves the data stream lifecycle configuration + of one or more data streams. ``_ @@ -1240,7 +1245,7 @@ def get_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about one or more data streams. + Get data streams. Retrieves information about one or more data streams. ``_ @@ -1374,8 +1379,8 @@ def get_mapping( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves mapping definitions for one or more indices. For data streams, the - API retrieves mappings for the stream’s backing indices. + Get mapping definitions. Retrieves mapping definitions for one or more indices. + For data streams, the API retrieves mappings for the stream’s backing indices. ``_ @@ -1461,8 +1466,8 @@ def get_settings( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns setting information for one or more indices. For data streams, returns - setting information for the stream’s backing indices. + Get index settings. Returns setting information for one or more indices. For + data streams, returns setting information for the stream’s backing indices. ``_ @@ -1546,14 +1551,14 @@ def migrate_to_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Converts an index alias to a data stream. You must have a matching index template - that is data stream enabled. The alias must meet the following criteria: The - alias must have a write index; All indices for the alias must have a `@timestamp` - field mapping of a `date` or `date_nanos` field type; The alias must not have - any filters; The alias must not use custom routing. If successful, the request - removes the alias and creates a data stream with the same name. The indices for - the alias become hidden backing indices for the stream. The write index for the - alias becomes the write index for the stream. 
+ Convert an index alias to a data stream. Converts an index alias to a data stream. + You must have a matching index template that is data stream enabled. The alias + must meet the following criteria: The alias must have a write index; All indices + for the alias must have a `@timestamp` field mapping of a `date` or `date_nanos` + field type; The alias must not have any filters; The alias must not use custom + routing. If successful, the request removes the alias and creates a data stream + with the same name. The indices for the alias become hidden backing indices for + the stream. The write index for the alias becomes the write index for the stream. ``_ @@ -1596,7 +1601,8 @@ def modify_data_stream( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs one or more data stream modification actions in a single atomic operation. + Update data streams. Performs one or more data stream modification actions in + a single atomic operation. ``_ @@ -1660,7 +1666,7 @@ def put_alias( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. ``_ @@ -1766,7 +1772,8 @@ def put_data_lifecycle( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Update the data lifecycle of the specified data streams. + Update data stream lifecycles. Update the data stream lifecycle of the specified + data streams. ``_ @@ -2031,9 +2038,9 @@ def put_mapping( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds new fields to an existing data stream or index. You can also use this API - to change the search settings of existing fields. For data streams, these changes - are applied to all backing indices by default. + Update field mappings. Adds new fields to an existing data stream or index. You + can also use this API to change the search settings of existing fields. For data + streams, these changes are applied to all backing indices by default. ``_ @@ -2164,8 +2171,8 @@ def put_settings( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Changes a dynamic index setting in real time. For data streams, index setting - changes are applied to all backing indices by default. + Update index settings. Changes dynamic index settings in real time. For data + streams, index setting changes are applied to all backing indices by default. ``_ @@ -2360,9 +2367,9 @@ def refresh( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - A refresh makes recent operations performed on one or more indices available - for search. For data streams, the API runs the refresh operation on the stream’s - backing indices. + Refresh an index. A refresh makes recent operations performed on one or more + indices available for search. For data streams, the API runs the refresh operation + on the stream’s backing indices. ``_ @@ -2495,7 +2502,7 @@ def rollover( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index for a data stream or index alias. + Roll over to a new index. Creates a new index for a data stream or index alias. ``_ @@ -2801,7 +2808,7 @@ def update_aliases( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. 
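Because the data stream lifecycle hunks above are easier to grasp with a concrete call, here is a hedged sketch; the stream name, backing-index name, and retention value are assumptions, not taken from this diff.

```python
from elasticsearch_serverless import Elasticsearch

# Placeholder endpoint and credentials, for illustration only.
client = Elasticsearch("https://my-project.es.example.io", api_key="<api-key>")

# Apply a retention-based lifecycle to a (hypothetical) data stream.
client.indices.put_data_lifecycle(name="logs-myapp-default", data_retention="7d")

# Check the lifecycle status of a (hypothetical) backing index.
resp = client.indices.explain_data_lifecycle(
    index=".ds-logs-myapp-default-2024.05.01-000001"
)
print(resp["indices"])
```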
``_ @@ -2876,7 +2883,7 @@ def validate_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Validates a potentially expensive query without executing it. + Validate a query. Validates a query without running it. ``_ diff --git a/elasticsearch_serverless/_sync/client/license.py b/elasticsearch_serverless/_sync/client/license.py index 4658bf3..b63fecd 100644 --- a/elasticsearch_serverless/_sync/client/license.py +++ b/elasticsearch_serverless/_sync/client/license.py @@ -37,9 +37,9 @@ def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - This API returns information about the type of license, when it was issued, and - when it expires, for example. For more information about the different types - of licenses, see https://www.elastic.co/subscriptions. + Get license information. Returns information about your Elastic license, including + its type, its status, when it was issued, and when it expires. For more information + about the different types of licenses, refer to [Elastic Stack subscriptions](https://www.elastic.co/subscriptions). ``_ diff --git a/elasticsearch_serverless/_sync/client/ml.py b/elasticsearch_serverless/_sync/client/ml.py index 76c89ed..855ba81 100644 --- a/elasticsearch_serverless/_sync/client/ml.py +++ b/elasticsearch_serverless/_sync/client/ml.py @@ -42,7 +42,7 @@ def close_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Close anomaly detection jobs A job can be opened and closed multiple times throughout + Close anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating @@ -115,7 +115,8 @@ def delete_calendar( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes all scheduled events from a calendar, then deletes it. + Delete a calendar. Removes all scheduled events from a calendar, then deletes + it. ``_ @@ -156,7 +157,7 @@ def delete_calendar_event( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes scheduled events from a calendar. + Delete events from a calendar. ``_ @@ -204,7 +205,7 @@ def delete_calendar_job( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes anomaly detection jobs from a calendar. + Delete anomaly jobs from a calendar. ``_ @@ -253,7 +254,7 @@ def delete_data_frame_analytics( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a data frame analytics job. + Delete a data frame analytics job. ``_ @@ -301,7 +302,7 @@ def delete_datafeed( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing datafeed. + Delete a datafeed. ``_ @@ -348,7 +349,7 @@ def delete_filter( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a filter. If an anomaly detection job references the filter, you cannot + Delete a filter. If an anomaly detection job references the filter, you cannot delete the filter. You must update or delete the job before you can delete the filter. @@ -393,7 +394,7 @@ def delete_job( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an anomaly detection job. All job configuration, model state and results + Delete an anomaly detection job. 
All job configuration, model state and results are deleted. It is not currently possible to delete multiple jobs using wildcards or a comma separated list. If you delete a job that has a datafeed, the request first tries to delete the datafeed. This behavior is equivalent to calling the @@ -452,8 +453,8 @@ def delete_trained_model( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing trained inference model that is currently not referenced - by an ingest pipeline. + Delete an unreferenced trained model. The request deletes a trained inference + model that is not referenced by an ingest pipeline. ``_ @@ -498,9 +499,9 @@ def delete_trained_model_alias( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a trained model alias. This API deletes an existing model alias that - refers to a trained model. If the model alias is missing or refers to a model - other than the one identified by the `model_id`, this API returns an error. + Delete a trained model alias. This API deletes an existing model alias that refers + to a trained model. If the model alias is missing or refers to a model other + than the one identified by the `model_id`, this API returns an error. ``_ @@ -555,9 +556,9 @@ def estimate_model_memory( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Makes an estimation of the memory usage for an anomaly detection job model. It - is based on analysis configuration details for the job and cardinality estimates - for the fields it references. + Estimate job model memory usage. Makes an estimation of the memory usage for + an anomaly detection job model. It is based on analysis configuration details + for the job and cardinality estimates for the fields it references. ``_ @@ -622,10 +623,10 @@ def evaluate_data_frame( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Evaluates the data frame analytics for an annotated index. The API packages together - commonly used evaluation metrics for various types of machine learning features. - This has been designed for use on indexes created by data frame analytics. Evaluation - requires both a ground truth field and an analytics result field to be present. + Evaluate data frame analytics. The API packages together commonly used evaluation + metrics for various types of machine learning features. This has been designed + for use on indexes created by data frame analytics. Evaluation requires both + a ground truth field and an analytics result field to be present. ``_ @@ -1665,12 +1666,12 @@ def open_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Opens one or more anomaly detection jobs. An anomaly detection job must be opened - in order for it to be ready to receive and analyze data. It can be opened and - closed multiple times throughout its lifecycle. When you open a new job, it starts - with an empty model. When you open an existing job, the most recent model state - is automatically loaded. The job is ready to resume its analysis from where it - left off, once new data is received. + Open anomaly detection jobs. An anomaly detection job must be opened in order + for it to be ready to receive and analyze data. It can be opened and closed multiple + times throughout its lifecycle. When you open a new job, it starts with an empty + model. When you open an existing job, the most recent model state is automatically + loaded. 
The job is ready to resume its analysis from where it left off, once + new data is received. ``_ @@ -2471,8 +2472,8 @@ def put_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Instantiates an anomaly detection job. If you include a `datafeed_config`, you - must have read index privileges on the source index. + Create an anomaly detection job. If you include a `datafeed_config`, you must + have read index privileges on the source index. ``_ diff --git a/elasticsearch_serverless/_sync/client/query_rules.py b/elasticsearch_serverless/_sync/client/query_rules.py index 3b211b2..19c65c4 100644 --- a/elasticsearch_serverless/_sync/client/query_rules.py +++ b/elasticsearch_serverless/_sync/client/query_rules.py @@ -251,7 +251,7 @@ def list_rulesets( ) @_rewrite_parameters( - body_fields=("actions", "criteria", "type"), + body_fields=("actions", "criteria", "type", "priority"), ) def put_rule( self, @@ -259,12 +259,15 @@ def put_rule( ruleset_id: str, rule_id: str, actions: t.Optional[t.Mapping[str, t.Any]] = None, - criteria: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + criteria: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, type: t.Optional[t.Union["t.Literal['pinned']", str]] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, + priority: t.Optional[int] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -279,6 +282,7 @@ def put_rule( :param actions: :param criteria: :param type: + :param priority: """ if ruleset_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'ruleset_id'") @@ -312,6 +316,8 @@ def put_rule( __body["criteria"] = criteria if type is not None: __body["type"] = type + if priority is not None: + __body["priority"] = priority __headers = {"accept": "application/json", "content-type": "application/json"} return self.perform_request( # type: ignore[return-value] "PUT", @@ -330,7 +336,9 @@ def put_ruleset( self, *, ruleset_id: str, - rules: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + rules: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, diff --git a/elasticsearch_serverless/_sync/client/security.py b/elasticsearch_serverless/_sync/client/security.py index c71c4c6..3b6b41d 100644 --- a/elasticsearch_serverless/_sync/client/security.py +++ b/elasticsearch_serverless/_sync/client/security.py @@ -35,12 +35,12 @@ def authenticate( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Enables you to submit a request with a basic auth header to authenticate a user - and retrieve information about the authenticated user. A successful call returns - a JSON structure that shows user information such as their username, the roles - that are assigned to the user, any assigned metadata, and information about the - realms that authenticated and authorized the user. If the user cannot be authenticated, - this API returns a 401 status code. + Authenticate a user. Authenticates a user and returns information about the authenticated + user. Include the user information in a [basic auth header](https://en.wikipedia.org/wiki/Basic_access_authentication). 
+ A successful call returns a JSON structure that shows user information such as + their username, the roles that are assigned to the user, any assigned metadata, + and information about the realms that authenticated and authorized the user. + If the user cannot be authenticated, this API returns a 401 status code. ``_ """ @@ -85,11 +85,11 @@ def create_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an API key for access without requiring basic authentication. A successful - request returns a JSON structure that contains the API key, its unique id, and - its name. If applicable, it also returns expiration information for the API key - in milliseconds. NOTE: By default, API keys never expire. You can specify expiration - information when you create the API keys. + Create an API key. Creates an API key for access without requiring basic authentication. + A successful request returns a JSON structure that contains the API key, its + unique id, and its name. If applicable, it also returns expiration information + for the API key in milliseconds. NOTE: By default, API keys never expire. You + can specify expiration information when you create the API keys. ``_ @@ -163,10 +163,11 @@ def get_api_key( with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more API keys. NOTE: If you have only the `manage_own_api_key` - privilege, this API returns only the API keys that you own. If you have `read_security`, - `manage_api_key` or greater privileges (including `manage_security`), this API - returns all API keys regardless of ownership. + Get API key information. Retrieves information for one or more API keys. NOTE: + If you have only the `manage_own_api_key` privilege, this API returns only the + API keys that you own. If you have `read_security`, `manage_api_key` or greater + privileges (including `manage_security`), this API returns all API keys regardless + of ownership. 
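And for the query-rules change earlier in this diff (the new `priority` body field on `put_rule`), a minimal sketch; the ruleset ID, rule ID, document IDs, and criteria values are invented for illustration.

```python
from elasticsearch_serverless import Elasticsearch

# Placeholder endpoint and credentials, for illustration only.
client = Elasticsearch("https://my-project.es.example.io", api_key="<api-key>")

client.query_rules.put_rule(
    ruleset_id="my-ruleset",
    rule_id="promote-docs",
    type="pinned",
    criteria=[
        {"type": "exact", "metadata": "user_query", "values": ["docs"]}
    ],
    actions={"ids": ["doc-1", "doc-2"]},  # documents to pin
    priority=1,  # new body field added in this diff
)
```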
``_ @@ -241,7 +242,7 @@ def has_privileges( cluster: t.Optional[ t.Sequence[ t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_connector_secrets', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", + "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", str, ] ] @@ -254,7 +255,8 @@ def has_privileges( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Determines whether the specified user has a specified list of privileges. + Check user privileges. Determines whether the specified user has a specified + list of privileges. ``_ @@ -317,13 +319,13 @@ def invalidate_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Invalidates one or more API keys. The `manage_api_key` privilege allows deleting - any API keys. The `manage_own_api_key` only allows deleting API keys that are - owned by the user. In addition, with the `manage_own_api_key` privilege, an invalidation - request must be issued in one of the three formats: - Set the parameter `owner=true`. - - Or, set both `username` and `realm_name` to match the user’s identity. - Or, - if the request is issued by an API key, i.e. an API key invalidates itself, specify - its ID in the `ids` field. + Invalidate API keys. 
Invalidates one or more API keys. The `manage_api_key` privilege + allows deleting any API keys. The `manage_own_api_key` privilege only allows deleting API + keys that are owned by the user. In addition, with the `manage_own_api_key` privilege, + an invalidation request must be issued in one of three formats: - Set the + parameter `owner=true`. - Or, set both `username` and `realm_name` to match the + user’s identity. - Or, if the request is issued by an API key, i.e., an API key + invalidating itself, specify its ID in the `ids` field. ``_ @@ -416,8 +418,8 @@ def query_api_keys( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for API keys in a paginated manner. You can optionally - filter the results with a query. + Query API keys. Retrieves a paginated list of API keys and their information. + You can optionally filter the results with a query. ``_ @@ -534,22 +536,22 @@ def update_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates attributes of an existing API key. Users can only update API keys that - they created or that were granted to them. Use this API to update API keys created - by the create API Key or grant API Key APIs. If you need to apply the same update - to many API keys, you can use bulk update API Keys to reduce overhead. It’s not - possible to update expired API keys, or API keys that have been invalidated by - invalidate API Key. This API supports updates to an API key’s access scope and - metadata. The access scope of an API key is derived from the `role_descriptors` - you specify in the request, and a snapshot of the owner user’s permissions at - the time of the request. The snapshot of the owner’s permissions is updated automatically - on every call. If you don’t specify `role_descriptors` in the request, a call - to this API might still change the API key’s access scope. This change can occur - if the owner user’s permissions have changed since the API key was created or - last modified. To update another user’s API key, use the `run_as` feature to - submit a request on behalf of another user. IMPORTANT: It’s not possible to use - an API key as the authentication credential for this API. To update an API key, - the owner user’s credentials are required. + Update an API key. Updates attributes of an existing API key. Users can only + update API keys that they created or that were granted to them. Use this API + to update API keys created by the create API key or grant API key APIs. If you + need to apply the same update to many API keys, you can use the bulk update API keys API + to reduce overhead. It’s not possible to update expired API keys or API keys + that have been invalidated by the invalidate API key API. This API supports updates to + an API key’s access scope and metadata. The access scope of an API key is derived + from the `role_descriptors` you specify in the request, and a snapshot of the + owner user’s permissions at the time of the request. The snapshot of the owner’s + permissions is updated automatically on every call. If you don’t specify `role_descriptors` + in the request, a call to this API might still change the API key’s access scope. + This change can occur if the owner user’s permissions have changed since the + API key was created or last modified. To update another user’s API key, use the + `run_as` feature to submit a request on behalf of another user. IMPORTANT: It’s + not possible to use an API key as the authentication credential for this API.
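A short sketch of the query/invalidate pair documented above, assuming placeholder ids and query values; note the self-invalidation path mentioned in the `invalidate_api_key` docstring:

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.com:443", api_key="...")  # placeholders

# Page through keys matching a query; requires `read_security` or stronger
# to see keys beyond your own.
page = client.security.query_api_keys(
    query={"term": {"name": "nightly-ingest-key"}},  # hypothetical key name
    size=10,
)

# Invalidate everything the query matched by passing the ids explicitly.
resp = client.security.invalidate_api_key(
    ids=[k["id"] for k in page["api_keys"]],
)
print(resp["invalidated_api_keys"], resp["error_count"])
```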
+ To update an API key, the owner user’s credentials are required. ``_ diff --git a/elasticsearch_serverless/_sync/client/tasks.py b/elasticsearch_serverless/_sync/client/tasks.py index 6ab0f12..5a01306 100644 --- a/elasticsearch_serverless/_sync/client/tasks.py +++ b/elasticsearch_serverless/_sync/client/tasks.py @@ -38,7 +38,8 @@ def get( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about a task. + Get task information. Returns information about the tasks currently executing + in the cluster. ``_ diff --git a/elasticsearch_serverless/_sync/client/transform.py b/elasticsearch_serverless/_sync/client/transform.py index 3f3de36..7fc8dec 100644 --- a/elasticsearch_serverless/_sync/client/transform.py +++ b/elasticsearch_serverless/_sync/client/transform.py @@ -39,7 +39,7 @@ def delete_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a transform. + Delete a transform. Deletes a transform. ``_ @@ -99,7 +99,7 @@ def get_transform( size: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves configuration information for transforms. + Get transforms. Retrieves configuration information for transforms. ``_ @@ -168,7 +168,7 @@ def get_transform_stats( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves usage information for transforms. + Get transform stats. Retrieves usage information for transforms. ``_ @@ -249,10 +249,12 @@ def preview_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Previews a transform. It returns a maximum of 100 results. The calculations are - based on all the current data in the source index. It also generates a list of - mappings and settings for the destination index. These values are determined - based on the field types of the source index and the transform aggregations. + Preview a transform. Generates a preview of the results that you will get when + you create a transform with the same configuration. It returns a maximum of 100 + results. The calculations are based on all the current data in the source index. + It also generates a list of mappings and settings for the destination index. + These values are determined based on the field types of the source index and + the transform aggregations. ``_ @@ -369,26 +371,27 @@ def put_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a transform. A transform copies data from source indices, transforms - it, and persists it into an entity-centric destination index. You can also think - of the destination index as a two-dimensional tabular data structure (known as - a data frame). The ID for each document in the data frame is generated from a - hash of the entity, so there is a unique row per entity. You must choose either - the latest or pivot method for your transform; you cannot use both in a single - transform. If you choose to use the pivot method for your transform, the entities - are defined by the set of `group_by` fields in the pivot object. If you choose - to use the latest method, the entities are defined by the `unique_key` field - values in the latest object. You must have `create_index`, `index`, and `read` - privileges on the destination index and `read` and `view_index_metadata` privileges - on the source indices. 
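For the `update_api_key` and `tasks.get` docstrings above, a hedged sketch; the key id and task id are hypothetical:

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.com:443", api_key="...")  # placeholders

# Update only the metadata; even without `role_descriptors`, the owner's
# permissions are re-snapshotted, so the key's effective scope may change.
client.security.update_api_key(
    id="VuaCfGcBCdbkQm-e5aOx",  # hypothetical key id
    metadata={"environment": "production"},
)

# Fetch a task by id and block until it completes.
task = client.tasks.get(task_id="oTUltX4IQMOUUVeiohTt8A:124", wait_for_completion=True)
print(task["completed"])
```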
When Elasticsearch security features are enabled, the - transform remembers which roles the user that created it had at the time of creation - and uses those same roles. If those roles do not have the required privileges - on the source and destination indices, the transform fails when it attempts unauthorized - operations. NOTE: You must use Kibana or this API to create a transform. Do not - add a transform directly into any `.transform-internal*` indices using the Elasticsearch - index API. If Elasticsearch security features are enabled, do not give users - any privileges on `.transform-internal*` indices. If you used transforms prior - to 7.5, also do not give users any privileges on `.data-frame-internal*` indices. + Create a transform. Creates a transform. A transform copies data from source + indices, transforms it, and persists it into an entity-centric destination index. + You can also think of the destination index as a two-dimensional tabular data + structure (known as a data frame). The ID for each document in the data frame + is generated from a hash of the entity, so there is a unique row per entity. + You must choose either the latest or pivot method for your transform; you cannot + use both in a single transform. If you choose to use the pivot method for your + transform, the entities are defined by the set of `group_by` fields in the pivot + object. If you choose to use the latest method, the entities are defined by the + `unique_key` field values in the latest object. You must have `create_index`, + `index`, and `read` privileges on the destination index and `read` and `view_index_metadata` + privileges on the source indices. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. NOTE: You must use Kibana or this API to create + a transform. Do not add a transform directly into any `.transform-internal*` + indices using the Elasticsearch index API. If Elasticsearch security features + are enabled, do not give users any privileges on `.transform-internal*` indices. + If you used transforms prior to 7.5, also do not give users any privileges on + `.data-frame-internal*` indices. ``_ @@ -488,9 +491,9 @@ def reset_transform( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Resets a transform. Before you can reset it, you must stop it; alternatively, - use the `force` query parameter. If the destination index was created by the - transform, it is deleted. + Reset a transform. Resets a transform. Before you can reset it, you must stop + it; alternatively, use the `force` query parameter. If the destination index + was created by the transform, it is deleted. ``_ @@ -538,10 +541,11 @@ def schedule_now_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Schedules now a transform. If you _schedule_now a transform, it will process - the new data instantly, without waiting for the configured frequency interval. - After _schedule_now API is called, the transform will be processed again at now - + frequency unless _schedule_now API is called again in the meantime. + Schedule a transform to start now. Instantly runs a transform to process data. 
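A sketch tying together the `preview_transform` and `put_transform` docstrings above, using a hypothetical pivot configuration; the index names, transform id, and aggregation are examples only:

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.com:443", api_key="...")  # placeholders

# A pivot transform: entities are defined by the `group_by` fields.
config = {
    "source": {"index": "kibana_sample_data_ecommerce"},
    "dest": {"index": "ecommerce-customer-totals"},
    "pivot": {
        "group_by": {"customer_id": {"terms": {"field": "customer_id"}}},
        "aggregations": {"total_spend": {"sum": {"field": "taxful_total_price"}}},
    },
}

# Preview first: returns up to 100 result rows plus the generated destination
# index mappings and settings.
preview = client.transform.preview_transform(**config)
print(preview["generated_dest_index"]["mappings"])

# Then persist it; remember that pivot and latest are mutually exclusive.
client.transform.put_transform(transform_id="ecommerce-customer-totals", **config)
```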
+ If you _schedule_now a transform, it will process the new data instantly, without + waiting for the configured frequency interval. After the _schedule_now API is called, + the transform will be processed again at `now + frequency` unless the _schedule_now + API is called again in the meantime. ``_ @@ -588,23 +592,24 @@ def start_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Starts a transform. When you start a transform, it creates the destination index - if it does not already exist. The `number_of_shards` is set to `1` and the `auto_expand_replicas` - is set to `0-1`. If it is a pivot transform, it deduces the mapping definitions - for the destination index from the source indices and the transform aggregations. - If fields in the destination index are derived from scripts (as in the case of - `scripted_metric` or `bucket_script` aggregations), the transform uses dynamic - mappings unless an index template exists. If it is a latest transform, it does - not deduce mapping definitions; it uses dynamic mappings. To use explicit mappings, - create the destination index before you start the transform. Alternatively, you - can create an index template, though it does not affect the deduced mappings - in a pivot transform. When the transform starts, a series of validations occur - to ensure its success. If you deferred validation when you created the transform, - they occur when you start the transform—with the exception of privilege checks. - When Elasticsearch security features are enabled, the transform remembers which - roles the user that created it had at the time of creation and uses those same - roles. If those roles do not have the required privileges on the source and destination - indices, the transform fails when it attempts unauthorized operations. + Start a transform. Starts a transform. When you start a transform, it creates + the destination index if it does not already exist. The `number_of_shards` is + set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot transform, + it deduces the mapping definitions for the destination index from the source + indices and the transform aggregations. If fields in the destination index are + derived from scripts (as in the case of `scripted_metric` or `bucket_script` + aggregations), the transform uses dynamic mappings unless an index template exists. + If it is a latest transform, it does not deduce mapping definitions; it uses + dynamic mappings. To use explicit mappings, create the destination index before + you start the transform. Alternatively, you can create an index template, though + it does not affect the deduced mappings in a pivot transform. When the transform + starts, a series of validations occur to ensure its success. If you deferred + validation when you created the transform, they occur when you start the transform—with + the exception of privilege checks. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. ``_ @@ -658,7 +663,7 @@ def stop_transform( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Stops one or more transforms. + Stop transforms. Stops one or more transforms.
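The start/schedule-now/stop lifecycle described above, as a hedged sketch reusing the hypothetical transform id from the previous example:

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.com:443", api_key="...")  # placeholders

# Creates the destination index if it is missing and runs the validations
# that were deferred at create time (privilege checks excepted).
client.transform.start_transform(transform_id="ecommerce-customer-totals", timeout="30s")

# Process new data immediately instead of waiting for the next `frequency` tick.
client.transform.schedule_now_transform(transform_id="ecommerce-customer-totals")

# Stop every transform (`_all`) and wait until they have fully stopped.
client.transform.stop_transform(transform_id="_all", wait_for_completion=True)
```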
``_ @@ -751,13 +756,14 @@ def update_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates certain properties of a transform. All updated properties except `description` - do not take effect until after the transform starts the next checkpoint, thus - there is data consistency in each checkpoint. To use this API, you must have - `read` and `view_index_metadata` privileges for the source indices. You must - also have `index` and `read` privileges for the destination index. When Elasticsearch - security features are enabled, the transform remembers which roles the user who - updated it had at the time of update and runs with those privileges. + Update a transform. Updates certain properties of a transform. Updated properties, + except `description`, do not take effect until after the transform starts the + next checkpoint; this ensures data consistency in each checkpoint. To use this + API, you must have `read` and `view_index_metadata` privileges for the source + indices. You must also have `index` and `read` privileges for the destination + index. When Elasticsearch security features are enabled, the transform remembers + which roles the user who updated it had at the time of update and runs with those + privileges. ``_
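Finally, a minimal `update_transform` sketch; per the docstring above, only `description` takes effect before the next checkpoint (the id and values are placeholders):

```python
from elasticsearch_serverless import Elasticsearch

client = Elasticsearch("https://my-project.es.example.com:443", api_key="...")  # placeholders

client.transform.update_transform(
    transform_id="ecommerce-customer-totals",  # hypothetical id from the sketches above
    description="Totals per customer, updated hourly",  # applies immediately
    frequency="1h",  # applies starting at the next checkpoint
)
```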