Commit f319f5d

Auto-generated API code (#2728)
1 parent 49352a6 commit f319f5d

54 files changed: +4146 additions, -860 deletions

elasticsearch/_async/client/__init__.py

Lines changed: 10 additions & 0 deletions

@@ -626,12 +626,14 @@ async def bulk(
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
+        list_executed_pipelines: t.Optional[bool] = None,
         pipeline: t.Optional[str] = None,
         pretty: t.Optional[bool] = None,
         refresh: t.Optional[
             t.Union[bool, str, t.Literal["false", "true", "wait_for"]]
         ] = None,
         require_alias: t.Optional[bool] = None,
+        require_data_stream: t.Optional[bool] = None,
         routing: t.Optional[str] = None,
         source: t.Optional[t.Union[bool, t.Union[str, t.Sequence[str]]]] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
@@ -651,6 +653,8 @@ async def bulk(
         :param operations:
         :param index: Name of the data stream, index, or index alias to perform bulk
             actions on.
+        :param list_executed_pipelines: If `true`, the response will include the ingest
+            pipelines that were executed for each index or create.
         :param pipeline: ID of the pipeline to use to preprocess incoming documents.
             If the index has a default ingest pipeline specified, then setting the value
             to `_none` disables the default ingest pipeline for this request. If a final
@@ -661,6 +665,8 @@ async def bulk(
             make this operation visible to search, if `false` do nothing with refreshes.
             Valid values: `true`, `false`, `wait_for`.
         :param require_alias: If `true`, the request’s actions must target an index alias.
+        :param require_data_stream: If `true`, the request's actions must target a data
+            stream (existing or to-be-created).
         :param routing: Custom value used to route operations to a specific shard.
         :param source: `true` or `false` to return the `_source` field or not, or a list
             of fields to return.
@@ -694,6 +700,8 @@ async def bulk(
             __query["filter_path"] = filter_path
         if human is not None:
             __query["human"] = human
+        if list_executed_pipelines is not None:
+            __query["list_executed_pipelines"] = list_executed_pipelines
         if pipeline is not None:
             __query["pipeline"] = pipeline
         if pretty is not None:
@@ -702,6 +710,8 @@ async def bulk(
             __query["refresh"] = refresh
         if require_alias is not None:
             __query["require_alias"] = require_alias
+        if require_data_stream is not None:
+            __query["require_data_stream"] = require_data_stream
         if routing is not None:
             __query["routing"] = routing
         if source is not None:
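
Not part of the commit: a minimal usage sketch of the two new `bulk` flags, assuming an AsyncElasticsearch client pointed at a hypothetical local cluster (URL, index name, and documents are placeholders).

import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Hypothetical endpoint; adjust URL and auth for a real deployment.
    client = AsyncElasticsearch("http://localhost:9200")

    # Standard bulk layout: each action line is followed by its document.
    operations = [
        {"create": {"_index": "logs-myapp-default"}},
        {"@timestamp": "2024-11-08T12:00:00Z", "message": "hello"},
    ]

    resp = await client.bulk(
        operations=operations,
        # New in this commit: report the ingest pipelines executed per action ...
        list_executed_pipelines=True,
        # ... and require every action to target a data stream.
        require_data_stream=True,
    )
    print(resp["errors"])

    await client.close()


asyncio.run(main())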

elasticsearch/_async/client/cat.py

Lines changed: 64 additions & 195 deletions
Large diffs are not rendered by default.

elasticsearch/_async/client/ccr.py

Lines changed: 128 additions & 45 deletions
Large diffs are not rendered by default.

elasticsearch/_async/client/connector.py

Lines changed: 237 additions & 0 deletions

@@ -589,6 +589,125 @@ async def sync_job_cancel(
             path_parts=__path_parts,
         )

+    @_rewrite_parameters()
+    @_stability_warning(Stability.EXPERIMENTAL)
+    async def sync_job_check_in(
+        self,
+        *,
+        connector_sync_job_id: str,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        Check in a connector sync job. Check in a connector sync job and set the `last_seen`
+        field to the current time before updating it in the internal index. To sync data
+        using self-managed connectors, you need to deploy the Elastic connector service
+        on your own infrastructure. This service runs automatically on Elastic Cloud
+        for Elastic managed connectors.
+
+        `<https://www.elastic.co/guide/en/elasticsearch/reference/8.16/check-in-connector-sync-job-api.html>`_
+
+        :param connector_sync_job_id: The unique identifier of the connector sync job
+            to be checked in.
+        """
+        if connector_sync_job_id in SKIP_IN_PATH:
+            raise ValueError("Empty value passed for parameter 'connector_sync_job_id'")
+        __path_parts: t.Dict[str, str] = {
+            "connector_sync_job_id": _quote(connector_sync_job_id)
+        }
+        __path = (
+            f'/_connector/_sync_job/{__path_parts["connector_sync_job_id"]}/_check_in'
+        )
+        __query: t.Dict[str, t.Any] = {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        __headers = {"accept": "application/json"}
+        return await self.perform_request(  # type: ignore[return-value]
+            "PUT",
+            __path,
+            params=__query,
+            headers=__headers,
+            endpoint_id="connector.sync_job_check_in",
+            path_parts=__path_parts,
+        )
+
+    @_rewrite_parameters(
+        body_fields=("worker_hostname", "sync_cursor"),
+    )
+    @_stability_warning(Stability.EXPERIMENTAL)
+    async def sync_job_claim(
+        self,
+        *,
+        connector_sync_job_id: str,
+        worker_hostname: t.Optional[str] = None,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+        sync_cursor: t.Optional[t.Any] = None,
+        body: t.Optional[t.Dict[str, t.Any]] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        Claim a connector sync job. This action updates the job status to `in_progress`
+        and sets the `last_seen` and `started_at` timestamps to the current time. Additionally,
+        it can set the `sync_cursor` property for the sync job. This API is not intended
+        for direct connector management by users. It supports the implementation of services
+        that utilize the connector protocol to communicate with Elasticsearch. To sync
+        data using self-managed connectors, you need to deploy the Elastic connector
+        service on your own infrastructure. This service runs automatically on Elastic
+        Cloud for Elastic managed connectors.
+
+        `<https://www.elastic.co/guide/en/elasticsearch/reference/8.16/claim-connector-sync-job-api.html>`_
+
+        :param connector_sync_job_id: The unique identifier of the connector sync job.
+        :param worker_hostname: The host name of the current system that will run the
+            job.
+        :param sync_cursor: The cursor object from the last incremental sync job. This
+            should reference the `sync_cursor` field in the connector state for which
+            the job runs.
+        """
+        if connector_sync_job_id in SKIP_IN_PATH:
+            raise ValueError("Empty value passed for parameter 'connector_sync_job_id'")
+        if worker_hostname is None and body is None:
+            raise ValueError("Empty value passed for parameter 'worker_hostname'")
+        __path_parts: t.Dict[str, str] = {
+            "connector_sync_job_id": _quote(connector_sync_job_id)
+        }
+        __path = f'/_connector/_sync_job/{__path_parts["connector_sync_job_id"]}/_claim'
+        __query: t.Dict[str, t.Any] = {}
+        __body: t.Dict[str, t.Any] = body if body is not None else {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if not __body:
+            if worker_hostname is not None:
+                __body["worker_hostname"] = worker_hostname
+            if sync_cursor is not None:
+                __body["sync_cursor"] = sync_cursor
+        __headers = {"accept": "application/json", "content-type": "application/json"}
+        return await self.perform_request(  # type: ignore[return-value]
+            "PUT",
+            __path,
+            params=__query,
+            headers=__headers,
+            body=__body,
+            endpoint_id="connector.sync_job_claim",
+            path_parts=__path_parts,
+        )
+
     @_rewrite_parameters()
     @_stability_warning(Stability.BETA)
     async def sync_job_delete(
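
A hedged sketch (not part of the diff) of calling the two experimental endpoints added above; the job ID, hostname, and cursor values are placeholders, and `client` is an AsyncElasticsearch instance supplied by the caller.

from elasticsearch import AsyncElasticsearch


async def claim_job(client: AsyncElasticsearch, job_id: str) -> None:
    # Report the worker as alive by bumping the job's `last_seen` timestamp.
    await client.connector.sync_job_check_in(connector_sync_job_id=job_id)

    # Move the job to `in_progress` and record which worker picked it up.
    await client.connector.sync_job_claim(
        connector_sync_job_id=job_id,
        worker_hostname="connector-worker-01",  # hypothetical worker host
        sync_cursor={"last_synced": "2024-11-08T12:00:00Z"},  # shape depends on the connector
    )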

@@ -634,6 +753,64 @@ async def sync_job_delete(
             path_parts=__path_parts,
         )

+    @_rewrite_parameters(
+        body_fields=("error",),
+    )
+    @_stability_warning(Stability.EXPERIMENTAL)
+    async def sync_job_error(
+        self,
+        *,
+        connector_sync_job_id: str,
+        error: t.Optional[str] = None,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+        body: t.Optional[t.Dict[str, t.Any]] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        Set a connector sync job error. Set the `error` field for a connector sync job
+        and set its `status` to `error`. To sync data using self-managed connectors,
+        you need to deploy the Elastic connector service on your own infrastructure.
+        This service runs automatically on Elastic Cloud for Elastic managed connectors.
+
+        `<https://www.elastic.co/guide/en/elasticsearch/reference/8.16/set-connector-sync-job-error-api.html>`_
+
+        :param connector_sync_job_id: The unique identifier for the connector sync job.
+        :param error: The error for the connector sync job error field.
+        """
+        if connector_sync_job_id in SKIP_IN_PATH:
+            raise ValueError("Empty value passed for parameter 'connector_sync_job_id'")
+        if error is None and body is None:
+            raise ValueError("Empty value passed for parameter 'error'")
+        __path_parts: t.Dict[str, str] = {
+            "connector_sync_job_id": _quote(connector_sync_job_id)
+        }
+        __path = f'/_connector/_sync_job/{__path_parts["connector_sync_job_id"]}/_error'
+        __query: t.Dict[str, t.Any] = {}
+        __body: t.Dict[str, t.Any] = body if body is not None else {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if not __body:
+            if error is not None:
+                __body["error"] = error
+        __headers = {"accept": "application/json", "content-type": "application/json"}
+        return await self.perform_request(  # type: ignore[return-value]
+            "PUT",
+            __path,
+            params=__query,
+            headers=__headers,
+            body=__body,
+            endpoint_id="connector.sync_job_error",
+            path_parts=__path_parts,
+        )
+
     @_rewrite_parameters()
     @_stability_warning(Stability.BETA)
     async def sync_job_get(
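
Similarly, a minimal sketch of the new `sync_job_error` endpoint; the job ID and error message are placeholders.

from elasticsearch import AsyncElasticsearch


async def fail_job(client: AsyncElasticsearch, job_id: str) -> None:
    # Set the job's `error` field and flip its status to `error`.
    await client.connector.sync_job_error(
        connector_sync_job_id=job_id,
        error="Document fetch timed out after 30s",  # hypothetical error text
    )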

@@ -1032,6 +1209,66 @@ async def update_error(
             path_parts=__path_parts,
         )

+    @_rewrite_parameters(
+        body_fields=("features",),
+    )
+    @_stability_warning(Stability.EXPERIMENTAL)
+    async def update_features(
+        self,
+        *,
+        connector_id: str,
+        features: t.Optional[t.Mapping[str, t.Any]] = None,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+        body: t.Optional[t.Dict[str, t.Any]] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        Update the connector features. Update the connector features in the connector
+        document. This API can be used to control the following aspects of a connector:
+        * document-level security * incremental syncs * advanced sync rules * basic sync
+        rules Normally, the running connector service automatically manages these features.
+        However, you can use this API to override the default behavior. To sync data
+        using self-managed connectors, you need to deploy the Elastic connector service
+        on your own infrastructure. This service runs automatically on Elastic Cloud
+        for Elastic managed connectors.
+
+        `<https://www.elastic.co/guide/en/elasticsearch/reference/8.16/update-connector-features-api.html>`_
+
+        :param connector_id: The unique identifier of the connector to be updated.
+        :param features:
+        """
+        if connector_id in SKIP_IN_PATH:
+            raise ValueError("Empty value passed for parameter 'connector_id'")
+        if features is None and body is None:
+            raise ValueError("Empty value passed for parameter 'features'")
+        __path_parts: t.Dict[str, str] = {"connector_id": _quote(connector_id)}
+        __path = f'/_connector/{__path_parts["connector_id"]}/_features'
+        __query: t.Dict[str, t.Any] = {}
+        __body: t.Dict[str, t.Any] = body if body is not None else {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if not __body:
+            if features is not None:
+                __body["features"] = features
+        __headers = {"accept": "application/json", "content-type": "application/json"}
+        return await self.perform_request(  # type: ignore[return-value]
+            "PUT",
+            __path,
+            params=__query,
+            headers=__headers,
+            body=__body,
+            endpoint_id="connector.update_features",
+            path_parts=__path_parts,
+        )
+
     @_rewrite_parameters(
         body_fields=("advanced_snippet", "filtering", "rules"),
     )
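
And a sketch of the new `update_features` endpoint. The connector ID and the exact shape of the `features` payload shown here are illustrative assumptions, not something this diff confirms.

from elasticsearch import AsyncElasticsearch


async def override_features(client: AsyncElasticsearch) -> None:
    # Override the feature flags stored in the connector document.
    await client.connector.update_features(
        connector_id="my-connector-id",  # hypothetical ID
        features={  # hypothetical payload shape
            "document_level_security": {"enabled": True},
            "incremental_sync": {"enabled": True},
        },
    )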

elasticsearch/_async/client/eql.py

Lines changed: 21 additions & 0 deletions

@@ -167,13 +167,16 @@ async def get_status(
     @_rewrite_parameters(
         body_fields=(
             "query",
+            "allow_partial_search_results",
+            "allow_partial_sequence_results",
             "case_sensitive",
             "event_category_field",
             "fetch_size",
             "fields",
             "filter",
             "keep_alive",
             "keep_on_completion",
+            "max_samples_per_key",
             "result_position",
             "runtime_mappings",
             "size",
@@ -188,6 +191,8 @@ async def search(
         index: t.Union[str, t.Sequence[str]],
         query: t.Optional[str] = None,
         allow_no_indices: t.Optional[bool] = None,
+        allow_partial_search_results: t.Optional[bool] = None,
+        allow_partial_sequence_results: t.Optional[bool] = None,
         case_sensitive: t.Optional[bool] = None,
         error_trace: t.Optional[bool] = None,
         event_category_field: t.Optional[str] = None,
@@ -211,6 +216,7 @@ async def search(
         ignore_unavailable: t.Optional[bool] = None,
         keep_alive: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         keep_on_completion: t.Optional[bool] = None,
+        max_samples_per_key: t.Optional[int] = None,
         pretty: t.Optional[bool] = None,
         result_position: t.Optional[t.Union[str, t.Literal["head", "tail"]]] = None,
         runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
@@ -232,6 +238,8 @@ async def search(
         :param index: The name of the index to scope the operation
         :param query: EQL query you wish to run.
         :param allow_no_indices:
+        :param allow_partial_search_results:
+        :param allow_partial_sequence_results:
         :param case_sensitive:
         :param event_category_field: Field containing the event classification, such
             as process, file, or network.
@@ -246,6 +254,11 @@ async def search(
             in the response.
         :param keep_alive:
         :param keep_on_completion:
+        :param max_samples_per_key: By default, the response of a sample query contains
+            up to `10` samples, with one sample per unique set of join keys. Use the
+            `size` parameter to get a smaller or larger set of samples. To retrieve more
+            than one sample per set of join keys, use the `max_samples_per_key` parameter.
+            Pipes are not supported for sample queries.
         :param result_position:
         :param runtime_mappings:
         :param size: For basic queries, the maximum number of matching events to return.
@@ -280,6 +293,12 @@ async def search(
         if not __body:
             if query is not None:
                 __body["query"] = query
+            if allow_partial_search_results is not None:
+                __body["allow_partial_search_results"] = allow_partial_search_results
+            if allow_partial_sequence_results is not None:
+                __body["allow_partial_sequence_results"] = (
+                    allow_partial_sequence_results
+                )
             if case_sensitive is not None:
                 __body["case_sensitive"] = case_sensitive
             if event_category_field is not None:
@@ -294,6 +313,8 @@ async def search(
                 __body["keep_alive"] = keep_alive
             if keep_on_completion is not None:
                 __body["keep_on_completion"] = keep_on_completion
+            if max_samples_per_key is not None:
+                __body["max_samples_per_key"] = max_samples_per_key
             if result_position is not None:
                 __body["result_position"] = result_position
             if runtime_mappings is not None:
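
Finally, a hedged sketch (not part of the commit) of an EQL sample search exercising the parameters added above; the index pattern and query text are placeholders.

from elasticsearch import AsyncElasticsearch


async def sample_search(client: AsyncElasticsearch) -> None:
    # Sample query: up to `max_samples_per_key` events per unique join key;
    # partial results are tolerated instead of failing the whole search.
    resp = await client.eql.search(
        index="logs-*",  # hypothetical index pattern
        query='sample by host.name [any where true] [any where process.name == "curl"]',
        max_samples_per_key=3,
        allow_partial_search_results=True,
        allow_partial_sequence_results=True,
    )
    print(resp["hits"])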
