Skip to content

Auto-generated code for 8.13 #2472

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Mar 14, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions elasticsearch/_async/client/ml.py
Original file line number Diff line number Diff line change
Expand Up @@ -3627,6 +3627,7 @@ async def put_job(
"model_size_bytes",
"model_type",
"platform_architecture",
"prefix_strings",
"tags",
),
)
Expand All @@ -3649,6 +3650,7 @@ async def put_trained_model(
t.Union["t.Literal['lang_ident', 'pytorch', 'tree_ensemble']", str]
] = None,
platform_architecture: t.Optional[str] = None,
prefix_strings: t.Optional[t.Mapping[str, t.Any]] = None,
pretty: t.Optional[bool] = None,
tags: t.Optional[t.Sequence[str]] = None,
body: t.Optional[t.Dict[str, t.Any]] = None,
Expand Down Expand Up @@ -3686,6 +3688,7 @@ async def put_trained_model(
`darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models
(those that work independent of processor architecture or OS features), leave
this field unset.
:param prefix_strings: Optional prefix strings applied at inference time.
:param tags: An array of tags to organize the model.
"""
if model_id in SKIP_IN_PATH:
Expand Down Expand Up @@ -3723,6 +3726,8 @@ async def put_trained_model(
__body["model_type"] = model_type
if platform_architecture is not None:
__body["platform_architecture"] = platform_architecture
if prefix_strings is not None:
__body["prefix_strings"] = prefix_strings
if tags is not None:
__body["tags"] = tags
__headers = {"accept": "application/json", "content-type": "application/json"}
Expand Down
5 changes: 5 additions & 0 deletions elasticsearch/_sync/client/ml.py
Original file line number Diff line number Diff line change
Expand Up @@ -3627,6 +3627,7 @@ def put_job(
"model_size_bytes",
"model_type",
"platform_architecture",
"prefix_strings",
"tags",
),
)
Expand All @@ -3649,6 +3650,7 @@ def put_trained_model(
t.Union["t.Literal['lang_ident', 'pytorch', 'tree_ensemble']", str]
] = None,
platform_architecture: t.Optional[str] = None,
prefix_strings: t.Optional[t.Mapping[str, t.Any]] = None,
pretty: t.Optional[bool] = None,
tags: t.Optional[t.Sequence[str]] = None,
body: t.Optional[t.Dict[str, t.Any]] = None,
Expand Down Expand Up @@ -3686,6 +3688,7 @@ def put_trained_model(
`darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models
(those that work independent of processor architecture or OS features), leave
this field unset.
:param prefix_strings: Optional prefix strings applied at inference time.
:param tags: An array of tags to organize the model.
"""
if model_id in SKIP_IN_PATH:
Expand Down Expand Up @@ -3723,6 +3726,8 @@ def put_trained_model(
__body["model_type"] = model_type
if platform_architecture is not None:
__body["platform_architecture"] = platform_architecture
if prefix_strings is not None:
__body["prefix_strings"] = prefix_strings
if tags is not None:
__body["tags"] = tags
__headers = {"accept": "application/json", "content-type": "application/json"}
Expand Down