Commit 1efca86

Auto-generated API code
1 parent fb88fbb commit 1efca86

2 files changed: 42 additions & 10 deletions


elasticsearch/_async/client/inference.py

Lines changed: 21 additions & 5 deletions
@@ -31,7 +31,10 @@ async def delete_model(
         *,
         inference_id: str,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
@@ -85,7 +88,10 @@ async def get_model(
         *,
         inference_id: str,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
@@ -134,20 +140,24 @@ async def get_model(
         )
 
     @_rewrite_parameters(
-        body_fields=("input", "task_settings"),
+        body_fields=("input", "query", "task_settings"),
     )
     async def inference(
         self,
         *,
         inference_id: str,
         input: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
         pretty: t.Optional[bool] = None,
+        query: t.Optional[str] = None,
         task_settings: t.Optional[t.Any] = None,
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
@@ -159,6 +169,7 @@ async def inference(
         :param inference_id: The inference Id
         :param input: Text input to the model. Either a string or an array of strings.
         :param task_type: The task type
+        :param query: Query input, required for rerank task. Not required for other tasks.
         :param task_settings: Optional task settings
         """
         if inference_id in SKIP_IN_PATH:
@@ -190,6 +201,8 @@ async def inference(
         if not __body:
             if input is not None:
                 __body["input"] = input
+            if query is not None:
+                __body["query"] = query
             if task_settings is not None:
                 __body["task_settings"] = task_settings
         if not __body:
@@ -217,7 +230,10 @@ async def put_model(
         model_config: t.Optional[t.Mapping[str, t.Any]] = None,
         body: t.Optional[t.Mapping[str, t.Any]] = None,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
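
As a rough illustration (not part of this commit), a rerank call against the updated async client might look like the sketch below. The cluster URL and inference endpoint id are placeholders; only the method name and parameters come from the diff above.

import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Placeholder cluster URL and endpoint id, not from this commit.
    client = AsyncElasticsearch("http://localhost:9200")

    # 'rerank' is one of the task types added here; 'query' is the new body
    # field it requires alongside the documents passed via 'input'.
    resp = await client.inference.inference(
        task_type="rerank",
        inference_id="my-rerank-endpoint",
        query="What is a sparse vector?",
        input=[
            "A sparse vector stores only non-zero dimensions.",
            "Elasticsearch is a distributed search engine.",
        ],
    )
    print(resp)

    await client.close()


asyncio.run(main())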

elasticsearch/_sync/client/inference.py

Lines changed: 21 additions & 5 deletions
@@ -31,7 +31,10 @@ def delete_model(
         *,
         inference_id: str,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
@@ -85,7 +88,10 @@ def get_model(
         *,
         inference_id: str,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
@@ -134,20 +140,24 @@ def get_model(
         )
 
     @_rewrite_parameters(
-        body_fields=("input", "task_settings"),
+        body_fields=("input", "query", "task_settings"),
     )
     def inference(
         self,
         *,
         inference_id: str,
         input: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
         pretty: t.Optional[bool] = None,
+        query: t.Optional[str] = None,
        task_settings: t.Optional[t.Any] = None,
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
@@ -159,6 +169,7 @@ def inference(
         :param inference_id: The inference Id
         :param input: Text input to the model. Either a string or an array of strings.
         :param task_type: The task type
+        :param query: Query input, required for rerank task. Not required for other tasks.
         :param task_settings: Optional task settings
         """
         if inference_id in SKIP_IN_PATH:
@@ -190,6 +201,8 @@ def inference(
         if not __body:
             if input is not None:
                 __body["input"] = input
+            if query is not None:
+                __body["query"] = query
             if task_settings is not None:
                 __body["task_settings"] = task_settings
         if not __body:
@@ -217,7 +230,10 @@ def put_model(
         model_config: t.Optional[t.Mapping[str, t.Any]] = None,
         body: t.Optional[t.Mapping[str, t.Any]] = None,
         task_type: t.Optional[
-            t.Union["t.Literal['sparse_embedding', 'text_embedding']", str]
+            t.Union[
+                "t.Literal['completion', 'rerank', 'sparse_embedding', 'text_embedding']",
+                str,
+            ]
         ] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
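
For completeness, a minimal sketch of a completion call with the synchronous client; again, the cluster URL and endpoint id are placeholders and not part of this commit.

from elasticsearch import Elasticsearch

# Placeholder cluster URL and endpoint id, not from this commit.
client = Elasticsearch("http://localhost:9200")

# 'completion' is the other task type added by this change; it only needs 'input'.
resp = client.inference.inference(
    task_type="completion",
    inference_id="my-completion-endpoint",
    input="Summarize what an inference endpoint does.",
)
print(resp)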
