Skip to content

Auto-generated code for 8.x #2895

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Apr 10, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
138 changes: 0 additions & 138 deletions elasticsearch/_async/client/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,67 +351,6 @@ async def inference(
path_parts=__path_parts,
)

@_rewrite_parameters(
    body_name="chat_completion_request",
)
async def post_eis_chat_completion(
    self,
    *,
    eis_inference_id: str,
    chat_completion_request: t.Optional[t.Mapping[str, t.Any]] = None,
    body: t.Optional[t.Mapping[str, t.Any]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    human: t.Optional[bool] = None,
    pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
    """
    .. raw:: html

        <p>Perform a chat completion task through the Elastic Inference Service (EIS).</p>
        <p>Perform a chat completion inference task with the <code>elastic</code> service.</p>


    `<https://www.elastic.co/guide/en/elasticsearch/reference/8.17/post-inference-api.html>`_

    :param eis_inference_id: The unique identifier of the inference endpoint.
    :param chat_completion_request: The request payload; an alias of ``body``.
    """
    # The inference endpoint id is a path segment and must be non-empty.
    if eis_inference_id in SKIP_IN_PATH:
        raise ValueError("Empty value passed for parameter 'eis_inference_id'")
    # 'chat_completion_request' and 'body' are mutually exclusive aliases;
    # exactly one of them must be supplied. (The two branches below cannot
    # both be true, so the check order does not change which error is raised.)
    if chat_completion_request is not None and body is not None:
        raise ValueError("Cannot set both 'chat_completion_request' and 'body'")
    if chat_completion_request is None and body is None:
        raise ValueError(
            "Empty value passed for parameters 'chat_completion_request' and 'body', one of them should be set."
        )
    __path_parts: t.Dict[str, str] = {"eis_inference_id": _quote(eis_inference_id)}
    __path = f"/_inference/chat_completion/{__path_parts['eis_inference_id']}/_stream"
    # Forward only the common query-string flags that were explicitly set.
    __query: t.Dict[str, t.Any] = {}
    for __param, __value in (
        ("error_trace", error_trace),
        ("filter_path", filter_path),
        ("human", human),
        ("pretty", pretty),
    ):
        if __value is not None:
            __query[__param] = __value
    # Whichever alias was provided becomes the request body.
    __body = body if chat_completion_request is None else chat_completion_request
    __headers = {"accept": "application/json", "content-type": "application/json"}
    return await self.perform_request(  # type: ignore[return-value]
        "POST",
        __path,
        params=__query,
        headers=__headers,
        body=__body,
        endpoint_id="inference.post_eis_chat_completion",
        path_parts=__path_parts,
    )

@_rewrite_parameters(
body_name="inference_config",
)
Expand Down Expand Up @@ -1088,83 +1027,6 @@ async def put_cohere(
path_parts=__path_parts,
)

@_rewrite_parameters(
    body_fields=("service", "service_settings"),
)
async def put_eis(
    self,
    *,
    task_type: t.Union[str, t.Literal["chat_completion"]],
    eis_inference_id: str,
    service: t.Optional[t.Union[str, t.Literal["elastic"]]] = None,
    service_settings: t.Optional[t.Mapping[str, t.Any]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    human: t.Optional[bool] = None,
    pretty: t.Optional[bool] = None,
    body: t.Optional[t.Dict[str, t.Any]] = None,
) -> ObjectApiResponse[t.Any]:
    """
    .. raw:: html

        <p>Create an Elastic Inference Service (EIS) inference endpoint.</p>
        <p>Create an inference endpoint to perform an inference task through the Elastic Inference Service (EIS).</p>


    `<https://www.elastic.co/guide/en/elasticsearch/reference/8.17/infer-service-elastic.html>`_

    :param task_type: The type of the inference task that the model will perform.
        NOTE: The `chat_completion` task type only supports streaming and only through
        the _stream API.
    :param eis_inference_id: The unique identifier of the inference endpoint.
    :param service: The type of service supported for the specified task type. In
        this case, `elastic`.
    :param service_settings: Settings used to install the inference model. These
        settings are specific to the `elastic` service.
    """
    # Both path segments must be non-empty; validate in declaration order.
    for __name, __pvalue in (
        ("task_type", task_type),
        ("eis_inference_id", eis_inference_id),
    ):
        if __pvalue in SKIP_IN_PATH:
            raise ValueError(f"Empty value passed for parameter '{__name}'")
    # When no raw body is given, the individual body fields are mandatory.
    if body is None:
        if service is None:
            raise ValueError("Empty value passed for parameter 'service'")
        if service_settings is None:
            raise ValueError("Empty value passed for parameter 'service_settings'")
    __path_parts: t.Dict[str, str] = {
        "task_type": _quote(task_type),
        "eis_inference_id": _quote(eis_inference_id),
    }
    __path = (
        f"/_inference/{__path_parts['task_type']}/{__path_parts['eis_inference_id']}"
    )
    # Forward only the common query-string flags that were explicitly set.
    __query: t.Dict[str, t.Any] = {}
    for __param, __qvalue in (
        ("error_trace", error_trace),
        ("filter_path", filter_path),
        ("human", human),
        ("pretty", pretty),
    ):
        if __qvalue is not None:
            __query[__param] = __qvalue
    # Prefer a caller-supplied body; otherwise assemble one from the fields.
    __body: t.Dict[str, t.Any] = {} if body is None else body
    if not __body:
        if service is not None:
            __body["service"] = service
        if service_settings is not None:
            __body["service_settings"] = service_settings
    __headers = {"accept": "application/json"}
    if __body:
        __headers["content-type"] = "application/json"
    else:
        # An empty body is sent as no body at all (and no content-type header).
        __body = None  # type: ignore[assignment]
    return await self.perform_request(  # type: ignore[return-value]
        "PUT",
        __path,
        params=__query,
        headers=__headers,
        body=__body,
        endpoint_id="inference.put_eis",
        path_parts=__path_parts,
    )

@_rewrite_parameters(
body_fields=(
"service",
Expand Down
138 changes: 0 additions & 138 deletions elasticsearch/_sync/client/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,67 +351,6 @@ def inference(
path_parts=__path_parts,
)

@_rewrite_parameters(
    body_name="chat_completion_request",
)
def post_eis_chat_completion(
    self,
    *,
    eis_inference_id: str,
    chat_completion_request: t.Optional[t.Mapping[str, t.Any]] = None,
    body: t.Optional[t.Mapping[str, t.Any]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    human: t.Optional[bool] = None,
    pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
    """
    .. raw:: html

        <p>Perform a chat completion task through the Elastic Inference Service (EIS).</p>
        <p>Perform a chat completion inference task with the <code>elastic</code> service.</p>


    `<https://www.elastic.co/guide/en/elasticsearch/reference/8.17/post-inference-api.html>`_

    :param eis_inference_id: The unique identifier of the inference endpoint.
    :param chat_completion_request: The request payload; an alias of ``body``.
    """
    # The inference endpoint id is a path segment and must be non-empty.
    if eis_inference_id in SKIP_IN_PATH:
        raise ValueError("Empty value passed for parameter 'eis_inference_id'")
    # 'chat_completion_request' and 'body' are mutually exclusive aliases;
    # exactly one of them must be supplied. (The two branches below cannot
    # both be true, so the check order does not change which error is raised.)
    if chat_completion_request is not None and body is not None:
        raise ValueError("Cannot set both 'chat_completion_request' and 'body'")
    if chat_completion_request is None and body is None:
        raise ValueError(
            "Empty value passed for parameters 'chat_completion_request' and 'body', one of them should be set."
        )
    __path_parts: t.Dict[str, str] = {"eis_inference_id": _quote(eis_inference_id)}
    __path = f"/_inference/chat_completion/{__path_parts['eis_inference_id']}/_stream"
    # Forward only the common query-string flags that were explicitly set.
    __query: t.Dict[str, t.Any] = {}
    for __param, __value in (
        ("error_trace", error_trace),
        ("filter_path", filter_path),
        ("human", human),
        ("pretty", pretty),
    ):
        if __value is not None:
            __query[__param] = __value
    # Whichever alias was provided becomes the request body.
    __body = body if chat_completion_request is None else chat_completion_request
    __headers = {"accept": "application/json", "content-type": "application/json"}
    return self.perform_request(  # type: ignore[return-value]
        "POST",
        __path,
        params=__query,
        headers=__headers,
        body=__body,
        endpoint_id="inference.post_eis_chat_completion",
        path_parts=__path_parts,
    )

@_rewrite_parameters(
body_name="inference_config",
)
Expand Down Expand Up @@ -1088,83 +1027,6 @@ def put_cohere(
path_parts=__path_parts,
)

@_rewrite_parameters(
    body_fields=("service", "service_settings"),
)
def put_eis(
    self,
    *,
    task_type: t.Union[str, t.Literal["chat_completion"]],
    eis_inference_id: str,
    service: t.Optional[t.Union[str, t.Literal["elastic"]]] = None,
    service_settings: t.Optional[t.Mapping[str, t.Any]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    human: t.Optional[bool] = None,
    pretty: t.Optional[bool] = None,
    body: t.Optional[t.Dict[str, t.Any]] = None,
) -> ObjectApiResponse[t.Any]:
    """
    .. raw:: html

        <p>Create an Elastic Inference Service (EIS) inference endpoint.</p>
        <p>Create an inference endpoint to perform an inference task through the Elastic Inference Service (EIS).</p>


    `<https://www.elastic.co/guide/en/elasticsearch/reference/8.17/infer-service-elastic.html>`_

    :param task_type: The type of the inference task that the model will perform.
        NOTE: The `chat_completion` task type only supports streaming and only through
        the _stream API.
    :param eis_inference_id: The unique identifier of the inference endpoint.
    :param service: The type of service supported for the specified task type. In
        this case, `elastic`.
    :param service_settings: Settings used to install the inference model. These
        settings are specific to the `elastic` service.
    """
    # Both path segments must be non-empty; validate in declaration order.
    for __name, __pvalue in (
        ("task_type", task_type),
        ("eis_inference_id", eis_inference_id),
    ):
        if __pvalue in SKIP_IN_PATH:
            raise ValueError(f"Empty value passed for parameter '{__name}'")
    # When no raw body is given, the individual body fields are mandatory.
    if body is None:
        if service is None:
            raise ValueError("Empty value passed for parameter 'service'")
        if service_settings is None:
            raise ValueError("Empty value passed for parameter 'service_settings'")
    __path_parts: t.Dict[str, str] = {
        "task_type": _quote(task_type),
        "eis_inference_id": _quote(eis_inference_id),
    }
    __path = (
        f"/_inference/{__path_parts['task_type']}/{__path_parts['eis_inference_id']}"
    )
    # Forward only the common query-string flags that were explicitly set.
    __query: t.Dict[str, t.Any] = {}
    for __param, __qvalue in (
        ("error_trace", error_trace),
        ("filter_path", filter_path),
        ("human", human),
        ("pretty", pretty),
    ):
        if __qvalue is not None:
            __query[__param] = __qvalue
    # Prefer a caller-supplied body; otherwise assemble one from the fields.
    __body: t.Dict[str, t.Any] = {} if body is None else body
    if not __body:
        if service is not None:
            __body["service"] = service
        if service_settings is not None:
            __body["service_settings"] = service_settings
    __headers = {"accept": "application/json"}
    if __body:
        __headers["content-type"] = "application/json"
    else:
        # An empty body is sent as no body at all (and no content-type header).
        __body = None  # type: ignore[assignment]
    return self.perform_request(  # type: ignore[return-value]
        "PUT",
        __path,
        params=__query,
        headers=__headers,
        body=__body,
        endpoint_id="inference.put_eis",
        path_parts=__path_parts,
    )

@_rewrite_parameters(
body_fields=(
"service",
Expand Down