Skip to content

Commit ce4acd3

Browse files
Auto-generated API code (#2707)
1 parent 655d62b commit ce4acd3

8 files changed

+68
-70
lines changed

Diff for: docs/doc_examples/120fcf9f55128d6a81d5e87a9c235bbd.asciidoc

+10-9
Original file line numberDiff line numberDiff line change
@@ -3,16 +3,17 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.streamInference({
7-
task_type: "chat_completion",
6+
const response = await client.inference.chatCompletionUnified({
87
inference_id: "openai-completion",
9-
model: "gpt-4o",
10-
messages: [
11-
{
12-
role: "user",
13-
content: "What is Elastic?",
14-
},
15-
],
8+
chat_completion_request: {
9+
model: "gpt-4o",
10+
messages: [
11+
{
12+
role: "user",
13+
content: "What is Elastic?",
14+
},
15+
],
16+
},
1617
});
1718
console.log(response);
1819
----

Diff for: docs/doc_examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc

+1-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.inference({
7-
task_type: "sparse_embedding",
6+
const response = await client.inference.sparseEmbedding({
87
inference_id: "my-elser-model",
98
input:
109
"The sky above the port was the color of television tuned to a dead channel.",

Diff for: docs/doc_examples/45954b8aaedfed57012be8b6538b0a24.asciidoc

+30-29
Original file line numberDiff line numberDiff line change
@@ -3,42 +3,43 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.streamInference({
7-
task_type: "chat_completion",
6+
const response = await client.inference.chatCompletionUnified({
87
inference_id: "openai-completion",
9-
messages: [
10-
{
11-
role: "user",
12-
content: [
13-
{
14-
type: "text",
15-
text: "What's the price of a scarf?",
8+
chat_completion_request: {
9+
messages: [
10+
{
11+
role: "user",
12+
content: [
13+
{
14+
type: "text",
15+
text: "What's the price of a scarf?",
16+
},
17+
],
18+
},
19+
],
20+
tools: [
21+
{
22+
type: "function",
23+
function: {
24+
name: "get_current_price",
25+
description: "Get the current price of an item",
26+
parameters: {
27+
type: "object",
28+
properties: {
29+
item: {
30+
id: "123",
31+
},
32+
},
33+
},
1634
},
17-
],
18-
},
19-
],
20-
tools: [
21-
{
35+
},
36+
],
37+
tool_choice: {
2238
type: "function",
2339
function: {
2440
name: "get_current_price",
25-
description: "Get the current price of an item",
26-
parameters: {
27-
type: "object",
28-
properties: {
29-
item: {
30-
id: "123",
31-
},
32-
},
33-
},
3441
},
3542
},
36-
],
37-
tool_choice: {
38-
type: "function",
39-
function: {
40-
name: "get_current_price",
41-
},
4243
},
4344
});
4445
console.log(response);

Diff for: docs/doc_examples/4b91ad7c9b44e07db4a4e81390f19ad3.asciidoc

+1-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.streamInference({
7-
task_type: "completion",
6+
const response = await client.inference.streamCompletion({
87
inference_id: "openai-completion",
98
input: "What is Elastic?",
109
});

Diff for: docs/doc_examples/7429b16221fe741fd31b0584786dd0b0.asciidoc

+1-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.inference({
7-
task_type: "text_embedding",
6+
const response = await client.inference.textEmbedding({
87
inference_id: "my-cohere-endpoint",
98
input:
109
"The sky above the port was the color of television tuned to a dead channel.",

Diff for: docs/doc_examples/82bb6c61dab959f4446dc5ecab7ecbdf.asciidoc

+23-22
Original file line numberDiff line numberDiff line change
@@ -3,30 +3,31 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.streamInference({
7-
task_type: "chat_completion",
6+
const response = await client.inference.chatCompletionUnified({
87
inference_id: "openai-completion",
9-
messages: [
10-
{
11-
role: "assistant",
12-
content: "Let's find out what the weather is",
13-
tool_calls: [
14-
{
15-
id: "call_KcAjWtAww20AihPHphUh46Gd",
16-
type: "function",
17-
function: {
18-
name: "get_current_weather",
19-
arguments: '{"location":"Boston, MA"}',
8+
chat_completion_request: {
9+
messages: [
10+
{
11+
role: "assistant",
12+
content: "Let's find out what the weather is",
13+
tool_calls: [
14+
{
15+
id: "call_KcAjWtAww20AihPHphUh46Gd",
16+
type: "function",
17+
function: {
18+
name: "get_current_weather",
19+
arguments: '{"location":"Boston, MA"}',
20+
},
2021
},
21-
},
22-
],
23-
},
24-
{
25-
role: "tool",
26-
content: "The weather is cold",
27-
tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
28-
},
29-
],
22+
],
23+
},
24+
{
25+
role: "tool",
26+
content: "The weather is cold",
27+
tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
28+
},
29+
],
30+
},
3031
});
3132
console.log(response);
3233
----

Diff for: docs/doc_examples/b45a8c6fc746e9c90fd181e69a605fad.asciidoc

+1-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.inference({
7-
task_type: "completion",
6+
const response = await client.inference.completion({
87
inference_id: "openai_chat_completions",
98
input: "What is Elastic?",
109
});

Diff for: docs/doc_examples/f1b24217b1d9ba6ea5e4fa6e6f412022.asciidoc

+1-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@
33

44
[source, js]
55
----
6-
const response = await client.inference.inference({
7-
task_type: "rerank",
6+
const response = await client.inference.rerank({
87
inference_id: "cohere_rerank",
98
input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"],
109
query: "star wars main character",

0 commit comments

Comments
 (0)