Auto-generated API code (#2707)

Author: Elastic Machine
Date: 2025-04-07 21:31:00 +02:00
Committed by: GitHub
parent 655d62b7b5
commit ce4acd37ff
8 changed files with 69 additions and 71 deletions

View File

@@ -3,16 +3,17 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "chat_completion",
+const response = await client.inference.chatCompletionUnified({
   inference_id: "openai-completion",
-  model: "gpt-4o",
-  messages: [
-    {
-      role: "user",
-      content: "What is Elastic?",
-    },
-  ],
+  chat_completion_request: {
+    model: "gpt-4o",
+    messages: [
+      {
+        role: "user",
+        content: "What is Elastic?",
+      },
+    ],
+  },
 });
 console.log(response);
 ----
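The regenerated snippet replaces the generic `streamInference` call and its `task_type` parameter with the dedicated `chatCompletionUnified` helper, nesting the request body under `chat_completion_request`. For context, a minimal self-contained sketch of the new call shape; the node URL and API key are placeholders, not part of this commit:

[source, js]
----
// Sketch only: connection details below are placeholders.
import { Client } from "@elastic/elasticsearch";

const client = new Client({
  node: "https://localhost:9200", // placeholder endpoint
  auth: { apiKey: "REDACTED" }, // placeholder credential
});

// The dedicated helper nests the body under chat_completion_request.
const response = await client.inference.chatCompletionUnified({
  inference_id: "openai-completion",
  chat_completion_request: {
    model: "gpt-4o",
    messages: [{ role: "user", content: "What is Elastic?" }],
  },
});
console.log(response);
----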

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "sparse_embedding",
+const response = await client.inference.sparseEmbedding({
   inference_id: "my-elser-model",
   input:
     "The sky above the port was the color of television tuned to a dead channel.",

View File

@@ -3,41 +3,42 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "chat_completion",
+const response = await client.inference.chatCompletionUnified({
   inference_id: "openai-completion",
-  messages: [
-    {
-      role: "user",
-      content: [
-        {
-          type: "text",
-          text: "What's the price of a scarf?",
-        },
-      ],
-    },
-  ],
-  tools: [
-    {
-      type: "function",
-      function: {
-        name: "get_current_price",
-        description: "Get the current price of a item",
-        parameters: {
-          type: "object",
-          properties: {
-            item: {
-              id: "123",
-            },
-          },
-        },
-      },
-    },
-  ],
-  tool_choice: {
-    type: "function",
-    function: {
-      name: "get_current_price",
-    },
-  },
+  chat_completion_request: {
+    messages: [
+      {
+        role: "user",
+        content: [
+          {
+            type: "text",
+            text: "What's the price of a scarf?",
+          },
+        ],
+      },
+    ],
+    tools: [
+      {
+        type: "function",
+        function: {
+          name: "get_current_price",
+          description: "Get the current price of a item",
+          parameters: {
+            type: "object",
+            properties: {
+              item: {
+                id: "123",
+              },
+            },
+          },
+        },
+      },
+    ],
+    tool_choice: {
+      type: "function",
+      function: {
+        name: "get_current_price",
+      },
+    },
+  },
 });

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "completion",
+const response = await client.inference.streamCompletion({
   inference_id: "openai-completion",
   input: "What is Elastic?",
 });
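`streamCompletion` returns a streamed body. One way to consume it is with the client's generic `asStream: true` transport option, which yields the raw HTTP response stream; this is a sketch under that assumption and is not part of this commit:

[source, js]
----
// Sketch only: asStream is a generic transport option of the JS client,
// not something introduced by this commit.
const response = await client.inference.streamCompletion(
  { inference_id: "openai-completion", input: "What is Elastic?" },
  { asStream: true } // return the raw HTTP stream instead of a parsed body
);
// Node readable streams are async-iterable; each chunk carries
// server-sent-event framed data.
for await (const chunk of response) {
  process.stdout.write(chunk.toString());
}
----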

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "text_embedding",
+const response = await client.inference.textEmbedding({
   inference_id: "my-cohere-endpoint",
   input:
     "The sky above the port was the color of television tuned to a dead channel.",

View File

@@ -3,30 +3,31 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "chat_completion",
+const response = await client.inference.chatCompletionUnified({
   inference_id: "openai-completion",
-  messages: [
-    {
-      role: "assistant",
-      content: "Let's find out what the weather is",
-      tool_calls: [
-        {
-          id: "call_KcAjWtAww20AihPHphUh46Gd",
-          type: "function",
-          function: {
-            name: "get_current_weather",
-            arguments: '{"location":"Boston, MA"}',
-          },
-        },
-      ],
-    },
-    {
-      role: "tool",
-      content: "The weather is cold",
-      tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
-    },
-  ],
+  chat_completion_request: {
+    messages: [
+      {
+        role: "assistant",
+        content: "Let's find out what the weather is",
+        tool_calls: [
+          {
+            id: "call_KcAjWtAww20AihPHphUh46Gd",
+            type: "function",
+            function: {
+              name: "get_current_weather",
+              arguments: '{"location":"Boston, MA"}',
+            },
+          },
+        ],
+      },
+      {
+        role: "tool",
+        content: "The weather is cold",
+        tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
+      },
+    ],
+  },
 });
 console.log(response);
 ----

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "completion",
+const response = await client.inference.completion({
   inference_id: "openai_chat_completions",
   input: "What is Elastic?",
 });

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "rerank",
+const response = await client.inference.rerank({
   inference_id: "cohere_rerank",
   input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"],
   query: "star wars main character",
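As with the other task types, the generic `inference` call gives way to a task-specific helper. A hedged sketch of reading ranked results back out, assuming the response body carries a `rerank` array of `{ index, relevance_score }` entries; that response shape is an assumption about the inference API, not something this commit defines:

[source, js]
----
// Sketch only: the response-shape field names below are assumptions.
const docs = ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"];
const response = await client.inference.rerank({
  inference_id: "cohere_rerank",
  input: docs,
  query: "star wars main character",
});
// Map ranked entries back onto the original inputs.
for (const entry of response.rerank ?? []) {
  console.log(docs[entry.index], entry.relevance_score);
}
----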