Auto-generated API code (#2707)

This commit is contained in:
Elastic Machine
2025-04-07 21:31:00 +02:00
committed by GitHub
parent 655d62b7b5
commit ce4acd37ff
8 changed files with 69 additions and 71 deletions

View File

@@ -3,16 +3,17 @@
[source, js]
----
const response = await client.inference.streamInference({
task_type: "chat_completion",
const response = await client.inference.chatCompletionUnified({
inference_id: "openai-completion",
model: "gpt-4o",
messages: [
{
role: "user",
content: "What is Elastic?",
},
],
chat_completion_request: {
model: "gpt-4o",
messages: [
{
role: "user",
content: "What is Elastic?",
},
],
},
});
console.log(response);
----

View File

@@ -3,8 +3,7 @@
[source, js]
----
const response = await client.inference.inference({
task_type: "sparse_embedding",
const response = await client.inference.sparseEmbedding({
inference_id: "my-elser-model",
input:
"The sky above the port was the color of television tuned to a dead channel.",

View File

@@ -3,41 +3,42 @@
[source, js]
----
const response = await client.inference.streamInference({
task_type: "chat_completion",
const response = await client.inference.chatCompletionUnified({
inference_id: "openai-completion",
messages: [
{
role: "user",
content: [
{
type: "text",
text: "What's the price of a scarf?",
},
],
},
],
tools: [
{
type: "function",
function: {
name: "get_current_price",
description: "Get the current price of a item",
parameters: {
type: "object",
properties: {
item: {
id: "123",
chat_completion_request: {
messages: [
{
role: "user",
content: [
{
type: "text",
text: "What's the price of a scarf?",
},
],
},
],
tools: [
{
type: "function",
function: {
name: "get_current_price",
description: "Get the current price of a item",
parameters: {
type: "object",
properties: {
item: {
id: "123",
},
},
},
},
},
},
],
tool_choice: {
type: "function",
function: {
name: "get_current_price",
],
tool_choice: {
type: "function",
function: {
name: "get_current_price",
},
},
},
});

View File

@@ -3,8 +3,7 @@
[source, js]
----
const response = await client.inference.streamInference({
task_type: "completion",
const response = await client.inference.streamCompletion({
inference_id: "openai-completion",
input: "What is Elastic?",
});

View File

@@ -3,8 +3,7 @@
[source, js]
----
const response = await client.inference.inference({
task_type: "text_embedding",
const response = await client.inference.textEmbedding({
inference_id: "my-cohere-endpoint",
input:
"The sky above the port was the color of television tuned to a dead channel.",

View File

@@ -3,30 +3,31 @@
[source, js]
----
const response = await client.inference.streamInference({
task_type: "chat_completion",
const response = await client.inference.chatCompletionUnified({
inference_id: "openai-completion",
messages: [
{
role: "assistant",
content: "Let's find out what the weather is",
tool_calls: [
{
id: "call_KcAjWtAww20AihPHphUh46Gd",
type: "function",
function: {
name: "get_current_weather",
arguments: '{"location":"Boston, MA"}',
chat_completion_request: {
messages: [
{
role: "assistant",
content: "Let's find out what the weather is",
tool_calls: [
{
id: "call_KcAjWtAww20AihPHphUh46Gd",
type: "function",
function: {
name: "get_current_weather",
arguments: '{"location":"Boston, MA"}',
},
},
},
],
},
{
role: "tool",
content: "The weather is cold",
tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
},
],
],
},
{
role: "tool",
content: "The weather is cold",
tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
},
],
},
});
console.log(response);
----

View File

@@ -3,8 +3,7 @@
[source, js]
----
const response = await client.inference.inference({
task_type: "completion",
const response = await client.inference.completion({
inference_id: "openai_chat_completions",
input: "What is Elastic?",
});

View File

@@ -3,8 +3,7 @@
[source, js]
----
const response = await client.inference.inference({
task_type: "rerank",
const response = await client.inference.rerank({
inference_id: "cohere_rerank",
input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"],
query: "star wars main character",