Auto-generated API code (#2707)

Authored by Elastic Machine on 2025-04-07 21:31:00 +02:00; committed by GitHub
parent 655d62b7b5
commit ce4acd37ff
8 changed files with 69 additions and 71 deletions
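
The regenerated examples replace the generic `client.inference.inference()` and `client.inference.streamInference()` calls, which took an explicit `task_type` parameter, with task-specific helpers: `chatCompletionUnified`, `sparseEmbedding`, `textEmbedding`, `completion`, `streamCompletion`, and `rerank`. A minimal before/after sketch of the pattern, assuming a client instance already exists:

[source, js]
----
// Before: one generic method, task selected via task_type
const before = await client.inference.inference({
  task_type: "rerank",
  inference_id: "cohere_rerank",
  input: ["luke", "leia"],
  query: "star wars main character",
});

// After: the task is implied by the method name
const after = await client.inference.rerank({
  inference_id: "cohere_rerank",
  input: ["luke", "leia"],
  query: "star wars main character",
});
----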

View File

@@ -3,9 +3,9 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "chat_completion",
+const response = await client.inference.chatCompletionUnified({
   inference_id: "openai-completion",
+  chat_completion_request: {
     model: "gpt-4o",
     messages: [
       {
@@ -13,6 +13,7 @@ const response = await client.inference.streamInference({
         content: "What is Elastic?",
       },
     ],
+  },
 });
 console.log(response);
 ----
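
Assembled from the hunk above, the updated example would look roughly like the following; the `role: "user"` line is not visible in this excerpt and is assumed, as is the surrounding client setup:

[source, js]
----
const response = await client.inference.chatCompletionUnified({
  inference_id: "openai-completion",
  chat_completion_request: {
    model: "gpt-4o",
    messages: [
      {
        role: "user",
        content: "What is Elastic?",
      },
    ],
  },
});
console.log(response);
----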

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "sparse_embedding",
+const response = await client.inference.sparseEmbedding({
   inference_id: "my-elser-model",
   input:
     "The sky above the port was the color of television tuned to a dead channel.",

View File

@@ -3,9 +3,9 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "chat_completion",
+const response = await client.inference.chatCompletionUnified({
   inference_id: "openai-completion",
+  chat_completion_request: {
     messages: [
       {
         role: "user",
@@ -40,6 +40,7 @@ const response = await client.inference.streamInference({
           name: "get_current_price",
         },
       },
+  },
 });
 console.log(response);
 ----
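
Only fragments of the tool-calling example are visible above. A rough sketch of the shape of the new call, using an illustrative (not verbatim) tool definition in the OpenAI-style format used by the unified chat completion request:

[source, js]
----
const response = await client.inference.chatCompletionUnified({
  inference_id: "openai-completion",
  chat_completion_request: {
    messages: [
      {
        role: "user",
        // Illustrative prompt; the original example's wording is not shown above
        content: "What is the current price of the item?",
      },
    ],
    tools: [
      {
        type: "function",
        function: {
          name: "get_current_price",
          // Illustrative description and parameters schema
          description: "Get the current price of an item",
          parameters: {
            type: "object",
            properties: {
              item: { type: "string" },
            },
          },
        },
      },
    ],
    tool_choice: {
      type: "function",
      function: {
        name: "get_current_price",
      },
    },
  },
});
console.log(response);
----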

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "completion",
+const response = await client.inference.streamCompletion({
   inference_id: "openai-completion",
   input: "What is Elastic?",
 });

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "text_embedding",
+const response = await client.inference.textEmbedding({
   inference_id: "my-cohere-endpoint",
   input:
     "The sky above the port was the color of television tuned to a dead channel.",

View File

@@ -3,9 +3,9 @@
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "chat_completion",
+const response = await client.inference.chatCompletionUnified({
   inference_id: "openai-completion",
+  chat_completion_request: {
     messages: [
       {
         role: "assistant",
@@ -27,6 +27,7 @@ const response = await client.inference.streamInference({
         tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
       },
     ],
+  },
 });
 console.log(response);
 ----
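
The visible fragment shows an assistant message followed by a `tool_call_id` in the messages array. A hedged sketch of how a tool result is passed back in the updated call; the assistant's earlier tool call and the tool output below are illustrative, not taken from the example:

[source, js]
----
const response = await client.inference.chatCompletionUnified({
  inference_id: "openai-completion",
  chat_completion_request: {
    messages: [
      {
        role: "assistant",
        content: "",
        // The model's earlier tool call would appear here (omitted in this sketch)
        tool_calls: [],
      },
      {
        role: "tool",
        // Illustrative tool output
        content: "The current price is 9.99",
        tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
      },
    ],
  },
});
console.log(response);
----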

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "completion",
+const response = await client.inference.completion({
   inference_id: "openai_chat_completions",
   input: "What is Elastic?",
 });

View File

@@ -3,8 +3,7 @@
 [source, js]
 ----
-const response = await client.inference.inference({
-  task_type: "rerank",
+const response = await client.inference.rerank({
   inference_id: "cohere_rerank",
   input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"],
   query: "star wars main character",
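
The full updated rerank call produced by this hunk would be approximately (closing lines assumed):

[source, js]
----
const response = await client.inference.rerank({
  inference_id: "cohere_rerank",
  input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"],
  query: "star wars main character",
});
console.log(response);
----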