Auto-generated code for 8.x (#2500)

Elastic Machine
2024-12-02 18:06:57 +00:00
committed by GitHub
parent dbd0ec2457
commit 4e1273ef33
34 changed files with 1024 additions and 102 deletions
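All of the generated snippets in this commit call methods on a pre-existing `client` object. As a minimal sketch (not part of the generated output), the 8.x JavaScript client could be instantiated as shown below; the node URL and API key are placeholder assumptions, not values from this commit.

[source, js]
----
// Hypothetical setup for running the generated examples;
// the endpoint and API key below are placeholders.
const { Client } = require("@elastic/elasticsearch");

const client = new Client({
  node: "https://localhost:9200",
  auth: { apiKey: "YOUR_API_KEY" },
});
----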

@@ -0,0 +1,46 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
range: {
year: {
gt: 2023,
},
},
},
},
},
{
standard: {
query: {
term: {
topic: "elastic",
},
},
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
_source: false,
aggs: {
topics: {
terms: {
field: "topic",
},
},
},
});
console.log(response);
----

@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "rerank",
inference_id: "my-rerank-model",
inference_config: {
service: "cohere",
service_settings: {
model_id: "rerank-english-v3.0",
api_key: "{{COHERE_API_KEY}}",
},
},
});
console.log(response);
----

@@ -0,0 +1,49 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example_nested",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
nested: {
path: "nested_field",
inner_hits: {
name: "nested_vector",
_source: false,
fields: ["nested_field.paragraph_id"],
},
query: {
knn: {
field: "nested_field.nested_vector",
query_vector: [1, 0, 0.5],
k: 10,
},
},
},
},
},
},
{
standard: {
query: {
term: {
topic: "ai",
},
},
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
_source: ["topic"],
});
console.log(response);
----

@@ -0,0 +1,57 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
term: {
topic: "elastic",
},
},
},
},
{
rrf: {
retrievers: [
{
standard: {
query: {
query_string: {
query:
"(information retrieval) OR (artificial intelligence)",
default_field: "text",
},
},
},
},
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
_source: false,
size: 1,
explain: true,
});
console.log(response);
----

@@ -16,7 +16,7 @@ const response = await client.search({
},
},
field: "text",
inference_id: "my-cohere-rerank-model",
inference_id: "elastic-rerank",
inference_text: "How often does the moon hide the sun?",
rank_window_size: 100,
min_score: 0.5,

@@ -3,8 +3,8 @@
[source, js]
----
-const response = await client.cluster.getSettings({
-flat_settings: "true",
+const response = await client.indices.rollover({
+alias: "datastream",
});
console.log(response);
----

@@ -1,28 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.esql.query({
format: "txt",
query:
"\n FROM library\n | SORT page_count DESC\n | KEEP name, author\n | LOOKUP era ON author\n | LIMIT 5\n ",
tables: {
era: {
author: {
keyword: [
"Frank Herbert",
"Peter F. Hamilton",
"Vernor Vinge",
"Alastair Reynolds",
"James S.A. Corey",
],
},
era: {
keyword: ["The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"],
},
},
},
});
console.log(response);
----

@@ -1,16 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cluster.putSettings({
persistent: {
"cluster.indices.close.enable": false,
"indices.recovery.max_bytes_per_sec": "50mb",
},
transient: {
"*": null,
},
});
console.log(response);
----

@@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "rerank",
inference_id: "my-elastic-rerank",
inference_config: {
service: "elasticsearch",
service_settings: {
model_id: ".rerank-v1",
num_threads: 1,
adaptive_allocations: {
enabled: true,
min_number_of_allocations: 1,
max_number_of_allocations: 4,
},
},
},
});
console.log(response);
----

@@ -8,11 +8,6 @@ const response = await client.search({
query: {
bool: {
must: [
-{
-term: {
-"category.keyword": "Main Course",
-},
-},
{
term: {
tags: "vegetarian",
@@ -27,6 +22,11 @@ const response = await client.search({
},
],
should: [
+{
+term: {
+category: "Main Course",
+},
+},
{
multi_match: {
query: "curry spicy",

@@ -9,7 +9,6 @@ const response = await client.indices.create({
properties: {
inference_field: {
type: "semantic_text",
inference_id: "my-elser-endpoint",
},
},
},

@@ -45,7 +45,7 @@ console.log(response);
const response1 = await client.indices.putIndexTemplate({
name: 2,
index_patterns: ["k8s*"],
index_patterns: ["k9s*"],
composed_of: ["destination_template"],
data_stream: {},
});

@@ -0,0 +1,44 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
{
text_similarity_reranker: {
retriever: {
standard: {
query: {
term: {
topic: "ai",
},
},
},
},
field: "text",
inference_id: "my-rerank-model",
inference_text:
"Can I use generative AI to identify user intent and improve search relevance?",
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
_source: false,
});
console.log(response);
----

@@ -0,0 +1,46 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
text_similarity_reranker: {
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
query_string: {
query:
"(information retrieval) OR (artificial intelligence)",
default_field: "text",
},
},
},
},
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
field: "text",
inference_id: "my-rerank-model",
inference_text:
"What are the state of the art applications of AI in information retrieval?",
},
},
_source: false,
});
console.log(response);
----

@@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "rerank",
inference_id: "my-elastic-rerank",
inference_config: {
service: "elasticsearch",
service_settings: {
model_id: ".rerank-v1",
num_threads: 1,
adaptive_allocations: {
enabled: true,
min_number_of_allocations: 1,
max_number_of_allocations: 10,
},
},
},
});
console.log(response);
----

@@ -4,7 +4,7 @@
[source, js]
----
const response = await client.indices.create({
index: "my-index-000002",
index: "my-index-000003",
mappings: {
properties: {
inference_field: {

@@ -9,7 +9,6 @@ const response = await client.indices.create({
properties: {
content: {
type: "semantic_text",
inference_id: "my-elser-endpoint",
},
},
},

@@ -0,0 +1,13 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.connector.put({
connector_id: "my-{service-name-stub}-connector",
index_name: "my-elasticsearch-index",
name: "Content synced from {service-name}",
service_type: "{service-name-stub}",
});
console.log(response);
----

@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000002",
mappings: {
properties: {
inference_field: {
type: "semantic_text",
inference_id: "my-openai-endpoint",
},
},
},
});
console.log(response);
----

@@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
query_string: {
query: "(information retrieval) OR (artificial intelligence)",
default_field: "text",
},
},
},
},
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
_source: false,
});
console.log(response);
----

@@ -0,0 +1,27 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
retriever: {
text_similarity_reranker: {
retriever: {
standard: {
query: {
match: {
text: "How often does the moon hide the sun?",
},
},
},
},
field: "text",
inference_id: "my-elastic-rerank",
inference_text: "How often does the moon hide the sun?",
rank_window_size: 100,
min_score: 0.5,
},
},
});
console.log(response);
----

@@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
text_similarity_reranker: {
retriever: {
text_similarity_reranker: {
retriever: {
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
rank_window_size: 100,
field: "text",
inference_id: "my-rerank-model",
inference_text:
"What are the state of the art applications of AI in information retrieval?",
},
},
rank_window_size: 10,
field: "text",
inference_id: "my-other-more-expensive-rerank-model",
inference_text:
"Applications of Large Language Models in technology and their impact on user satisfaction",
},
},
_source: false,
});
console.log(response);
----

@@ -12,7 +12,7 @@ const response = await client.inference.put({
adaptive_allocations: {
enabled: true,
min_number_of_allocations: 1,
-max_number_of_allocations: 10,
+max_number_of_allocations: 4,
},
num_threads: 1,
model_id: ".elser_model_2",

@@ -0,0 +1,14 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.connector.put({
connector_id: "my-{service-name-stub}-connector",
index_name: "my-elasticsearch-index",
name: "Content synced from {service-name}",
service_type: "{service-name-stub}",
is_native: true,
});
console.log(response);
----

@@ -0,0 +1,45 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
query_string: {
query: "(information retrieval) OR (artificial intelligence)",
default_field: "text",
},
},
},
},
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
highlight: {
fields: {
text: {
fragment_size: 150,
number_of_fragments: 3,
},
},
},
_source: false,
});
console.log(response);
----

@@ -0,0 +1,83 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "retrievers_example_nested",
mappings: {
properties: {
nested_field: {
type: "nested",
properties: {
paragraph_id: {
type: "keyword",
},
nested_vector: {
type: "dense_vector",
dims: 3,
similarity: "l2_norm",
index: true,
},
},
},
topic: {
type: "keyword",
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "retrievers_example_nested",
id: 1,
document: {
nested_field: [
{
paragraph_id: "1a",
nested_vector: [-1.12, -0.59, 0.78],
},
{
paragraph_id: "1b",
nested_vector: [-0.12, 1.56, 0.42],
},
{
paragraph_id: "1c",
nested_vector: [1, -1, 0],
},
],
topic: ["ai"],
},
});
console.log(response1);
const response2 = await client.index({
index: "retrievers_example_nested",
id: 2,
document: {
nested_field: [
{
paragraph_id: "2a",
nested_vector: [0.23, 1.24, 0.65],
},
],
topic: ["information_retrieval"],
},
});
console.log(response2);
const response3 = await client.index({
index: "retrievers_example_nested",
id: 3,
document: {
topic: ["ai"],
},
});
console.log(response3);
const response4 = await client.indices.refresh({
index: "retrievers_example_nested",
});
console.log(response4);
----

@@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cluster.getSettings({
flat_settings: "true",
filter_path: "transient",
});
console.log(response);
----

@@ -0,0 +1,154 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.putIndexTemplate({
name: "datastream_template",
index_patterns: ["datastream*"],
data_stream: {},
template: {
lifecycle: {
downsampling: [
{
after: "1m",
fixed_interval: "1h",
},
],
},
settings: {
index: {
mode: "time_series",
},
},
mappings: {
properties: {
"@timestamp": {
type: "date",
},
kubernetes: {
properties: {
container: {
properties: {
cpu: {
properties: {
usage: {
properties: {
core: {
properties: {
ns: {
type: "long",
},
},
},
limit: {
properties: {
pct: {
type: "float",
},
},
},
nanocores: {
type: "long",
time_series_metric: "gauge",
},
node: {
properties: {
pct: {
type: "float",
},
},
},
},
},
},
},
memory: {
properties: {
available: {
properties: {
bytes: {
type: "long",
time_series_metric: "gauge",
},
},
},
majorpagefaults: {
type: "long",
},
pagefaults: {
type: "long",
time_series_metric: "gauge",
},
rss: {
properties: {
bytes: {
type: "long",
time_series_metric: "gauge",
},
},
},
usage: {
properties: {
bytes: {
type: "long",
time_series_metric: "gauge",
},
limit: {
properties: {
pct: {
type: "float",
},
},
},
node: {
properties: {
pct: {
type: "float",
},
},
},
},
},
workingset: {
properties: {
bytes: {
type: "long",
time_series_metric: "gauge",
},
},
},
},
},
name: {
type: "keyword",
},
start_time: {
type: "date",
},
},
},
host: {
type: "keyword",
time_series_dimension: true,
},
namespace: {
type: "keyword",
time_series_dimension: true,
},
node: {
type: "keyword",
time_series_dimension: true,
},
pod: {
type: "keyword",
time_series_dimension: true,
},
},
},
},
},
},
});
console.log(response);
----

@@ -0,0 +1,16 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "POST",
path: "/_query_rules/my-ruleset/_test",
body: {
match_criteria: {
query_string: "puggles",
},
},
});
console.log(response);
----

@@ -0,0 +1,44 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
query_string: {
query: "(information retrieval) OR (artificial intelligence)",
default_field: "text",
},
},
},
},
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
collapse: {
field: "year",
inner_hits: {
name: "topic related documents",
_source: ["year"],
},
},
_source: false,
});
console.log(response);
----

@@ -0,0 +1,94 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "retrievers_example",
mappings: {
properties: {
vector: {
type: "dense_vector",
dims: 3,
similarity: "l2_norm",
index: true,
},
text: {
type: "text",
},
year: {
type: "integer",
},
topic: {
type: "keyword",
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "retrievers_example",
id: 1,
document: {
vector: [0.23, 0.67, 0.89],
text: "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences in data-rich environments.",
year: 2024,
topic: ["llm", "ai", "information_retrieval"],
},
});
console.log(response1);
const response2 = await client.index({
index: "retrievers_example",
id: 2,
document: {
vector: [0.12, 0.56, 0.78],
text: "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved health outcomes.",
year: 2023,
topic: ["ai", "medicine"],
},
});
console.log(response2);
const response3 = await client.index({
index: "retrievers_example",
id: 3,
document: {
vector: [0.45, 0.32, 0.91],
text: "AI is redefining security by enabling advanced threat detection, proactive risk analysis, and dynamic defenses against increasingly sophisticated cyber threats.",
year: 2024,
topic: ["ai", "security"],
},
});
console.log(response3);
const response4 = await client.index({
index: "retrievers_example",
id: 4,
document: {
vector: [0.34, 0.21, 0.98],
text: "Elastic introduces Elastic AI Assistant, the open, generative AI sidekick powered by ESRE to democratize cybersecurity and enable users of every skill level.",
year: 2023,
topic: ["ai", "elastic", "assistant"],
},
});
console.log(response4);
const response5 = await client.index({
index: "retrievers_example",
id: 5,
document: {
vector: [0.11, 0.65, 0.47],
text: "Learn how to spin up a deployment of our hosted Elasticsearch Service and use Elastic Observability to gain deeper insight into the behavior of your applications and systems.",
year: 2024,
topic: ["documentation", "observability", "elastic"],
},
});
console.log(response5);
const response6 = await client.indices.refresh({
index: "retrievers_example",
});
console.log(response6);
----