Auto-generated code for 8.16 (#2502)
docs/doc_examples/015e6e6132b6d6d44bddb06bc3b316ed.asciidoc (new file, 46 lines)
@@ -0,0 +1,46 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              range: {
                year: {
                  gt: 2023,
                },
              },
            },
          },
        },
        {
          standard: {
            query: {
              term: {
                topic: "elastic",
              },
            },
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: false,
  aggs: {
    topics: {
      terms: {
        field: "topic",
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/0165d22da5f2fc7678392b31d8eb5566.asciidoc (new file, 18 lines)
@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "rerank",
  inference_id: "my-rerank-model",
  inference_config: {
    service: "cohere",
    service_settings: {
      model_id: "rerank-english-v3.0",
      api_key: "{{COHERE_API_KEY}}",
    },
  },
});
console.log(response);
----
docs/doc_examples/0bc6155e0c88062a4d8490da49db3aa8.asciidoc (new file, 49 lines)
@@ -0,0 +1,49 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example_nested",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              nested: {
                path: "nested_field",
                inner_hits: {
                  name: "nested_vector",
                  _source: false,
                  fields: ["nested_field.paragraph_id"],
                },
                query: {
                  knn: {
                    field: "nested_field.nested_vector",
                    query_vector: [1, 0, 0.5],
                    k: 10,
                  },
                },
              },
            },
          },
        },
        {
          standard: {
            query: {
              term: {
                topic: "ai",
              },
            },
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: ["topic"],
});
console.log(response);
----
@@ -3,8 +3,12 @@
 
 [source, js]
 ----
-const response = await client.esql.asyncQuery({
-  format: "json",
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_query/async",
+  querystring: {
+    format: "json",
+  },
   body: {
     query:
       "\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
docs/doc_examples/0d689ac6e78be5d438f9b5d441be2b44.asciidoc (new file, 57 lines)
@@ -0,0 +1,57 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              term: {
                topic: "elastic",
              },
            },
          },
        },
        {
          rrf: {
            retrievers: [
              {
                standard: {
                  query: {
                    query_string: {
                      query:
                        "(information retrieval) OR (artificial intelligence)",
                      default_field: "text",
                    },
                  },
                },
              },
              {
                knn: {
                  field: "vector",
                  query_vector: [0.23, 0.67, 0.89],
                  k: 3,
                  num_candidates: 5,
                },
              },
            ],
            rank_window_size: 10,
            rank_constant: 1,
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: false,
  size: 1,
  explain: true,
});
console.log(response);
----
@@ -3,8 +3,9 @@
 
 [source, js]
 ----
-const response = await client.searchApplication.renderQuery({
-  name: "my-app",
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_application/search_application/my-app/_render_query",
   body: {
     params: {
       query_string: "my first query",
@@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
     script: {
       lang: "mustache",
       source:
-        '\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
+        '\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
       params: {
         query: "",
         _es_filters: {},
@@ -16,7 +16,7 @@ const response = await client.search({
       },
     },
     field: "text",
-    inference_id: "my-cohere-rerank-model",
+    inference_id: "elastic-rerank",
     inference_text: "How often does the moon hide the sun?",
     rank_window_size: 100,
     min_score: 0.5,
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.simulate.ingest({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_ingest/_simulate",
   body: {
     docs: [
       {
@@ -3,8 +3,8 @@
 
 [source, js]
 ----
-const response = await client.cluster.getSettings({
-  flat_settings: "true",
+const response = await client.indices.rollover({
+  alias: "datastream",
 });
 console.log(response);
 ----
@@ -1,28 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.esql.query({
-  format: "txt",
-  query:
-    "\n FROM library\n | SORT page_count DESC\n | KEEP name, author\n | LOOKUP era ON author\n | LIMIT 5\n ",
-  tables: {
-    era: {
-      author: {
-        keyword: [
-          "Frank Herbert",
-          "Peter F. Hamilton",
-          "Vernor Vinge",
-          "Alastair Reynolds",
-          "James S.A. Corey",
-        ],
-      },
-      era: {
-        keyword: ["The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"],
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.oidcLogout({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/oidc/logout",
   body: {
     token:
       "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
@@ -3,10 +3,12 @@
 
 [source, js]
 ----
-const response = await client.esql.asyncQueryGet({
-  id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
-  wait_for_completion_timeout: "30s",
-  body: null,
+const response = await client.transport.request({
+  method: "GET",
+  path: "/_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
+  querystring: {
+    wait_for_completion_timeout: "30s",
+  },
 });
 console.log(response);
 ----
@@ -1,16 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.cluster.putSettings({
-  persistent: {
-    "cluster.indices.close.enable": false,
-    "indices.recovery.max_bytes_per_sec": "50mb",
-  },
-  transient: {
-    "*": null,
-  },
-});
-console.log(response);
-----
docs/doc_examples/30d051f534aeb884176eedb2c11dac85.asciidoc (new file, 23 lines)
@@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "rerank",
  inference_id: "my-elastic-rerank",
  inference_config: {
    service: "elasticsearch",
    service_settings: {
      model_id: ".rerank-v1",
      num_threads: 1,
      adaptive_allocations: {
        enabled: true,
        min_number_of_allocations: 1,
        max_number_of_allocations: 4,
      },
    },
  },
});
console.log(response);
----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.esql.asyncQuery({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_query/async",
   body: {
     query:
       "\n FROM library\n | EVAL year = DATE_TRUNC(1 YEARS, release_date)\n | STATS MAX(page_count) BY year\n | SORT year\n | LIMIT 5\n ",
@@ -3,9 +3,9 @@
 
 [source, js]
 ----
-const response = await client.esql.asyncQueryGet({
-  id: "FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
-  body: null,
+const response = await client.transport.request({
+  method: "GET",
+  path: "/_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
 });
 console.log(response);
 ----
@@ -3,9 +3,9 @@
 
 [source, js]
 ----
-const response = await client.inference.streamInference({
-  task_type: "completion",
-  inference_id: "openai-completion",
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_inference/completion/openai-completion/_stream",
   body: {
     input: "What is Elastic?",
   },
@@ -8,11 +8,6 @@ const response = await client.search({
     query: {
       bool: {
         must: [
-          {
-            term: {
-              "category.keyword": "Main Course",
-            },
-          },
           {
             term: {
               tags: "vegetarian",
@@ -27,6 +22,11 @@ const response = await client.search({
           },
         ],
         should: [
+          {
+            term: {
+              category: "Main Course",
+            },
+          },
           {
             multi_match: {
               query: "curry spicy",
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.oidcPrepareAuthentication({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/oidc/prepare",
   body: {
     realm: "oidc1",
     state: "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO",
@@ -3,8 +3,12 @@
 
 [source, js]
 ----
-const response = await client.esql.asyncQuery({
-  format: "json",
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_query/async",
+  querystring: {
+    format: "json",
+  },
   body: {
     query:
       "\n FROM cluster_one:my-index*,cluster_two:logs*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
@@ -9,7 +9,6 @@ const response = await client.indices.create({
     properties: {
       inference_field: {
         type: "semantic_text",
-        inference_id: "my-elser-endpoint",
       },
     },
   },
@@ -45,7 +45,7 @@ console.log(response);
 
 const response1 = await client.indices.putIndexTemplate({
   name: 2,
-  index_patterns: ["k8s*"],
+  index_patterns: ["k9s*"],
   composed_of: ["destination_template"],
   data_stream: {},
 });
@@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
     script: {
       lang: "mustache",
       source:
-        '\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
+        '\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
       params: {
         query: "",
         _es_filters: {},
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.bulkUpdateApiKeys({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/api_key/_bulk_update",
   body: {
     ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
   },
docs/doc_examples/76e02434835630cb830724beb92df354.asciidoc (new file, 44 lines)
@@ -0,0 +1,44 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        {
          knn: {
            field: "vector",
            query_vector: [0.23, 0.67, 0.89],
            k: 3,
            num_candidates: 5,
          },
        },
        {
          text_similarity_reranker: {
            retriever: {
              standard: {
                query: {
                  term: {
                    topic: "ai",
                  },
                },
              },
            },
            field: "text",
            inference_id: "my-rerank-model",
            inference_text:
              "Can I use generative AI to identify user intent and improve search relevance?",
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: false,
});
console.log(response);
----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.textStructure.findMessageStructure({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_text_structure/find_message_structure",
   body: {
     messages: [
       "[2024-03-05T10:52:36,256][INFO ][o.a.l.u.VectorUtilPanamaProvider] [laptop] Java vector incubator API enabled; uses preferredBitSize=128",
docs/doc_examples/78043831fd32004a82930c8ac8a1d809.asciidoc (new file, 46 lines)
@@ -0,0 +1,46 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    text_similarity_reranker: {
      retriever: {
        rrf: {
          retrievers: [
            {
              standard: {
                query: {
                  query_string: {
                    query:
                      "(information retrieval) OR (artificial intelligence)",
                    default_field: "text",
                  },
                },
              },
            },
            {
              knn: {
                field: "vector",
                query_vector: [0.23, 0.67, 0.89],
                k: 3,
                num_candidates: 5,
              },
            },
          ],
          rank_window_size: 10,
          rank_constant: 1,
        },
      },
      field: "text",
      inference_id: "my-rerank-model",
      inference_text:
        "What are the state of the art applications of AI in information retrieval?",
    },
  },
  _source: false,
});
console.log(response);
----
docs/doc_examples/79d206a528be704050a437adce2496dd.asciidoc (new file, 23 lines)
@@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "rerank",
  inference_id: "my-elastic-rerank",
  inference_config: {
    service: "elasticsearch",
    service_settings: {
      model_id: ".rerank-v1",
      num_threads: 1,
      adaptive_allocations: {
        enabled: true,
        min_number_of_allocations: 1,
        max_number_of_allocations: 10,
      },
    },
  },
});
console.log(response);
----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.indices.create({
-  index: "my-index-000002",
+  index: "my-index-000003",
   mappings: {
     properties: {
       inference_field: {
docs/doc_examples/7ba29f0be2297b54a640b0a17d7ef5ca.asciidoc (new file, 11 lines)
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.transport.request({
  method: "DELETE",
  path: "/_ingest/ip_location/database/my-database-id",
});
console.log(response);
----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.bulkUpdateApiKeys({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/api_key/_bulk_update",
   body: {
     ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
     role_descriptors: {
@@ -9,7 +9,6 @@ const response = await client.indices.create({
     properties: {
      content: {
        type: "semantic_text",
-       inference_id: "my-elser-endpoint",
      },
    },
  },
docs/doc_examples/91e106a2affbc8df32cd940684a779ed.asciidoc (new file, 17 lines)
@@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.transport.request({
  method: "PUT",
  path: "/_ingest/ip_location/database/my-database-1",
  body: {
    name: "GeoIP2-Domain",
    maxmind: {
      account_id: "1234567",
    },
  },
});
console.log(response);
----
docs/doc_examples/96e88611f99e6834bd64b58dc8a282c1.asciidoc (new file, 18 lines)
@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000002",
  mappings: {
    properties: {
      inference_field: {
        type: "semantic_text",
        inference_id: "my-openai-endpoint",
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/97c6c07f46f4177f0565a04bc50924a3.asciidoc (new file, 37 lines)
@@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              query_string: {
                query: "(information retrieval) OR (artificial intelligence)",
                default_field: "text",
              },
            },
          },
        },
        {
          knn: {
            field: "vector",
            query_vector: [0.23, 0.67, 0.89],
            k: 3,
            num_candidates: 5,
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: false,
});
console.log(response);
----
docs/doc_examples/99fb82d49ac477e6a9dfdd71f9465374.asciidoc (new file, 11 lines)
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.transport.request({
  method: "DELETE",
  path: "/_ingest/ip_location/database/example-database-id",
});
console.log(response);
----
@@ -3,9 +3,9 @@
 
 [source, js]
 ----
-const response = await client.searchApplication.postBehavioralAnalyticsEvent({
-  collection_name: "my_analytics_collection",
-  event_type: "search_click",
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_application/analytics/my_analytics_collection/event/search_click",
   body: {
     session: {
       id: "1797ca95-91c9-4e2e-b1bd-9c38e6f386a9",
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.oidcAuthenticate({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/oidc/authenticate",
   body: {
     redirect_uri:
       "https://oidc-kibana.elastic.co:5603/api/security/oidc/callback?code=jtI3Ntt8v3_XvcLzCFGq&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I",
@@ -3,8 +3,9 @@
 
 [source, js]
 ----
-const response = await client.searchApplication.renderQuery({
-  name: "my_search_application",
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_application/search_application/my_search_application/_render_query",
   body: {
     params: {
       query_string: "rock climbing",
docs/doc_examples/a9f14efc26fdd3c37a71f06c310163d9.asciidoc (new file, 27 lines)
@@ -0,0 +1,27 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  retriever: {
    text_similarity_reranker: {
      retriever: {
        standard: {
          query: {
            match: {
              text: "How often does the moon hide the sun?",
            },
          },
        },
      },
      field: "text",
      inference_id: "my-elastic-rerank",
      inference_text: "How often does the moon hide the sun?",
      rank_window_size: 100,
      min_score: 0.5,
    },
  },
});
console.log(response);
----
docs/doc_examples/ac22cc2b0f4ad659055feed2852a2d59.asciidoc (new file, 37 lines)
@@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    text_similarity_reranker: {
      retriever: {
        text_similarity_reranker: {
          retriever: {
            knn: {
              field: "vector",
              query_vector: [0.23, 0.67, 0.89],
              k: 3,
              num_candidates: 5,
            },
          },
          rank_window_size: 100,
          field: "text",
          inference_id: "my-rerank-model",
          inference_text:
            "What are the state of the art applications of AI in information retrieval?",
        },
      },
      rank_window_size: 10,
      field: "text",
      inference_id: "my-other-more-expensive-rerank-model",
      inference_text:
        "Applications of Large Language Models in technology and their impact on user satisfaction",
    },
  },
  _source: false,
});
console.log(response);
----
@@ -12,7 +12,7 @@ const response = await client.inference.put({
     adaptive_allocations: {
       enabled: true,
       min_number_of_allocations: 1,
-      max_number_of_allocations: 10,
+      max_number_of_allocations: 4,
     },
     num_threads: 1,
     model_id: ".elser_model_2",
@@ -3,9 +3,9 @@
 
 [source, js]
 ----
-const response = await client.searchApplication.renderQuery({
-  name: "my_search_application",
-  body: null,
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_application/search_application/my_search_application/_render_query",
 });
 console.log(response);
 ----
|||||||
@ -208,10 +208,13 @@ const response = await client.bulk({
|
|||||||
});
|
});
|
||||||
console.log(response);
|
console.log(response);
|
||||||
|
|
||||||
const response1 = await client.textStructure.findFieldStructure({
|
const response1 = await client.transport.request({
|
||||||
index: "test-logs",
|
method: "GET",
|
||||||
field: "message",
|
path: "/_text_structure/find_field_structure",
|
||||||
body: null,
|
querystring: {
|
||||||
|
index: "test-logs",
|
||||||
|
field: "message",
|
||||||
|
},
|
||||||
});
|
});
|
||||||
console.log(response1);
|
console.log(response1);
|
||||||
----
|
----
|
||||||
|
|||||||
docs/doc_examples/bb2ba5d1885f87506f90dbb002e518f4.asciidoc (new file, 45 lines)
@@ -0,0 +1,45 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              query_string: {
                query: "(information retrieval) OR (artificial intelligence)",
                default_field: "text",
              },
            },
          },
        },
        {
          knn: {
            field: "vector",
            query_vector: [0.23, 0.67, 0.89],
            k: 3,
            num_candidates: 5,
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  highlight: {
    fields: {
      text: {
        fragment_size: 150,
        number_of_fragments: 3,
      },
    },
  },
  _source: false,
});
console.log(response);
----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.simulate.ingest({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_ingest/_simulate",
   body: {
     docs: [
       {
docs/doc_examples/bee3fda7bb07086243424b62e5b16ca7.asciidoc (new file, 83 lines)
@@ -0,0 +1,83 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "retrievers_example_nested",
  mappings: {
    properties: {
      nested_field: {
        type: "nested",
        properties: {
          paragraph_id: {
            type: "keyword",
          },
          nested_vector: {
            type: "dense_vector",
            dims: 3,
            similarity: "l2_norm",
            index: true,
          },
        },
      },
      topic: {
        type: "keyword",
      },
    },
  },
});
console.log(response);

const response1 = await client.index({
  index: "retrievers_example_nested",
  id: 1,
  document: {
    nested_field: [
      {
        paragraph_id: "1a",
        nested_vector: [-1.12, -0.59, 0.78],
      },
      {
        paragraph_id: "1b",
        nested_vector: [-0.12, 1.56, 0.42],
      },
      {
        paragraph_id: "1c",
        nested_vector: [1, -1, 0],
      },
    ],
    topic: ["ai"],
  },
});
console.log(response1);

const response2 = await client.index({
  index: "retrievers_example_nested",
  id: 2,
  document: {
    nested_field: [
      {
        paragraph_id: "2a",
        nested_vector: [0.23, 1.24, 0.65],
      },
    ],
    topic: ["information_retrieval"],
  },
});
console.log(response2);

const response3 = await client.index({
  index: "retrievers_example_nested",
  id: 3,
  document: {
    topic: ["ai"],
  },
});
console.log(response3);

const response4 = await client.indices.refresh({
  index: "retrievers_example_nested",
});
console.log(response4);
----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.bulkUpdateApiKeys({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/api_key/_bulk_update",
   body: {
     ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
     role_descriptors: {},
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.simulate.ingest({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_ingest/_simulate",
   body: {
     docs: [
       {
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.oidcPrepareAuthentication({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/oidc/prepare",
   body: {
     iss: "http://127.0.0.1:8080",
     login_hint: "this_is_an_opaque_string",
docs/doc_examples/d4158d486e7fee2702a14068b69e3b33.asciidoc (new file, 154 lines)
@@ -0,0 +1,154 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.putIndexTemplate({
  name: "datastream_template",
  index_patterns: ["datastream*"],
  data_stream: {},
  template: {
    lifecycle: {
      downsampling: [
        {
          after: "1m",
          fixed_interval: "1h",
        },
      ],
    },
    settings: {
      index: {
        mode: "time_series",
      },
    },
    mappings: {
      properties: {
        "@timestamp": {
          type: "date",
        },
        kubernetes: {
          properties: {
            container: {
              properties: {
                cpu: {
                  properties: {
                    usage: {
                      properties: {
                        core: {
                          properties: {
                            ns: {
                              type: "long",
                            },
                          },
                        },
                        limit: {
                          properties: {
                            pct: {
                              type: "float",
                            },
                          },
                        },
                        nanocores: {
                          type: "long",
                          time_series_metric: "gauge",
                        },
                        node: {
                          properties: {
                            pct: {
                              type: "float",
                            },
                          },
                        },
                      },
                    },
                  },
                },
                memory: {
                  properties: {
                    available: {
                      properties: {
                        bytes: {
                          type: "long",
                          time_series_metric: "gauge",
                        },
                      },
                    },
                    majorpagefaults: {
                      type: "long",
                    },
                    pagefaults: {
                      type: "long",
                      time_series_metric: "gauge",
                    },
                    rss: {
                      properties: {
                        bytes: {
                          type: "long",
                          time_series_metric: "gauge",
                        },
                      },
                    },
                    usage: {
                      properties: {
                        bytes: {
                          type: "long",
                          time_series_metric: "gauge",
                        },
                        limit: {
                          properties: {
                            pct: {
                              type: "float",
                            },
                          },
                        },
                        node: {
                          properties: {
                            pct: {
                              type: "float",
                            },
                          },
                        },
                      },
                    },
                    workingset: {
                      properties: {
                        bytes: {
                          type: "long",
                          time_series_metric: "gauge",
                        },
                      },
                    },
                  },
                },
                name: {
                  type: "keyword",
                },
                start_time: {
                  type: "date",
                },
              },
            },
            host: {
              type: "keyword",
              time_series_dimension: true,
            },
            namespace: {
              type: "keyword",
              time_series_dimension: true,
            },
            node: {
              type: "keyword",
              time_series_dimension: true,
            },
            pod: {
              type: "keyword",
              time_series_dimension: true,
            },
          },
        },
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/d4df39f72d3a3b80cd4042f6a21c3f19.asciidoc (new file, 15 lines)
@@ -0,0 +1,15 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.transport.request({
  method: "PUT",
  path: "/_ingest/ip_location/database/my-database-2",
  body: {
    name: "standard_location",
    ipinfo: {},
  },
});
console.log(response);
----
@@ -3,9 +3,9 @@
 
 [source, js]
 ----
-const response = await client.cluster.getSettings({
-  flat_settings: "true",
-  filter_path: "transient",
+const response = await client.transport.request({
+  method: "GET",
+  path: "/_ingest/ip_location/database/my-database-id",
 });
 console.log(response);
 ----
@@ -3,6 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.getSettings();
+const response = await client.transport.request({
+  method: "GET",
+  path: "/_security/settings",
+});
 console.log(response);
 ----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.security.oidcPrepareAuthentication({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_security/oidc/prepare",
   body: {
     realm: "oidc1",
   },
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.simulate.ingest({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_ingest/_simulate",
   body: {
     docs: [
       {
docs/doc_examples/e6f6d3aeea7ecea47cfd5c3d727f7004.asciidoc (new file, 44 lines)
@@ -0,0 +1,44 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              query_string: {
                query: "(information retrieval) OR (artificial intelligence)",
                default_field: "text",
              },
            },
          },
        },
        {
          knn: {
            field: "vector",
            query_vector: [0.23, 0.67, 0.89],
            k: 3,
            num_candidates: 5,
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  collapse: {
    field: "year",
    inner_hits: {
      name: "topic related documents",
      _source: ["year"],
    },
  },
  _source: false,
});
console.log(response);
----
docs/doc_examples/ee05714a83d75fb6858e3b9fcbeb8f8b.asciidoc (new file, 94 lines)
@@ -0,0 +1,94 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "retrievers_example",
  mappings: {
    properties: {
      vector: {
        type: "dense_vector",
        dims: 3,
        similarity: "l2_norm",
        index: true,
      },
      text: {
        type: "text",
      },
      year: {
        type: "integer",
      },
      topic: {
        type: "keyword",
      },
    },
  },
});
console.log(response);

const response1 = await client.index({
  index: "retrievers_example",
  id: 1,
  document: {
    vector: [0.23, 0.67, 0.89],
    text: "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences in data-rich environments.",
    year: 2024,
    topic: ["llm", "ai", "information_retrieval"],
  },
});
console.log(response1);

const response2 = await client.index({
  index: "retrievers_example",
  id: 2,
  document: {
    vector: [0.12, 0.56, 0.78],
    text: "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved health outcomes.",
    year: 2023,
    topic: ["ai", "medicine"],
  },
});
console.log(response2);

const response3 = await client.index({
  index: "retrievers_example",
  id: 3,
  document: {
    vector: [0.45, 0.32, 0.91],
    text: "AI is redefining security by enabling advanced threat detection, proactive risk analysis, and dynamic defenses against increasingly sophisticated cyber threats.",
    year: 2024,
    topic: ["ai", "security"],
  },
});
console.log(response3);

const response4 = await client.index({
  index: "retrievers_example",
  id: 4,
  document: {
    vector: [0.34, 0.21, 0.98],
    text: "Elastic introduces Elastic AI Assistant, the open, generative AI sidekick powered by ESRE to democratize cybersecurity and enable users of every skill level.",
    year: 2023,
    topic: ["ai", "elastic", "assistant"],
  },
});
console.log(response4);

const response5 = await client.index({
  index: "retrievers_example",
  id: 5,
  document: {
    vector: [0.11, 0.65, 0.47],
    text: "Learn how to spin up a deployment of our hosted Elasticsearch Service and use Elastic Observability to gain deeper insight into the behavior of your applications and systems.",
    year: 2024,
    topic: ["documentation", "observability", "elastic"],
  },
});
console.log(response5);

const response6 = await client.indices.refresh({
  index: "retrievers_example",
});
console.log(response6);
----
@@ -3,7 +3,9 @@
 
 [source, js]
 ----
-const response = await client.connector.secretPost({
+const response = await client.transport.request({
+  method: "POST",
+  path: "/_connector/_secret",
   body: {
     value: "encoded_api_key",
   },
@@ -818,6 +818,8 @@ Random by default.
 ** *`expand_wildcards` (Optional, Enum("all" | "open" | "closed" | "hidden" | "none") | Enum("all" | "open" | "closed" | "hidden" | "none")[])*: Type of index that wildcard patterns can match.
 If the request can target data streams, this argument determines whether wildcard expressions match hidden data streams.
 Supports a list of values, such as `open,hidden`. Valid values are: `all`, `open`, `closed`, `hidden`, `none`.
+** *`allow_partial_search_results` (Optional, boolean)*: If `false`, creating a point in time request when a shard is missing or unavailable will throw an exception.
+If `true`, the point in time will contain all the shards that are available at the time of the request.
 
 [discrete]
 === ping
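For orientation only, a minimal sketch of how the newly documented option might be passed from the JavaScript client; the index name and keep_alive value are illustrative, not taken from this diff.

[source,js]
----
// Hypothetical usage sketch: open a point in time that tolerates missing shards.
// "my-index-000001" and "1m" are illustrative values.
const pit = await client.openPointInTime({
  index: "my-index-000001",
  keep_alive: "1m",
  allow_partial_search_results: true,
});
console.log(pit.id);
----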
@@ -1560,6 +1562,8 @@ client.asyncSearch.status({ id })
 
 * *Request (object):*
 ** *`id` (string)*: A unique identifier for the async search.
+** *`keep_alive` (Optional, string | -1 | 0)*: Specifies how long the async search needs to be available.
+Ongoing async searches and any saved search results are deleted after this period.
 
 [discrete]
 ==== submit
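For orientation only, a minimal sketch of how the newly documented `keep_alive` argument might be used; the id value is an illustrative placeholder.

[source,js]
----
// Hypothetical usage sketch: check async search status and extend its retention.
const status = await client.asyncSearch.status({
  id: "<async_search_id>", // illustrative placeholder
  keep_alive: "5d",
});
console.log(status);
----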
@@ -1662,7 +1666,6 @@ A partial reduction is performed every time the coordinating node has received a
 ** *`pre_filter_shard_size` (Optional, number)*: The default value cannot be changed, which enforces the execution of a pre-filter roundtrip to retrieve statistics from each shard so that the ones that surely don’t hold any document matching the query get skipped.
 ** *`request_cache` (Optional, boolean)*: Specify if request cache should be used for this request or not, defaults to true
 ** *`routing` (Optional, string)*: A list of specific routing values
-** *`scroll` (Optional, string | -1 | 0)*
 ** *`search_type` (Optional, Enum("query_then_fetch" | "dfs_query_then_fetch"))*: Search operation type
 ** *`suggest_field` (Optional, string)*: Specifies which field to use for suggestions.
 ** *`suggest_mode` (Optional, Enum("missing" | "popular" | "always"))*: Specify suggest mode
@@ -1693,6 +1696,9 @@ client.autoscaling.deleteAutoscalingPolicy({ name })
 
 * *Request (object):*
 ** *`name` (string)*: the name of the autoscaling policy
+** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node.
+If no response is received before the timeout expires, the request fails and returns an error.
+** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error.
 
 [discrete]
 ==== get_autoscaling_capacity
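For orientation only, a minimal sketch of how the newly documented timeout arguments might be passed; the policy name is illustrative.

[source,js]
----
// Hypothetical usage sketch for the new master_timeout/timeout arguments.
const response = await client.autoscaling.deleteAutoscalingPolicy({
  name: "my_autoscaling_policy", // illustrative name
  master_timeout: "30s",
  timeout: "30s",
});
console.log(response);
----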
@@ -1714,9 +1720,15 @@ Do not use this information to make autoscaling decisions.
 {ref}/autoscaling-get-autoscaling-capacity.html[Endpoint documentation]
 [source,ts]
 ----
-client.autoscaling.getAutoscalingCapacity()
+client.autoscaling.getAutoscalingCapacity({ ... })
 ----
 
+[discrete]
+==== Arguments
+
+* *Request (object):*
+** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node.
+If no response is received before the timeout expires, the request fails and returns an error.
+
 [discrete]
 ==== get_autoscaling_policy
||||||
@ -1735,6 +1747,8 @@ client.autoscaling.getAutoscalingPolicy({ name })
|
|||||||
|
|
||||||
* *Request (object):*
|
* *Request (object):*
|
||||||
** *`name` (string)*: the name of the autoscaling policy
|
** *`name` (string)*: the name of the autoscaling policy
|
||||||
|
** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node.
|
||||||
|
If no response is received before the timeout expires, the request fails and returns an error.
|
||||||
|
|
||||||
[discrete]
|
[discrete]
|
||||||
==== put_autoscaling_policy
|
==== put_autoscaling_policy
|
||||||
@ -1754,6 +1768,9 @@ client.autoscaling.putAutoscalingPolicy({ name })
|
|||||||
* *Request (object):*
|
* *Request (object):*
|
||||||
** *`name` (string)*: the name of the autoscaling policy
|
** *`name` (string)*: the name of the autoscaling policy
|
||||||
** *`policy` (Optional, { roles, deciders })*
|
** *`policy` (Optional, { roles, deciders })*
|
||||||
|
** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node.
|
||||||
|
If no response is received before the timeout expires, the request fails and returns an error.
|
||||||
|
** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error.
|
||||||
|
|
||||||
[discrete]
|
[discrete]
|
||||||
=== cat
|
=== cat
|
||||||
|
|||||||
@@ -884,6 +884,7 @@ export interface OpenPointInTimeRequest extends RequestBase {
   preference?: string
   routing?: Routing
   expand_wildcards?: ExpandWildcards
+  allow_partial_search_results?: boolean
   index_filter?: QueryDslQueryContainer
 }
 
@@ -6650,6 +6651,7 @@ export type AsyncSearchGetResponse<TDocument = unknown, TAggregations = Record<A
 
 export interface AsyncSearchStatusRequest extends RequestBase {
   id: Id
+  keep_alive?: Duration
 }
 
 export type AsyncSearchStatusResponse = AsyncSearchStatusStatusResponseBase
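The new `keep_alive` on AsyncSearchStatusRequest lets a status check extend how long the search is retained; a sketch assuming the search is submitted first (the submit parameters are placeholders):

[source,js]
----
// Submit an async search, then check it and keep its results for another five minutes.
const submitted = await client.asyncSearch.submit({
  index: "my-index",
  wait_for_completion_timeout: "0s",
  query: { match_all: {} },
});

const status = await client.asyncSearch.status({
  id: submitted.id,
  keep_alive: "5m",
});
console.log(status.is_running);
----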
@@ -6683,7 +6685,6 @@ export interface AsyncSearchSubmitRequest extends RequestBase {
   pre_filter_shard_size?: long
   request_cache?: boolean
   routing?: Routing
-  scroll?: Duration
   search_type?: SearchType
   suggest_field?: Field
   suggest_mode?: SuggestMode
@@ -6739,6 +6740,8 @@ export interface AutoscalingAutoscalingPolicy {
 
 export interface AutoscalingDeleteAutoscalingPolicyRequest extends RequestBase {
   name: Name
+  master_timeout?: Duration
+  timeout?: Duration
 }
 
 export type AutoscalingDeleteAutoscalingPolicyResponse = AcknowledgedResponseBase
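The delete request gains the same pair of timeouts; a sketch with placeholder values:

[source,js]
----
// Remove an autoscaling policy, bounding both the master lookup and the overall request.
const response = await client.autoscaling.deleteAutoscalingPolicy({
  name: "my_autoscaling_policy",
  master_timeout: "30s",
  timeout: "30s",
});
console.log(response.acknowledged);
----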
@@ -6771,6 +6774,7 @@ export interface AutoscalingGetAutoscalingCapacityAutoscalingResources {
 }
 
 export interface AutoscalingGetAutoscalingCapacityRequest extends RequestBase {
+  master_timeout?: Duration
 }
 
 export interface AutoscalingGetAutoscalingCapacityResponse {
@@ -6779,12 +6783,15 @@ export interface AutoscalingGetAutoscalingCapacityResponse {
 
 export interface AutoscalingGetAutoscalingPolicyRequest extends RequestBase {
   name: Name
+  master_timeout?: Duration
 }
 
 export type AutoscalingGetAutoscalingPolicyResponse = AutoscalingAutoscalingPolicy
 
 export interface AutoscalingPutAutoscalingPolicyRequest extends RequestBase {
   name: Name
+  master_timeout?: Duration
+  timeout?: Duration
   policy?: AutoscalingAutoscalingPolicy
 }
 
@@ -16848,6 +16855,7 @@ export interface QueryRulesListRulesetsQueryRulesetListItem {
   ruleset_id: Id
   rule_total_count: integer
   rule_criteria_types_counts: Record<string, integer>
+  rule_type_counts: Record<string, integer>
 }
 
 export interface QueryRulesListRulesetsRequest extends RequestBase {
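The new `rule_type_counts` field appears on each item returned by the list-rulesets call; a sketch assuming the response exposes the items as a `results` array:

[source,js]
----
// Print how many rules of each type every query ruleset contains.
const response = await client.queryRules.listRulesets();
for (const ruleset of response.results) {
  console.log(ruleset.ruleset_id, ruleset.rule_type_counts);
}
----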
@@ -17249,21 +17257,26 @@ export interface SecurityAccess {
 }
 
 export interface SecurityApiKey {
-  creation?: long
-  expiration?: long
   id: Id
-  invalidated?: boolean
   name: Name
-  realm?: string
+  type: SecurityApiKeyType
+  creation: EpochTime<UnitMillis>
+  expiration?: EpochTime<UnitMillis>
+  invalidated: boolean
+  invalidation?: EpochTime<UnitMillis>
+  username: Username
+  realm: string
   realm_type?: string
-  username?: Username
-  profile_uid?: string
-  metadata?: Metadata
+  metadata: Metadata
   role_descriptors?: Record<string, SecurityRoleDescriptor>
   limited_by?: Record<string, SecurityRoleDescriptor>[]
+  access?: SecurityAccess
+  profile_uid?: string
   _sort?: SortResults
 }
 
+export type SecurityApiKeyType = 'rest' | 'cross_cluster'
+
 export interface SecurityApplicationGlobalUserPrivileges {
   manage: SecurityManageUserPrivileges
 }
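The reworked SecurityApiKey type mainly changes how key metadata reads back: `type`, `creation`, `invalidated`, `username`, `realm` and `metadata` are now always present. A sketch assuming the Get API key response wraps the keys in an `api_keys` array:

[source,js]
----
// List API keys owned by the calling user and read the now-required fields.
const response = await client.security.getApiKey({ owner: true });
for (const key of response.api_keys) {
  console.log(key.id, key.type, key.username, key.realm, key.invalidated);
}
----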
@@ -17464,11 +17477,16 @@ export interface SecurityActivateUserProfileRequest extends RequestBase {
 
 export type SecurityActivateUserProfileResponse = SecurityUserProfileWithMetadata
 
+export interface SecurityAuthenticateAuthenticateApiKey {
+  id: Id
+  name?: Name
+}
+
 export interface SecurityAuthenticateRequest extends RequestBase {
 }
 
 export interface SecurityAuthenticateResponse {
-  api_key?: SecurityApiKey
+  api_key?: SecurityAuthenticateAuthenticateApiKey
   authentication_realm: SecurityRealmInfo
   email?: string | null
   full_name?: Name | null
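Because the authenticate response now types `api_key` as the slimmer SecurityAuthenticateAuthenticateApiKey, only the key's `id` and optional `name` are available there; a minimal sketch:

[source,js]
----
// When the request was authenticated with an API key, only its id and name are reported.
const response = await client.security.authenticate();
if (response.api_key) {
  console.log(response.api_key.id, response.api_key.name);
}
----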
@@ -917,6 +917,7 @@ export interface OpenPointInTimeRequest extends RequestBase {
   preference?: string
   routing?: Routing
   expand_wildcards?: ExpandWildcards
+  allow_partial_search_results?: boolean
   /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */
   body?: {
     index_filter?: QueryDslQueryContainer
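This second set of hunks appears to cover the body-key variant of the same request types; the deprecation note above means nested keys such as `index_filter` should move to the top level rather than sit under `body`. A sketch of the preferred top-level form (index name and filter are placeholders):

[source,js]
----
// Preferred: top-level index_filter instead of the deprecated `body` wrapper.
const response = await client.openPointInTime({
  index: "my-index",
  keep_alive: "1m",
  allow_partial_search_results: true,
  index_filter: {
    range: { "@timestamp": { gte: "now-1d" } },
  },
});
console.log(response.id);
----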
@@ -6726,6 +6727,7 @@ export type AsyncSearchGetResponse<TDocument = unknown, TAggregations = Record<A
 
 export interface AsyncSearchStatusRequest extends RequestBase {
   id: Id
+  keep_alive?: Duration
 }
 
 export type AsyncSearchStatusResponse = AsyncSearchStatusStatusResponseBase
@@ -6759,7 +6761,6 @@ export interface AsyncSearchSubmitRequest extends RequestBase {
   pre_filter_shard_size?: long
   request_cache?: boolean
   routing?: Routing
-  scroll?: Duration
   search_type?: SearchType
   suggest_field?: Field
   suggest_mode?: SuggestMode
@@ -6818,6 +6819,8 @@ export interface AutoscalingAutoscalingPolicy {
 
 export interface AutoscalingDeleteAutoscalingPolicyRequest extends RequestBase {
   name: Name
+  master_timeout?: Duration
+  timeout?: Duration
 }
 
 export type AutoscalingDeleteAutoscalingPolicyResponse = AcknowledgedResponseBase
@@ -6850,6 +6853,7 @@ export interface AutoscalingGetAutoscalingCapacityAutoscalingResources {
 }
 
 export interface AutoscalingGetAutoscalingCapacityRequest extends RequestBase {
+  master_timeout?: Duration
 }
 
 export interface AutoscalingGetAutoscalingCapacityResponse {
@@ -6858,12 +6862,15 @@ export interface AutoscalingGetAutoscalingCapacityResponse {
 
 export interface AutoscalingGetAutoscalingPolicyRequest extends RequestBase {
   name: Name
+  master_timeout?: Duration
 }
 
 export type AutoscalingGetAutoscalingPolicyResponse = AutoscalingAutoscalingPolicy
 
 export interface AutoscalingPutAutoscalingPolicyRequest extends RequestBase {
   name: Name
+  master_timeout?: Duration
+  timeout?: Duration
   /** @deprecated The use of the 'body' key has been deprecated, use 'policy' instead. */
   body?: AutoscalingAutoscalingPolicy
 }
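In this variant the policy may still travel under the deprecated `body` key, with `policy` as its replacement in the main types; a migration sketch (the policy contents are illustrative):

[source,js]
----
// Deprecated form: passing the policy under `body`.
// await client.autoscaling.putAutoscalingPolicy({
//   name: "my_autoscaling_policy",
//   body: { roles: ["data_hot"], deciders: { fixed: {} } },
// });

// Preferred form: pass the policy via the `policy` property.
const response = await client.autoscaling.putAutoscalingPolicy({
  name: "my_autoscaling_policy",
  policy: { roles: ["data_hot"], deciders: { fixed: {} } },
});
console.log(response);
----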
@@ -17206,6 +17213,7 @@ export interface QueryRulesListRulesetsQueryRulesetListItem {
   ruleset_id: Id
   rule_total_count: integer
   rule_criteria_types_counts: Record<string, integer>
+  rule_type_counts: Record<string, integer>
 }
 
 export interface QueryRulesListRulesetsRequest extends RequestBase {
@@ -17629,21 +17637,26 @@ export interface SecurityAccess {
 }
 
 export interface SecurityApiKey {
-  creation?: long
-  expiration?: long
   id: Id
-  invalidated?: boolean
   name: Name
-  realm?: string
+  type: SecurityApiKeyType
+  creation: EpochTime<UnitMillis>
+  expiration?: EpochTime<UnitMillis>
+  invalidated: boolean
+  invalidation?: EpochTime<UnitMillis>
+  username: Username
+  realm: string
   realm_type?: string
-  username?: Username
-  profile_uid?: string
-  metadata?: Metadata
+  metadata: Metadata
   role_descriptors?: Record<string, SecurityRoleDescriptor>
   limited_by?: Record<string, SecurityRoleDescriptor>[]
+  access?: SecurityAccess
+  profile_uid?: string
   _sort?: SortResults
 }
 
+export type SecurityApiKeyType = 'rest' | 'cross_cluster'
+
 export interface SecurityApplicationGlobalUserPrivileges {
   manage: SecurityManageUserPrivileges
 }
@@ -17847,11 +17860,16 @@ export interface SecurityActivateUserProfileRequest extends RequestBase {
 
 export type SecurityActivateUserProfileResponse = SecurityUserProfileWithMetadata
 
+export interface SecurityAuthenticateAuthenticateApiKey {
+  id: Id
+  name?: Name
+}
+
 export interface SecurityAuthenticateRequest extends RequestBase {
 }
 
 export interface SecurityAuthenticateResponse {
-  api_key?: SecurityApiKey
+  api_key?: SecurityAuthenticateAuthenticateApiKey
   authentication_realm: SecurityRealmInfo
   email?: string | null
   full_name?: Name | null