Compare commits


19 Commits

Author SHA1 Message Date
42a6fe0f3b Auto-generated API code (#2852) 2025-05-27 15:14:45 +00:00
cdebf1aebf Auto-generated API code (#2831) 2025-05-19 19:05:40 +00:00
5f7596dd2c Auto-generated API code (#2816) 2025-05-05 11:16:28 -05:00
d97d8fd35c Auto-generated API code (#2807) 2025-04-28 10:41:16 -05:00
84731411ad Auto-generated API code (#2711) 2025-04-07 14:31:27 -05:00
aa9249bf25 Auto-generated API code (#2688) 2025-03-31 11:11:34 -05:00
c68be6f562 Auto-generated API code (#2679) 2025-03-24 12:20:43 -05:00
b50c2c2e5b Auto-generated API code (#2656) 2025-03-20 01:02:25 +00:00
586d91effb Auto-generated API code (#2643) 2025-03-07 15:05:10 -06:00
469c853a10 Bump to 8.17.1 (#2632) 2025-02-24 13:37:19 -06:00
01f4cf9ba7 Auto-generated API code (#2626) 2025-02-24 10:55:36 -06:00
85dea32310 Auto-generated API code (#2619) 2025-02-18 10:39:36 -06:00
528dd6b24a Auto-generated API code (#2608) 2025-02-10 13:07:52 -06:00
d540d7fdb2 Report correct transport connection type in telemetry (#2599) (#2603)
Fixes #2324

(cherry picked from commit 172180cb21)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2025-02-03 13:37:59 -06:00
07f75a4d9d Auto-generated API code (#2596) 2025-02-03 12:52:56 -06:00
4e6cbf96aa Auto-generated API code (#2579) 2025-01-28 11:52:01 -06:00
aa7d327d20 Auto-generated code for 8.17 (#2567) 2025-01-13 10:07:15 -06:00
6cdb08757d Auto-generated code for 8.17 (#2550) 2025-01-07 12:52:21 -06:00
48f369fe82 Update dependency @elastic/request-converter to v8.17.0 (#2555) (#2559)
Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com>
(cherry picked from commit e688f36396)

Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com>
2025-01-06 12:44:35 -06:00
184 changed files with 9240 additions and 2338 deletions

View File

@ -1,6 +1,22 @@
[[changelog-client]]
== Release notes
[discrete]
=== 8.17.1
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.17`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
===== Report correct transport connection type in telemetry
The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
[discrete]
=== 8.17.0
@ -13,6 +29,22 @@
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.17/release-notes-8.17.0.html[here].
[discrete]
=== 8.16.4
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.16`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
===== Report correct transport connection type in telemetry
The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
[discrete]
=== 8.16.3
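
The telemetry fix noted above (issue #2324) hinges on which transport connection class the client is constructed with: `UndiciConnection` is the default, and `HttpConnection` is opt-in. A minimal sketch of selecting each class explicitly, assuming the standard `Client` and `HttpConnection` exports of `@elastic/elasticsearch`; the node URL is a placeholder, not part of the changelog:

[source, js]
----
// Sketch only: shows the two connection classes the telemetry fix distinguishes.
const { Client, HttpConnection } = require("@elastic/elasticsearch");

// Default behaviour: omitting the Connection option selects UndiciConnection.
const undiciClient = new Client({ node: "http://localhost:9200" });

// Explicitly opting into the Node.js http-based connection class.
const httpClient = new Client({
  node: "http://localhost:9200",
  Connection: HttpConnection,
});
----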

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getDataStream({
name: "my-data-stream",
filter_path: "data_streams.indices.index_name",
});
console.log(response);
----

View File

@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getMapping({
index: "kibana_sample_data_ecommerce",
});
console.log(response);
----

View File

@ -11,6 +11,8 @@ const response = await client.indices.putSettings({
"index.indexing.slowlog.threshold.index.debug": "2s",
"index.indexing.slowlog.threshold.index.trace": "500ms",
"index.indexing.slowlog.source": "1000",
"index.indexing.slowlog.reformat": true,
"index.indexing.slowlog.include.user": true,
},
});
console.log(response);

View File

@ -0,0 +1,42 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-rank-vectors-bit",
mappings: {
properties: {
my_vector: {
type: "rank_vectors",
element_type: "bit",
},
},
},
});
console.log(response);
const response1 = await client.bulk({
index: "my-rank-vectors-bit",
refresh: "true",
operations: [
{
index: {
_id: "1",
},
},
{
my_vector: [127, -127, 0, 1, 42],
},
{
index: {
_id: "2",
},
},
{
my_vector: "8100012a7f",
},
],
});
console.log(response1);
----

View File

@ -3,8 +3,12 @@
[source, js]
----
const response = await client.esql.asyncQuery({
format: "json",
const response = await client.transport.request({
method: "POST",
path: "/_query/async",
querystring: {
format: "json",
},
body: {
query:
"\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",

View File

@ -3,8 +3,9 @@
[source, js]
----
const response = await client.searchApplication.renderQuery({
name: "my-app",
const response = await client.transport.request({
method: "POST",
path: "/_application/search_application/my-app/_render_query",
body: {
params: {
query_string: "my first query",

View File

@ -0,0 +1,15 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.putSettings({
index: ".reindexed-v9-ml-anomalies-custom-example",
settings: {
index: {
number_of_replicas: "<original_number_of_replicas>",
},
},
});
console.log(response);
----

View File

@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "POST",
path: "/_inference/chat_completion/openai-completion/_stream",
body: {
model: "gpt-4o",
messages: [
{
role: "user",
content: "What is Elastic?",
},
],
},
});
console.log(response);
----

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.addBlock({
index: ".ml-anomalies-custom-example",
block: "read_only",
});
console.log(response);
----

View File

@ -3,11 +3,13 @@
[source, js]
----
const response = await client.inference.inference({
const response = await client.inference.put({
task_type: "my-inference-endpoint",
inference_id: "_update",
service_settings: {
api_key: "<API_KEY>",
inference_config: {
service_settings: {
api_key: "<API_KEY>",
},
},
});
console.log(response);

View File

@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
script: {
lang: "mustache",
source:
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
params: {
query: "",
_es_filters: {},

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.simulate.ingest({
const response = await client.transport.request({
method: "POST",
path: "/_ingest/_simulate",
body: {
docs: [
{

View File

@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.queryRole({
query: {
bool: {
must_not: {
term: {
"metadata._reserved": true,
},
},
},
},
sort: ["name"],
});
console.log(response);
----

View File

@ -0,0 +1,67 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-rank-vectors-bit",
mappings: {
properties: {
my_vector: {
type: "rank_vectors",
element_type: "bit",
},
},
},
});
console.log(response);
const response1 = await client.bulk({
index: "my-rank-vectors-bit",
refresh: "true",
operations: [
{
index: {
_id: "1",
},
},
{
my_vector: [127, -127, 0, 1, 42],
},
{
index: {
_id: "2",
},
},
{
my_vector: "8100012a7f",
},
],
});
console.log(response1);
const response2 = await client.search({
index: "my-rank-vectors-bit",
query: {
script_score: {
query: {
match_all: {},
},
script: {
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
params: {
query_vector: [
[
0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
],
],
},
},
},
},
});
console.log(response2);
----

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.addBlock({
index: ".ml-anomalies-custom-example",
block: "write",
});
console.log(response);
----

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.oidcLogout({
const response = await client.transport.request({
method: "POST",
path: "/_security/oidc/logout",
body: {
token:
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",

View File

@ -0,0 +1,26 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-rank-vectors-float",
query: {
script_score: {
query: {
match_all: {},
},
script: {
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
params: {
query_vector: [
[0.5, 10, 6],
[-0.5, 10, 10],
],
},
},
},
},
});
console.log(response);
----

View File

@ -6,13 +6,13 @@
const response = await client.indices.create({
index: "test-index",
query: {
semantic: {
field: "my_semantic_field",
match: {
my_field: "Which country is Paris in?",
},
},
highlight: {
fields: {
my_semantic_field: {
my_field: {
type: "semantic",
number_of_fragments: 2,
order: "score",

View File

@ -3,10 +3,12 @@
[source, js]
----
const response = await client.esql.asyncQueryGet({
id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
wait_for_completion_timeout: "30s",
body: null,
const response = await client.transport.request({
method: "GET",
path: "/_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE&#x3D;",
querystring: {
wait_for_completion_timeout: "30s",
},
});
console.log(response);
----

View File

@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "test-index",
mappings: {
properties: {
source_field: {
type: "text",
fields: {
infer_field: {
type: "semantic_text",
inference_id: ".elser-2-elasticsearch",
},
},
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index-*",
query: {
bool: {
must: [
{
match: {
"user.id": "kimchy",
},
},
],
must_not: [
{
terms: {
_index: ["my-index-01"],
},
},
],
},
},
});
console.log(response);
----

View File

@ -0,0 +1,31 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ilm.putLifecycle({
name: "my_policy",
policy: {
phases: {
hot: {
actions: {
rollover: {
max_primary_shard_size: "50gb",
},
searchable_snapshot: {
snapshot_repository: "backing_repo",
replicate_for: "14d",
},
},
},
delete: {
min_age: "28d",
actions: {
delete: {},
},
},
},
},
});
console.log(response);
----

View File

@ -14,6 +14,7 @@ const response = await client.indices.putSettings({
"index.search.slowlog.threshold.fetch.info": "800ms",
"index.search.slowlog.threshold.fetch.debug": "500ms",
"index.search.slowlog.threshold.fetch.trace": "200ms",
"index.search.slowlog.include.user": true,
},
});
console.log(response);

View File

@ -0,0 +1,70 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "movies",
size: 10,
retriever: {
rescorer: {
rescore: {
window_size: 50,
query: {
rescore_query: {
script_score: {
query: {
match_all: {},
},
script: {
source:
"cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
params: {
queryVector: [-0.5, 90, -10, 14.8, -156],
},
},
},
},
},
},
retriever: {
rrf: {
rank_window_size: 100,
retrievers: [
{
standard: {
query: {
sparse_vector: {
field: "plot_embedding",
inference_id: "my-elser-model",
query: "films that explore psychological depths",
},
},
},
},
{
standard: {
query: {
multi_match: {
query: "crime",
fields: ["plot", "title"],
},
},
},
},
{
knn: {
field: "vector",
query_vector: [10, 22, 77],
k: 10,
num_candidates: 10,
},
},
],
},
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index",
settings: {
index: {
number_of_shards: 3,
"blocks.write": true,
},
},
mappings: {
properties: {
field1: {
type: "text",
},
},
},
});
console.log(response);
----

View File

@ -1,23 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.bulk({
index: "test-index",
operations: [
{
update: {
_id: "1",
},
},
{
doc: {
infer_field: "updated inference field",
source_field: "updated source field",
},
},
],
});
console.log(response);
----

View File

@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: ".ml-anomalies-custom-example",
size: 0,
aggs: {
job_ids: {
terms: {
field: "job_id",
size: 100,
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,61 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
linear: {
retrievers: [
{
retriever: {
standard: {
query: {
function_score: {
query: {
term: {
topic: "ai",
},
},
functions: [
{
script_score: {
script: {
source: "doc['timestamp'].value.millis",
},
},
},
],
boost_mode: "replace",
},
},
sort: {
timestamp: {
order: "asc",
},
},
},
},
weight: 2,
normalizer: "minmax",
},
{
retriever: {
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
weight: 1.5,
},
],
rank_window_size: 10,
},
},
_source: false,
});
console.log(response);
----

View File

@ -0,0 +1,16 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.updateAliases({
actions: [
{
remove_index: {
index: "my-index-2099.05.06-000001",
},
},
],
});
console.log(response);
----

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.esql.asyncQuery({
const response = await client.transport.request({
method: "POST",
path: "/_query/async",
body: {
query:
"\n FROM library\n | EVAL year = DATE_TRUNC(1 YEARS, release_date)\n | STATS MAX(page_count) BY year\n | SORT year\n | LIMIT 5\n ",

View File

@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
order_stats: {
stats: {
field: "taxful_total_price",
},
},
},
});
console.log(response);
----

View File

@ -3,9 +3,9 @@
[source, js]
----
const response = await client.esql.asyncQueryGet({
id: "FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
body: null,
const response = await client.transport.request({
method: "GET",
path: "/_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM&#x3D;",
});
console.log(response);
----

View File

@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "jinaai-index",
mappings: {
properties: {
content: {
type: "semantic_text",
inference_id: "jinaai-embeddings",
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,47 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "POST",
path: "/_inference/chat_completion/openai-completion/_stream",
body: {
messages: [
{
role: "user",
content: [
{
type: "text",
text: "What's the price of a scarf?",
},
],
},
],
tools: [
{
type: "function",
function: {
name: "get_current_price",
description: "Get the current price of a item",
parameters: {
type: "object",
properties: {
item: {
id: "123",
},
},
},
},
},
],
tool_choice: {
type: "function",
function: {
name: "get_current_price",
},
},
},
});
console.log(response);
----

View File

@ -3,9 +3,9 @@
[source, js]
----
const response = await client.inference.streamInference({
task_type: "completion",
inference_id: "openai-completion",
const response = await client.transport.request({
method: "POST",
path: "/_inference/completion/openai-completion/_stream",
body: {
input: "What is Elastic?",
},

View File

@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "sparse_embedding",
inference_id: "elser-model-eis",
inference_config: {
service: "elastic",
service_settings: {
model_name: "elser",
},
},
});
console.log(response);
----

View File

@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "image-index",
knn: {
field: "image-vector",
query_vector: [-5, 9, -12],
k: 10,
num_candidates: 100,
rescore_vector: {
oversample: 2,
},
},
fields: ["title", "file-type"],
});
console.log(response);
----

View File

@ -5,7 +5,7 @@
----
const response = await client.cluster.putSettings({
persistent: {
"cluster.routing.allocation.disk.watermark.low": "30gb",
"migrate.data_stream_reindex_max_request_per_second": 10000,
},
});
console.log(response);

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.oidcPrepareAuthentication({
const response = await client.transport.request({
method: "POST",
path: "/_security/oidc/prepare",
body: {
realm: "oidc1",
state: "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO",

View File

@ -0,0 +1,16 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "jinaai-index",
query: {
semantic: {
field: "content",
query: "who inspired taking care of the sea?",
},
},
});
console.log(response);
----

View File

@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getSettings({
index: ".reindexed-v9-ml-anomalies-custom-example",
});
console.log(response);
----

View File

@ -3,8 +3,12 @@
[source, js]
----
const response = await client.esql.asyncQuery({
format: "json",
const response = await client.transport.request({
method: "POST",
path: "/_query/async",
querystring: {
format: "json",
},
body: {
query:
"\n FROM cluster_one:my-index*,cluster_two:logs*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",

View File

@ -4,9 +4,11 @@
[source, js]
----
const response = await client.indices.putSettings({
index: "my-index-000001",
index: "*",
settings: {
"index.search.slowlog.include.user": true,
"index.search.slowlog.threshold.fetch.warn": "30s",
"index.search.slowlog.threshold.query.warn": "30s",
},
});
console.log(response);

View File

@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
script: {
lang: "mustache",
source:
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
params: {
query: "",
_es_filters: {},

View File

@ -6,14 +6,15 @@
const response = await client.search({
index: "test-index",
query: {
nested: {
path: "inference_field.inference.chunks",
query: {
sparse_vector: {
field: "inference_field.inference.chunks.embeddings",
inference_id: "my-inference-id",
query: "mountain lake",
},
match: {
my_semantic_field: "Which country is Paris in?",
},
},
highlight: {
fields: {
my_semantic_field: {
number_of_fragments: 2,
order: "score",
},
},
},

View File

@ -0,0 +1,16 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.reindex({
wait_for_completion: "false",
source: {
index: ".ml-anomalies-custom-example",
},
dest: {
index: ".reindexed-v9-ml-anomalies-custom-example",
},
});
console.log(response);
----

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.bulkUpdateApiKeys({
const response = await client.transport.request({
method: "POST",
path: "/_security/api_key/_bulk_update",
body: {
ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
},

View File

@ -12,6 +12,13 @@ const response = await client.search({
fields: ["my_field", "my_field._2gram", "my_field._3gram"],
},
},
highlight: {
fields: {
my_field: {
matched_fields: ["my_field._index_prefix"],
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,24 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index-000001",
query: {
prefix: {
full_name: {
value: "ki",
},
},
},
highlight: {
fields: {
full_name: {
matched_fields: ["full_name._index_prefix"],
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,33 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
daily_sales: {
date_histogram: {
field: "order_date",
calendar_interval: "day",
},
aggs: {
daily_revenue: {
sum: {
field: "taxful_total_price",
},
},
smoothed_revenue: {
moving_fn: {
buckets_path: "daily_revenue",
window: 3,
script: "MovingFunctions.unweightedAvg(values)",
},
},
},
},
},
});
console.log(response);
----

View File

@ -1,26 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "test-index",
query: {
nested: {
path: "inference_field.inference.chunks",
query: {
knn: {
field: "inference_field.inference.chunks.embeddings",
query_vector_builder: {
text_embedding: {
model_id: "my_inference_id",
model_text: "mountain lake",
},
},
},
},
},
},
});
console.log(response);
----

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.textStructure.findMessageStructure({
const response = await client.transport.request({
method: "POST",
path: "/_text_structure/find_message_structure",
body: {
messages: [
"[2024-03-05T10:52:36,256][INFO ][o.a.l.u.VectorUtilPanamaProvider] [laptop] Java vector incubator API enabled; uses preferredBitSize=128",

View File

@ -5,10 +5,8 @@
----
const response = await client.cluster.putSettings({
persistent: {
"cluster.routing.allocation.disk.watermark.low": "100gb",
"cluster.routing.allocation.disk.watermark.high": "50gb",
"cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
"cluster.info.update.interval": "1m",
"cluster.routing.allocation.disk.watermark.low": "90%",
"cluster.routing.allocation.disk.watermark.high": "95%",
},
});
console.log(response);

View File

@ -0,0 +1,35 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
query: {
intervals: {
my_text: {
all_of: {
ordered: false,
max_gaps: 1,
intervals: [
{
match: {
query: "my favorite food",
max_gaps: 0,
ordered: true,
},
},
{
match: {
query: "cold porridge",
max_gaps: 4,
ordered: true,
},
},
],
},
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "DELETE",
path: "/_ingest/ip_location/database/my-database-id",
});
console.log(response);
----

View File

@ -7,14 +7,14 @@ const response = await client.indices.create({
index: "test-index",
mappings: {
properties: {
infer_field: {
type: "semantic_text",
inference_id: ".elser-2-elasticsearch",
},
source_field: {
type: "text",
copy_to: "infer_field",
},
infer_field: {
type: "semantic_text",
inference_id: ".elser-2-elasticsearch",
},
},
},
});

View File

@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
daily_sales: {
date_histogram: {
field: "order_date",
calendar_interval: "day",
format: "yyyy-MM-dd",
},
aggs: {
revenue: {
sum: {
field: "taxful_total_price",
},
},
unique_customers: {
cardinality: {
field: "customer_id",
},
},
avg_basket_size: {
avg: {
field: "total_quantity",
},
},
},
},
},
});
console.log(response);
----

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.bulkUpdateApiKeys({
const response = await client.transport.request({
method: "POST",
path: "/_security/api_key/_bulk_update",
body: {
ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
role_descriptors: {

View File

@ -0,0 +1,34 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "POST",
path: "/_inference/chat_completion/openai-completion/_stream",
body: {
messages: [
{
role: "assistant",
content: "Let's find out what the weather is",
tool_calls: [
{
id: "call_KcAjWtAww20AihPHphUh46Gd",
type: "function",
function: {
name: "get_current_weather",
arguments: '{"location":"Boston, MA"}',
},
},
],
},
{
role: "tool",
content: "The weather is cold",
tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
},
],
},
});
console.log(response);
----

View File

@ -4,9 +4,11 @@
[source, js]
----
const response = await client.indices.putSettings({
index: "my-index-000001",
index: ".reindexed-v9-ml-anomalies-custom-example",
settings: {
"index.blocks.read_only_allow_delete": null,
index: {
number_of_replicas: 0,
},
},
});
console.log(response);

View File

@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.esql.query({
query:
'\nFROM library\n| EVAL year = DATE_EXTRACT("year", release_date)\n| WHERE page_count > ? AND match(author, ?, {"minimum_should_match": ?})\n| LIMIT 5\n',
params: [300, "Frank Herbert", 2],
});
console.log(response);
----

View File

@ -3,8 +3,8 @@
[source, js]
----
const response = await client.security.queryRole({
sort: ["name"],
const response = await client.indices.getAlias({
index: ".ml-anomalies-custom-example",
});
console.log(response);
----

View File

@ -0,0 +1,39 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
categories: {
terms: {
field: "category.keyword",
size: 5,
order: {
total_revenue: "desc",
},
},
aggs: {
total_revenue: {
sum: {
field: "taxful_total_price",
},
},
avg_order_value: {
avg: {
field: "taxful_total_price",
},
},
total_items: {
sum: {
field: "total_quantity",
},
},
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "PUT",
path: "/_ingest/ip_location/database/my-database-1",
body: {
name: "GeoIP2-Domain",
maxmind: {
account_id: "1234567",
},
},
});
console.log(response);
----

View File

@ -0,0 +1,42 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.bulk({
index: "jinaai-index",
operations: [
{
index: {
_index: "jinaai-index",
_id: "1",
},
},
{
content:
"Sarah Johnson is a talented marine biologist working at the Oceanographic Institute. Her groundbreaking research on coral reef ecosystems has garnered international attention and numerous accolades.",
},
{
index: {
_index: "jinaai-index",
_id: "2",
},
},
{
content:
"She spends months at a time diving in remote locations, meticulously documenting the intricate relationships between various marine species. ",
},
{
index: {
_index: "jinaai-index",
_id: "3",
},
},
{
content:
"Her dedication to preserving these delicate underwater environments has inspired a new generation of conservationists.",
},
],
});
console.log(response);
----

View File

@ -5,6 +5,9 @@
----
const response = await client.indices.create({
index: "retrievers_example_nested",
settings: {
number_of_shards: 1,
},
mappings: {
properties: {
nested_field: {
@ -18,6 +21,9 @@ const response = await client.indices.create({
dims: 3,
similarity: "l2_norm",
index: true,
index_options: {
type: "flat",
},
},
},
},

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "DELETE",
path: "/_ingest/ip_location/database/example-database-id",
});
console.log(response);
----

View File

@ -3,9 +3,9 @@
[source, js]
----
const response = await client.searchApplication.postBehavioralAnalyticsEvent({
collection_name: "my_analytics_collection",
event_type: "search_click",
const response = await client.transport.request({
method: "POST",
path: "/_application/analytics/my_analytics_collection/event/search_click",
body: {
session: {
id: "1797ca95-91c9-4e2e-b1bd-9c38e6f386a9",

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.oidcAuthenticate({
const response = await client.transport.request({
method: "POST",
path: "/_security/oidc/authenticate",
body: {
redirect_uri:
"https://oidc-kibana.elastic.co:5603/api/security/oidc/callback?code=jtI3Ntt8v3_XvcLzCFGq&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I",

View File

@ -0,0 +1,30 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-rank-vectors-byte",
mappings: {
properties: {
my_vector: {
type: "rank_vectors",
element_type: "byte",
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "my-rank-vectors-byte",
id: 1,
document: {
my_vector: [
[1, 2, 3],
[4, 5, 6],
],
},
});
console.log(response1);
----

View File

@ -3,8 +3,9 @@
[source, js]
----
const response = await client.searchApplication.renderQuery({
name: "my_search_application",
const response = await client.transport.request({
method: "POST",
path: "/_application/search_application/my_search_application/_render_query",
body: {
params: {
query_string: "rock climbing",

View File

@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cat.indices({
index: ".ml-anomalies-custom-example",
v: "true",
h: "index,store.size",
});
console.log(response);
----

View File

@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.get({
index: ".migrated-ds-my-data-stream-2025.01.23-000001",
human: "true",
filter_path: "*.settings.index.version.created_string",
});
console.log(response);
----

View File

@ -3,9 +3,9 @@
[source, js]
----
const response = await client.searchApplication.renderQuery({
name: "my_search_application",
body: null,
const response = await client.transport.request({
method: "POST",
path: "/_application/search_application/my_search_application/_render_query",
});
console.log(response);
----

View File

@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
avg_order_value: {
avg: {
field: "taxful_total_price",
},
},
},
});
console.log(response);
----

View File

@ -208,10 +208,13 @@ const response = await client.bulk({
});
console.log(response);
const response1 = await client.textStructure.findFieldStructure({
index: "test-logs",
field: "message",
body: null,
const response1 = await client.transport.request({
method: "GET",
path: "/_text_structure/find_field_structure",
querystring: {
index: "test-logs",
field: "message",
},
});
console.log(response1);
----

View File

@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
daily_orders: {
date_histogram: {
field: "order_date",
calendar_interval: "day",
format: "yyyy-MM-dd",
min_doc_count: 0,
},
},
},
});
console.log(response);
----

View File

@ -6,6 +6,7 @@
const response = await client.indices.resolveCluster({
name: "not-present,clust*:my-index*,oldcluster:*",
ignore_unavailable: "false",
timeout: "5s",
});
console.log(response);
----

View File

@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getSettings({
index: "_all",
expand_wildcards: "all",
filter_path: "*.settings.index.*.slowlog",
});
console.log(response);
----

View File

@ -6,15 +6,11 @@
const response = await client.update({
index: "test",
id: 1,
script: {
source: "ctx._source.counter += params.count",
lang: "painless",
params: {
count: 4,
},
doc: {
product_price: 100,
},
upsert: {
counter: 1,
product_price: 50,
},
});
console.log(response);

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.simulate.ingest({
const response = await client.transport.request({
method: "POST",
path: "/_ingest/_simulate",
body: {
docs: [
{

View File

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
sales_by_category: {
terms: {
field: "category.keyword",
size: 5,
order: {
_count: "desc",
},
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,31 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
daily_sales: {
date_histogram: {
field: "order_date",
calendar_interval: "day",
},
aggs: {
revenue: {
sum: {
field: "taxful_total_price",
},
},
cumulative_revenue: {
cumulative_sum: {
buckets_path: "revenue",
},
},
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "rerank",
inference_id: "jinaai-rerank",
inference_config: {
service: "jinaai",
service_settings: {
api_key: "<api_key>",
model_id: "jina-reranker-v2-base-multilingual",
},
task_settings: {
top_n: 10,
return_documents: true,
},
},
});
console.log(response);
----

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.esql.query({
query:
'\nFROM library\n| WHERE match(author, "Frank Herbert", {"minimum_should_match": 2, "operator": "AND"})\n| LIMIT 5\n',
});
console.log(response);
----

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.bulkUpdateApiKeys({
const response = await client.transport.request({
method: "POST",
path: "/_security/api_key/_bulk_update",
body: {
ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
role_descriptors: {},

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.simulate.ingest({
const response = await client.transport.request({
method: "POST",
path: "/_ingest/_simulate",
body: {
docs: [
{

View File

@ -0,0 +1,35 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
query: {
intervals: {
my_text: {
all_of: {
ordered: true,
max_gaps: 1,
intervals: [
{
match: {
query: "my favorite food",
max_gaps: 0,
ordered: true,
},
},
{
match: {
query: "cold porridge",
max_gaps: 4,
ordered: true,
},
},
],
},
},
},
},
});
console.log(response);
----

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cluster.state({
metric: "metadata",
filter_path: "metadata.indices.*.system",
});
console.log(response);
----

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.oidcPrepareAuthentication({
const response = await client.transport.request({
method: "POST",
path: "/_security/oidc/prepare",
body: {
iss: "http://127.0.0.1:8080",
login_hint: "this_is_an_opaque_string",

View File

@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "jinaai-index",
retriever: {
text_similarity_reranker: {
retriever: {
standard: {
query: {
semantic: {
field: "content",
query: "who inspired taking care of the sea?",
},
},
},
},
field: "content",
rank_window_size: 100,
inference_id: "jinaai-rerank",
inference_text: "who inspired taking care of the sea?",
},
},
});
console.log(response);
----

View File

@ -0,0 +1,44 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
linear: {
retrievers: [
{
retriever: {
standard: {
query: {
query_string: {
query: "(information retrieval) OR (artificial intelligence)",
default_field: "text",
},
},
},
},
weight: 2,
normalizer: "minmax",
},
{
retriever: {
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
weight: 1.5,
normalizer: "minmax",
},
],
rank_window_size: 10,
},
},
_source: false,
});
console.log(response);
----

View File

@ -0,0 +1,15 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "PUT",
path: "/_ingest/ip_location/database/my-database-2",
body: {
name: "standard_location",
ipinfo: {},
},
});
console.log(response);
----

View File

@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "chat_completion",
inference_id: "chat-completion-endpoint",
inference_config: {
service: "elastic",
service_settings: {
model_id: "model-1",
},
},
});
console.log(response);
----

View File

@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "GET",
path: "/_ingest/ip_location/database/my-database-id",
});
console.log(response);
----

View File

@ -0,0 +1,57 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.updateAliases({
actions: [
{
add: {
index: ".reindexed-v9-ml-anomalies-custom-example",
alias: ".ml-anomalies-example1",
filter: {
term: {
job_id: {
value: "example1",
},
},
},
is_hidden: true,
},
},
{
add: {
index: ".reindexed-v9-ml-anomalies-custom-example",
alias: ".ml-anomalies-example2",
filter: {
term: {
job_id: {
value: "example2",
},
},
},
is_hidden: true,
},
},
{
remove: {
index: ".ml-anomalies-custom-example",
aliases: ".ml-anomalies-*",
},
},
{
remove_index: {
index: ".ml-anomalies-custom-example",
},
},
{
add: {
index: ".reindexed-v9-ml-anomalies-custom-example",
alias: ".ml-anomalies-custom-example",
is_hidden: true,
},
},
],
});
console.log(response);
----

View File

@ -3,6 +3,9 @@
[source, js]
----
const response = await client.security.getSettings();
const response = await client.transport.request({
method: "GET",
path: "/_security/settings",
});
console.log(response);
----

View File

@ -7,7 +7,7 @@ const response = await client.inference.put({
task_type: "sparse_embedding",
inference_id: "elser_embeddings",
inference_config: {
service: "elser",
service: "elasticsearch",
service_settings: {
num_allocations: 1,
num_threads: 1,

View File

@ -3,7 +3,9 @@
[source, js]
----
const response = await client.security.oidcPrepareAuthentication({
const response = await client.transport.request({
method: "POST",
path: "/_security/oidc/prepare",
body: {
realm: "oidc1",
},

Some files were not shown because too many files have changed in this diff.