Auto-generated code for 8.x (#2369)
@@ -26,7 +26,7 @@ const response1 = await client.cluster.putComponentTemplate({
 type: "keyword",
 script: {
 source:
-"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
 },
 },
 },
12 docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc Normal file
@@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.tasks.list({
  human: "true",
  detailed: "true",
  actions: "indices:data/write/bulk",
});
console.log(response);
----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.indices.create({
-index: "my-index-000002",
+index: "my-index-000003",
 mappings: {
 properties: {
 metrics: {
@@ -29,7 +29,7 @@ const response = await client.indices.create({
 console.log(response);

 const response1 = await client.indices.getMapping({
-index: "my-index-000002",
+index: "my-index-000003",
 });
 console.log(response1);
 ----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "alibabacloud_ai_search_embeddings",
+id: "alibabacloud_ai_search_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -14,6 +14,7 @@ const response = await client.inference.put({
 min_number_of_allocations: 3,
 max_number_of_allocations: 10,
 },
 num_threads: 1,
+model_id: ".multilingual-e5-small",
 },
 },
@@ -10,7 +10,7 @@ const response = await client.search({
 type: "keyword",
 script: {
 source:
-"emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ROOT))",
+"emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
 },
 },
 },
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "hugging_face_embeddings",
+id: "hugging_face_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "google_vertex_ai_embeddings",
+id: "google_vertex_ai_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -10,7 +10,7 @@ const response = await client.search({
 "date.day_of_week": {
 type: "keyword",
 script:
-"emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+"emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
 },
 },
 aggs: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "azure-ai-studio-embeddings",
-pipeline: "azure_ai_studio_embeddings",
+pipeline: "azure_ai_studio_embeddings_pipeline",
 },
 });
 console.log(response);
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "azure_ai_studio_embeddings",
+id: "azure_ai_studio_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "openai-embeddings",
-pipeline: "openai_embeddings",
+pipeline: "openai_embeddings_pipeline",
 },
 });
 console.log(response);
@@ -0,0 +1,8 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.nodes.hotThreads();
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "amazon-bedrock-embeddings",
-pipeline: "amazon_bedrock_embeddings",
+pipeline: "amazon_bedrock_embeddings_pipeline",
 },
 });
 console.log(response);
18 docs/doc_examples/43d9e314431336a6f084cea76dfd6489.asciidoc Normal file
@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "restaurants",
  retriever: {
    knn: {
      field: "vector",
      query_vector: [10, 22, 77],
      k: 10,
      num_candidates: 10,
    },
  },
});
console.log(response);
----
@@ -10,7 +10,7 @@ const response = await client.search({
 type: "keyword",
 script: {
 source:
-"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
 },
 },
 },
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "openai_embeddings",
+id: "openai_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "elser-embeddings",
-pipeline: "elser_embeddings",
+pipeline: "elser_embeddings_pipeline",
 },
 });
 console.log(response);
11 docs/doc_examples/4f6694ef147a73b1163bde3c13779d26.asciidoc Normal file
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.nodes.stats({
  human: "true",
  filter_path: "nodes.*.indexing_pressure",
});
console.log(response);
----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "cohere_embeddings",
+id: "cohere_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "hugging-face-embeddings",
-pipeline: "hugging_face_embeddings",
+pipeline: "hugging_face_embeddings_pipeline",
 },
 });
 console.log(response);
@@ -14,6 +14,7 @@ const response = await client.inference.put({
 min_number_of_allocations: 3,
 max_number_of_allocations: 10,
 },
 num_threads: 1,
 },
 },
 });
19 docs/doc_examples/58f6b72009512851843c7b7a20e9504a.asciidoc Normal file
@@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000002",
  mappings: {
    properties: {
      inference_field: {
        type: "semantic_text",
        inference_id: "my-elser-endpoint-for-ingest",
        search_inference_id: "my-elser-endpoint-for-search",
      },
    },
  },
});
console.log(response);
----
@@ -3,8 +3,8 @@

 [source, js]
 ----
-const response = await client.nodes.hotThreads({
-node_id: "my-node,my-other-node",
+const response = await client.nodes.stats({
+metric: "breaker",
 });
 console.log(response);
 ----
19 docs/doc_examples/69541f0bb81ab3797926bb2a00607cda.asciidoc Normal file
@@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "rerank",
  inference_id: "my-msmarco-minilm-model",
  inference_config: {
    service: "elasticsearch",
    service_settings: {
      num_allocations: 1,
      num_threads: 1,
      model_id: "cross-encoder__ms-marco-minilm-l-6-v2",
    },
  },
});
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "google-vertex-ai-embeddings",
-pipeline: "google_vertex_ai_embeddings",
+pipeline: "google_vertex_ai_embeddings_pipeline",
 },
 });
 console.log(response);
45 docs/doc_examples/6e6b78e6b689a5d6aa637271b6d084e2.asciidoc Normal file
@@ -0,0 +1,45 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "movies",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              sparse_vector: {
                field: "plot_embedding",
                inference_id: "my-elser-model",
                query: "films that explore psychological depths",
              },
            },
          },
        },
        {
          standard: {
            query: {
              multi_match: {
                query: "crime",
                fields: ["plot", "title"],
              },
            },
          },
        },
        {
          knn: {
            field: "vector",
            query_vector: [10, 22, 77],
            k: 10,
            num_candidates: 10,
          },
        },
      ],
    },
  },
});
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.search({
 filter: {
 range: {
 price: {
-to: "500",
+lte: "500",
 },
 },
 },
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "cohere-embeddings",
-pipeline: "cohere_embeddings",
+pipeline: "cohere_embeddings_pipeline",
 },
 });
 console.log(response);
51 docs/doc_examples/7b1b947bddd7e78f77da265f7e645a61.asciidoc Normal file
@@ -0,0 +1,51 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000004",
  mappings: {
    properties: {
      metrics: {
        subobjects: "auto",
        properties: {
          time: {
            type: "object",
            properties: {
              min: {
                type: "long",
              },
            },
          },
          to: {
            type: "object",
            properties: {
              inner_metrics: {
                type: "object",
                subobjects: "auto",
                properties: {
                  time: {
                    type: "object",
                    properties: {
                      max: {
                        type: "long",
                      },
                    },
                  },
                },
              },
            },
          },
        },
      },
    },
  },
});
console.log(response);

const response1 = await client.indices.getMapping({
  index: "my-index-000004",
});
console.log(response1);
----
@@ -17,7 +17,7 @@ const response = await client.search({
 {
 range: {
 "result.execution_time": {
-from: "now-10s",
+gte: "now-10s",
 },
 },
 },
11 docs/doc_examples/84237aa9da49ab4b4c4e2b21d2548df2.asciidoc Normal file
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.snapshot.create({
  repository: "my_repository",
  snapshot: "_verify_integrity",
});
console.log(response);
----
20 docs/doc_examples/8477e77e4fad19af66f03f81b8f2592b.asciidoc Normal file
@@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-index",
  query: {
    semantic: {
      field: "inference_field",
      query: "mountain lake",
      inner_hits: {
        from: 1,
        size: 1,
      },
    },
  },
});
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "mistral-embeddings",
-pipeline: "mistral_embeddings",
+pipeline: "mistral_embeddings_pipeline",
 },
 });
 console.log(response);
@@ -9,7 +9,11 @@ const response = await client.inference.put({
 inference_config: {
 service: "elser",
 service_settings: {
-num_allocations: 1,
+adaptive_allocations: {
+enabled: true,
+min_number_of_allocations: 3,
+max_number_of_allocations: 10,
+},
 num_threads: 1,
 },
 },
17 docs/doc_examples/8d750dfc067b1184c32a2423c60e4d06.asciidoc Normal file
@@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-index",
  query: {
    semantic: {
      field: "inference_field",
      query: "mountain lake",
      inner_hits: {},
    },
  },
});
console.log(response);
----
36 docs/doc_examples/9169d19a80175ec94f80865d0f9bef4c.asciidoc Normal file
@@ -0,0 +1,36 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "restaurants",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              multi_match: {
                query: "Austria",
                fields: ["city", "region"],
              },
            },
          },
        },
        {
          knn: {
            field: "vector",
            query_vector: [10, 22, 77],
            k: 10,
            num_candidates: 10,
          },
        },
      ],
      rank_constant: 1,
      rank_window_size: 50,
    },
  },
});
console.log(response);
----
@@ -7,7 +7,7 @@ const response = await client.search({
 index: "semantic-embeddings",
 query: {
 semantic: {
-field: "semantic_text",
+field: "content",
 query: "How to avoid muscle soreness while running?",
 },
 },
26 docs/doc_examples/971fd23adb81bb5842c7750e0379336a.asciidoc Normal file
@@ -0,0 +1,26 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "movies",
  retriever: {
    text_similarity_reranker: {
      retriever: {
        standard: {
          query: {
            match: {
              genre: "drama",
            },
          },
        },
      },
      field: "plot",
      inference_id: "my-msmarco-minilm-model",
      inference_text: "films that explore psychological depths",
    },
  },
});
console.log(response);
----
@@ -4,7 +4,9 @@
 [source, js]
 ----
 const response = await client.tasks.list({
 filter_path: "nodes.*.tasks",
 pretty: "true",
 human: "true",
 detailed: "true",
 });
 console.log(response);
 ----
17 docs/doc_examples/9dfe3b02bd15409b4b8b36e9756e8f94.asciidoc Normal file
@@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.index({
  index: "my-index",
  id: "lake_tahoe",
  document: {
    inference_field: [
      "Lake Tahoe is the largest alpine lake in North America",
      "When hiking in the area, please be on alert for bears",
    ],
  },
});
console.log(response);
----
28 docs/doc_examples/a1dda7e7c01be96a4acf7b725d70385f.asciidoc Normal file
@@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "index",
  retriever: {
    text_similarity_reranker: {
      retriever: {
        standard: {
          query: {
            match_phrase: {
              text: "landmark in Paris",
            },
          },
        },
      },
      field: "text",
      inference_id: "my-cohere-rerank-model",
      inference_text: "Most famous landmark in Paris",
      rank_window_size: 100,
      min_score: 0.5,
    },
  },
});
console.log(response);
----
32 docs/doc_examples/a3646b59da66b9ab68bdbc8dc2e6a9be.asciidoc Normal file
@@ -0,0 +1,32 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "restaurants",
  retriever: {
    standard: {
      query: {
        bool: {
          should: [
            {
              match: {
                region: "Austria",
              },
            },
          ],
          filter: [
            {
              term: {
                year: "2019",
              },
            },
          ],
        },
      },
    },
  },
});
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "alibabacloud-ai-search-embeddings",
-pipeline: "alibabacloud_ai_search_embeddings",
+pipeline: "alibabacloud_ai_search_embeddings_pipeline",
 },
 });
 console.log(response);
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "azure_openai_embeddings",
+id: "azure_openai_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -0,0 +1,8 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.cluster.pendingTasks();
console.log(response);
----
@@ -8,7 +8,7 @@ const response = await client.search({
 day_of_week: {
 type: "keyword",
 script:
-"\n emit(doc['timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ROOT))\n ",
+"\n emit(doc['timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ENGLISH))\n ",
 },
 },
 size: 0,
@@ -7,14 +7,10 @@ const response = await client.indices.create({
 index: "semantic-embeddings",
 mappings: {
 properties: {
-semantic_text: {
+content: {
 type: "semantic_text",
 inference_id: "my-elser-endpoint",
 },
-content: {
-type: "text",
-copy_to: "semantic_text",
-},
 },
 },
 });
@@ -11,7 +11,7 @@ const response = await client.reindex({
 },
 dest: {
 index: "azure-openai-embeddings",
-pipeline: "azure_openai_embeddings",
+pipeline: "azure_openai_embeddings_pipeline",
 },
 });
 console.log(response);
68 docs/doc_examples/dbb8fa2b8af6db66cf75ca4b83c0fb21.asciidoc Normal file
@@ -0,0 +1,68 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000002",
  mappings: {
    properties: {
      metrics: {
        type: "object",
        subobjects: "auto",
        properties: {
          inner: {
            type: "object",
            enabled: false,
          },
          nested: {
            type: "nested",
          },
        },
      },
    },
  },
});
console.log(response);

const response1 = await client.index({
  index: "my-index-000002",
  id: "metric_1",
  document: {
    "metrics.time": 100,
    "metrics.time.min": 10,
    "metrics.time.max": 900,
  },
});
console.log(response1);

const response2 = await client.index({
  index: "my-index-000002",
  id: "metric_2",
  document: {
    metrics: {
      time: 100,
      "time.min": 10,
      "time.max": 900,
      inner: {
        foo: "bar",
        "path.to.some.field": "baz",
      },
      nested: [
        {
          id: 10,
        },
        {
          id: 1,
        },
      ],
    },
  },
});
console.log(response2);

const response3 = await client.indices.getMapping({
  index: "my-index-000002",
});
console.log(response3);
----
@@ -33,6 +33,25 @@ const response = await client.simulate.ingest({
 ],
 },
 },
+component_template_substitutions: {
+"my-component-template": {
+template: {
+mappings: {
+dynamic: "true",
+properties: {
+field3: {
+type: "keyword",
+},
+},
+},
+settings: {
+index: {
+default_pipeline: "my-pipeline",
+},
+},
+},
+},
+},
 },
 });
 console.log(response);
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "mistral_embeddings",
+id: "mistral_embeddings_pipeline",
 processors: [
 {
 inference: {
28 docs/doc_examples/e017c2de6f93a8dd97f5c6e002dd5c4f.asciidoc Normal file
@@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ml.postCalendarEvents({
  calendar_id: "dst-germany",
  events: [
    {
      description: "Fall 2024",
      start_time: 1729994400000,
      end_time: 1730167200000,
      skip_result: false,
      skip_model_update: false,
      force_time_shift: -3600,
    },
    {
      description: "Spring 2025",
      start_time: 1743296400000,
      end_time: 1743469200000,
      skip_result: false,
      skip_model_update: false,
      force_time_shift: 3600,
    },
  ],
});
console.log(response);
----
13 docs/doc_examples/e04267ffc50d916800b919c6cdc9622a.asciidoc Normal file
@@ -0,0 +1,13 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000001",
  settings: {
    "index.mapping.ignore_above": 256,
  },
});
console.log(response);
----
12 docs/doc_examples/e3fe842951dc873d7d00c8f6a010c53f.asciidoc Normal file
@@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.tasks.list({
  human: "true",
  detailed: "true",
  actions: "indices:data/write/search",
});
console.log(response);
----
44 docs/doc_examples/e4b38973c74037335378d8480f1ce894.asciidoc Normal file
@@ -0,0 +1,44 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.simulate.ingest({
  body: {
    docs: [
      {
        _index: "my-index",
        _id: "123",
        _source: {
          foo: "foo",
        },
      },
      {
        _index: "my-index",
        _id: "456",
        _source: {
          bar: "rab",
        },
      },
    ],
    component_template_substitutions: {
      "my-mappings_template": {
        template: {
          mappings: {
            dynamic: "strict",
            properties: {
              foo: {
                type: "keyword",
              },
              bar: {
                type: "keyword",
              },
            },
          },
        },
      },
    },
  },
});
console.log(response);
----
@@ -5,7 +5,7 @@
 ----
 const response = await client.cat.threadPool({
 v: "true",
-h: "id,name,active,rejected,completed",
+h: "id,name,queue,active,rejected,completed",
 });
 console.log(response);
 ----
@@ -12,7 +12,7 @@ const response = await client.indices.create({
 type: "keyword",
 script: {
 source:
-"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
 },
 },
 },
@@ -11,7 +11,7 @@ const response = await client.indices.create({
 type: "keyword",
 script: {
 source:
-"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
 },
 },
 },
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "amazon_bedrock_embeddings",
+id: "amazon_bedrock_embeddings_pipeline",
 processors: [
 {
 inference: {
@@ -11,7 +11,7 @@ const response = await client.search({
 filter: {
 range: {
 price: {
-to: "500",
+lte: "500",
 },
 },
 },
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-id: "elser_embeddings",
+id: "elser_embeddings_pipeline",
 processors: [
 {
 inference: {