Auto-generated code for 8.15 (#2363)
@@ -26,7 +26,7 @@ const response1 = await client.cluster.putComponentTemplate({
          type: "keyword",
          script: {
            source:
-              "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+              "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
          },
        },
      },
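For context, the runtime field in the hunk above sits inside a component template mapping that the hunk truncates. A minimal sketch of a complete call is shown below; the template name and the surrounding mapping structure are assumptions for illustration, not part of this commit.

[source, js]
----
// Hypothetical component template defining the day-of-week runtime field.
// The template name ("my-datastream-mappings") is an assumption.
const response = await client.cluster.putComponentTemplate({
  name: "my-datastream-mappings",
  template: {
    mappings: {
      runtime: {
        day_of_week: {
          type: "keyword",
          script: {
            source:
              "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
          },
        },
      },
    },
  },
});
console.log(response);
----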
docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc (new file, 12 lines)
@@ -0,0 +1,12 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.tasks.list({
+  human: "true",
+  detailed: "true",
+  actions: "indices:data/write/bulk",
+});
+console.log(response);
+----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "alibabacloud_ai_search_embeddings",
+  id: "alibabacloud_ai_search_embeddings_pipeline",
   processors: [
     {
       inference: {
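The hunk above cuts off at the inference processor. A minimal sketch of the full pipeline definition follows; the inference endpoint id and the input/output field names are assumptions for illustration.

[source, js]
----
// Hypothetical full pipeline definition; the model_id and field names are assumptions.
const response = await client.ingest.putPipeline({
  id: "alibabacloud_ai_search_embeddings_pipeline",
  processors: [
    {
      inference: {
        model_id: "alibabacloud_ai_search_embeddings",
        input_output: {
          input_field: "content",
          output_field: "content_embedding",
        },
      },
    },
  ],
});
console.log(response);
----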
@@ -10,7 +10,7 @@ const response = await client.search({
      type: "keyword",
      script: {
        source:
-          "emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ROOT))",
+          "emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
      },
    },
  },
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "hugging_face_embeddings",
+  id: "hugging_face_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "google_vertex_ai_embeddings",
+  id: "google_vertex_ai_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -10,7 +10,7 @@ const response = await client.search({
    "date.day_of_week": {
      type: "keyword",
      script:
-        "emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+        "emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
    },
  },
  aggs: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "azure-ai-studio-embeddings",
-    pipeline: "azure_ai_studio_embeddings",
+    pipeline: "azure_ai_studio_embeddings_pipeline",
   },
 });
 console.log(response);
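This hunk (like the other reindex hunks in this commit) shows only the dest block of the reindex call. A minimal sketch of a complete call is shown below; the source index name, batch size, and wait_for_completion setting are assumptions.

[source, js]
----
// Hypothetical full reindex call; source index and size are assumptions.
const response = await client.reindex({
  wait_for_completion: false,
  source: {
    index: "test-data",
    size: 50,
  },
  dest: {
    index: "azure-ai-studio-embeddings",
    pipeline: "azure_ai_studio_embeddings_pipeline",
  },
});
console.log(response);
----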
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "azure_ai_studio_embeddings",
+  id: "azure_ai_studio_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "openai-embeddings",
-    pipeline: "openai_embeddings",
+    pipeline: "openai_embeddings_pipeline",
   },
 });
 console.log(response);
@@ -0,0 +1,8 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.nodes.hotThreads();
+console.log(response);
+----
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "amazon-bedrock-embeddings",
-    pipeline: "amazon_bedrock_embeddings",
+    pipeline: "amazon_bedrock_embeddings_pipeline",
   },
 });
 console.log(response);
@@ -10,7 +10,7 @@ const response = await client.search({
      type: "keyword",
      script: {
        source:
-          "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+          "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
      },
    },
  },
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "openai_embeddings",
+  id: "openai_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "elser-embeddings",
-    pipeline: "elser_embeddings",
+    pipeline: "elser_embeddings_pipeline",
   },
 });
 console.log(response);
docs/doc_examples/4f6694ef147a73b1163bde3c13779d26.asciidoc (new file, 11 lines)
@@ -0,0 +1,11 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.nodes.stats({
+  human: "true",
+  filter_path: "nodes.*.indexing_pressure",
+});
+console.log(response);
+----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "cohere_embeddings",
+  id: "cohere_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "hugging-face-embeddings",
-    pipeline: "hugging_face_embeddings",
+    pipeline: "hugging_face_embeddings_pipeline",
   },
 });
 console.log(response);
@@ -3,8 +3,8 @@
 
 [source, js]
 ----
-const response = await client.nodes.hotThreads({
-  node_id: "my-node,my-other-node",
+const response = await client.nodes.stats({
+  metric: "breaker",
 });
 console.log(response);
 ----
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "google-vertex-ai-embeddings",
-    pipeline: "google_vertex_ai_embeddings",
+    pipeline: "google_vertex_ai_embeddings_pipeline",
   },
 });
 console.log(response);
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "cohere-embeddings",
-    pipeline: "cohere_embeddings",
+    pipeline: "cohere_embeddings_pipeline",
   },
 });
 console.log(response);
docs/doc_examples/84237aa9da49ab4b4c4e2b21d2548df2.asciidoc (new file, 11 lines)
@@ -0,0 +1,11 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.snapshot.create({
+  repository: "my_repository",
+  snapshot: "_verify_integrity",
+});
+console.log(response);
+----
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "mistral-embeddings",
-    pipeline: "mistral_embeddings",
+    pipeline: "mistral_embeddings_pipeline",
   },
 });
 console.log(response);
@@ -7,7 +7,7 @@ const response = await client.search({
   index: "semantic-embeddings",
   query: {
     semantic: {
-      field: "semantic_text",
+      field: "content",
       query: "How to avoid muscle soreness while running?",
     },
   },
@@ -4,7 +4,9 @@
 [source, js]
 ----
 const response = await client.tasks.list({
-  filter_path: "nodes.*.tasks",
+  pretty: "true",
+  human: "true",
+  detailed: "true",
 });
 console.log(response);
 ----
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "alibabacloud-ai-search-embeddings",
-    pipeline: "alibabacloud_ai_search_embeddings",
+    pipeline: "alibabacloud_ai_search_embeddings_pipeline",
   },
 });
 console.log(response);
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "azure_openai_embeddings",
+  id: "azure_openai_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -0,0 +1,8 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.cluster.pendingTasks();
+console.log(response);
+----
@@ -8,7 +8,7 @@ const response = await client.search({
    day_of_week: {
      type: "keyword",
      script:
-        "\n emit(doc['timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ROOT))\n ",
+        "\n emit(doc['timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ENGLISH))\n ",
    },
  },
  size: 0,
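The hunk above shows a runtime field defined inline in a search request, but the request itself is truncated. A minimal sketch of a full search using that field is shown below; the index name and the terms aggregation body are assumptions for illustration.

[source, js]
----
// Hypothetical full search using the runtime field; index name and aggregation are assumptions.
const response = await client.search({
  index: "my-index",
  runtime_mappings: {
    day_of_week: {
      type: "keyword",
      script:
        "\n emit(doc['timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ENGLISH))\n ",
    },
  },
  size: 0,
  aggs: {
    day_of_week: {
      terms: {
        field: "day_of_week",
      },
    },
  },
});
console.log(response);
----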
@@ -7,14 +7,10 @@ const response = await client.indices.create({
   index: "semantic-embeddings",
   mappings: {
     properties: {
-      semantic_text: {
+      content: {
         type: "semantic_text",
         inference_id: "my-elser-endpoint",
       },
-      content: {
-        type: "text",
-        copy_to: "semantic_text",
-      },
     },
   },
 });
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "azure-openai-embeddings",
-    pipeline: "azure_openai_embeddings",
+    pipeline: "azure_openai_embeddings_pipeline",
   },
 });
 console.log(response);
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "mistral_embeddings",
+  id: "mistral_embeddings_pipeline",
   processors: [
     {
       inference: {
docs/doc_examples/e3fe842951dc873d7d00c8f6a010c53f.asciidoc (new file, 12 lines)
@@ -0,0 +1,12 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.tasks.list({
+  human: "true",
+  detailed: "true",
+  actions: "indices:data/write/search",
+});
+console.log(response);
+----
@@ -5,7 +5,7 @@
 ----
 const response = await client.cat.threadPool({
   v: "true",
-  h: "id,name,active,rejected,completed",
+  h: "id,name,queue,active,rejected,completed",
 });
 console.log(response);
 ----
@@ -12,7 +12,7 @@ const response = await client.indices.create({
        type: "keyword",
        script: {
          source:
-            "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+            "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
        },
      },
    },
@@ -11,7 +11,7 @@ const response = await client.indices.create({
        type: "keyword",
        script: {
          source:
-            "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+            "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
        },
      },
    },
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "amazon_bedrock_embeddings",
+  id: "amazon_bedrock_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "elser_embeddings",
+  id: "elser_embeddings_pipeline",
   processors: [
     {
       inference: {