Compare commits
30 Commits
| SHA1 |
|---|
| 394dc8d9a5 |
| d542c069e8 |
| 11f12b48e1 |
| 3196bf6d1c |
| 6bbc0dacdd |
| e93b7495c0 |
| c69fbac3ab |
| 7d4ae5e3dd |
| 529598b43f |
| ec61d1afb1 |
| 05e140ca16 |
| b050185399 |
| ce6db97a71 |
| 5d4bd654a3 |
| 43175e1a2b |
| fc80b3247d |
| e47b135e8d |
| 4afa601051 |
| c6544c2979 |
| fb1a42cadc |
| e3863d7b77 |
| 424cc94458 |
| 7aca5cf652 |
| a8927727b1 |
| 15a450eba4 |
| 8f028a522a |
| 411f379006 |
| 242b4227ee |
| 78332da539 |
| 62b2d78b15 |
@@ -125,6 +125,13 @@ async function codegen (args) {
  await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
  await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
  await $`npm run build`

  // run docs example generation
  if (version === 'main') {
    await $`node ./scripts/generate-docs-examples.js`
  } else {
    await $`node ./scripts/generate-docs-examples.js ${version.split('.').slice(0, 2).join('.')}`
  }
}

function onError (err) {
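A small aside on the last command in the hunk above: the `version.split('.').slice(0, 2).join('.')` argument trims a full release version down to its major.minor pair before it is passed to the docs-example generator. A quick sketch of that expression (the sample version string is illustrative only):

[source, js]
----
// "8.15.3" -> ["8", "15", "3"] -> ["8", "15"] -> "8.15"
const version = '8.15.3'
console.log(version.split('.').slice(0, 2).join('.')) // prints "8.15"
----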
@@ -5,3 +5,4 @@ elasticsearch
.git
lib
junit-output
.tap
.github/make.sh (vendored, 12 changes)
@@ -150,7 +150,7 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
    -u "$(id -u):$(id -g)" \
    --volume "$repo:/usr/src/elasticsearch-js" \
    --volume /usr/src/elasticsearch-js/node_modules \
-   --volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
+   --volume "$(realpath "$repo/../elastic-client-generator-js"):/usr/src/elastic-client-generator-js" \
    --env "WORKFLOW=$WORKFLOW" \
    --name make-elasticsearch-js \
    --rm \
@@ -159,6 +159,14 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
      node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
else
  echo -e "\033[34;1mINFO: Running in CI mode"

  # determine branch to clone
  GENERATOR_BRANCH="main"
  if [[ "$VERSION" == 8.* ]]; then
    GENERATOR_BRANCH="8.x"
  fi
  echo -e "\033[34;1mINFO: Generator branch: $GENERATOR_BRANCH"

  docker run \
    --volume "$repo:/usr/src/elasticsearch-js" \
    --volume /usr/src/elasticsearch-js/node_modules \
@@ -168,7 +176,7 @@ else
    --rm \
    $product \
    /bin/bash -c "cd /usr/src && \
-     git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
+     git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
      mkdir -p /usr/src/elastic-client-generator-js/output && \
      cd /usr/src/elasticsearch-js && \
      node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
.github/workflows/auto-merge.yml (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
name: Automerge

on:
  pull_request_review:
    types:
      - submitted

jobs:
  automerge:
    runs-on: ubuntu-latest
    if: github.event.review.state == 'approved'
    steps:
      - uses: reitermarkus/automerge@v2
        with:
          token: ${{ secrets.GH_TOKEN }}
          merge-method: squash
          pull-request-author-associations: OWNER
          review-author-associations: OWNER,CONTRIBUTOR
.github/workflows/nodejs.yml (vendored, 2 changes)
@@ -2,7 +2,7 @@
name: Node CI

on:
-  pull_request_target: {}
+  pull_request: {}

jobs:
  paths-filter:
.gitignore (vendored, 4 changes)
@@ -64,3 +64,7 @@ test/bundlers/parcel-test/.parcel-cache

lib
junit-output
+bun.lockb
+test-results
+processinfo
+.tap
@@ -72,3 +72,5 @@ CODE_OF_CONDUCT.md
CONTRIBUTING.md

src
+bun.lockb
+.tap
@@ -1,6 +1,44 @@
[[changelog-client]]
== Release notes

[discrete]
=== 8.15.3

[discrete]
==== Fixes

[discrete]
===== Improved support for Elasticsearch `v8.15`

Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.

[discrete]
===== Drop testing artifacts from npm package

Tap, the unit testing tool, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.

[discrete]
=== 8.15.2

[discrete]
==== Fixes

[discrete]
===== Improved support for Elasticsearch `v8.15`

Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.

[discrete]
=== 8.15.1

[discrete]
==== Fixes

[discrete]
===== Improved support for Elasticsearch `v8.15`

Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.

[discrete]
=== 8.15.0
@@ -2,10 +2,11 @@
== Configuration

-The client is designed to be easily configured for your needs. In the following
+The client is designed to be easily configured for your needs. In the following
section, you can see the possible options that you can use to configure it.

* <<basic-config>>
* <<advanced-config>>
+* <<timeout-best-practices>>
* <<child>>
* <<client-testing>>
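The hunk above only adjusts the prose and the options list in the configuration page of the docs. For context, a minimal sketch of the kind of basic configuration that page describes is shown below; the node URL and API key are placeholders, not values taken from this change set.

[source, js]
----
// Minimal client configuration sketch (placeholder values only).
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',           // placeholder cluster URL
  auth: { apiKey: 'base64EncodedApiKey' },  // placeholder credential
  requestTimeout: 30000,                    // request timeout in milliseconds
  maxRetries: 3
})
----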
@@ -3,10 +3,10 @@

[source, js]
----
-const response = await client.transport.request({
-  method: "PUT",
-  path: "/_inference/text_embedding/my-e5-model",
-  body: {
+const response = await client.inference.put({
+  task_type: "text_embedding",
+  inference_id: "my-e5-model",
+  inference_config: {
    service: "elasticsearch",
    service_settings: {
      num_allocations: 1,
docs/doc_examples/015e6e6132b6d6d44bddb06bc3b316ed.asciidoc (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
range: {
|
||||
year: {
|
||||
gt: 2023,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "elastic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
aggs: {
|
||||
topics: {
|
||||
terms: {
|
||||
field: "topic",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/0165d22da5f2fc7678392b31d8eb5566.asciidoc (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "rerank",
|
||||
inference_id: "my-rerank-model",
|
||||
inference_config: {
|
||||
service: "cohere",
|
||||
service_settings: {
|
||||
model_id: "rerank-english-v3.0",
|
||||
api_key: "{{COHERE_API_KEY}}",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,10 +5,16 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
properties: {
|
||||
kwd: {
|
||||
type: "keyword",
|
||||
@ -26,7 +26,7 @@ const response1 = await client.cluster.putComponentTemplate({
|
||||
type: "keyword",
|
||||
script: {
|
||||
source:
|
||||
"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
|
||||
"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
|
||||
},
|
||||
},
|
||||
},
|
||||
docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.tasks.list({
|
||||
human: "true",
|
||||
detailed: "true",
|
||||
actions: "indices:data/write/bulk",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,13 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_connector/my-connector/_api_key_id",
|
||||
body: {
|
||||
api_key_id: "my-api-key-id",
|
||||
api_key_secret_id: "my-connector-secret-id",
|
||||
},
|
||||
const response = await client.connector.updateApiKeyId({
|
||||
connector_id: "my-connector",
|
||||
api_key_id: "my-api-key-id",
|
||||
api_key_secret_id: "my-connector-secret-id",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -3,10 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_inference/text_embedding/google_vertex_ai_embeddings",
|
||||
body: {
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "google_vertex_ai_embeddings",
|
||||
inference_config: {
|
||||
service: "googlevertexai",
|
||||
service_settings: {
|
||||
service_account_json: "<service_account_json>",
|
||||
|
||||
docs/doc_examples/05e637284bc3bedd46e0b7c26ad983c4.asciidoc (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "alibabacloud_ai_search_embeddings_pipeline",
|
||||
processors: [
|
||||
{
|
||||
inference: {
|
||||
model_id: "alibabacloud_ai_search_embeddings",
|
||||
input_output: {
|
||||
input_field: "content",
|
||||
output_field: "content_embedding",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,6 +3,6 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.simulate.ingest({});
|
||||
const response = await client.cluster.reroute();
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,8 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.nodes.hotThreads({
|
||||
node_id: "my-node,my-other-node",
|
||||
const response = await client.indices.getMapping({
|
||||
index: "kibana_sample_data_ecommerce",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -11,6 +11,8 @@ const response = await client.indices.putSettings({
|
||||
"index.indexing.slowlog.threshold.index.debug": "2s",
|
||||
"index.indexing.slowlog.threshold.index.trace": "500ms",
|
||||
"index.indexing.slowlog.source": "1000",
|
||||
"index.indexing.slowlog.reformat": true,
|
||||
"index.indexing.slowlog.include.user": true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
docs/doc_examples/082e78c7a2061a7c4a52b494e5ede0e8.asciidoc (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-rank-vectors-bit",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_vector: {
|
||||
type: "rank_vectors",
|
||||
element_type: "bit",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.bulk({
|
||||
index: "my-rank-vectors-bit",
|
||||
refresh: "true",
|
||||
operations: [
|
||||
{
|
||||
index: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: [127, -127, 0, 1, 42],
|
||||
},
|
||||
{
|
||||
index: {
|
||||
_id: "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: "8100012a7f",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
docs/doc_examples/083b92e8ea264e49bf9fd40fc6a3094b.asciidoc (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "my-e5-model",
|
||||
inference_config: {
|
||||
service: "elasticsearch",
|
||||
service_settings: {
|
||||
adaptive_allocations: {
|
||||
enabled: true,
|
||||
min_number_of_allocations: 3,
|
||||
max_number_of_allocations: 10,
|
||||
},
|
||||
num_threads: 1,
|
||||
model_id: ".multilingual-e5-small",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,10 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_inference/text_embedding/amazon_bedrock_embeddings",
|
||||
body: {
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "amazon_bedrock_embeddings",
|
||||
inference_config: {
|
||||
service: "amazonbedrock",
|
||||
service_settings: {
|
||||
access_key: "<aws_access_key>",
|
||||
|
||||
@ -3,29 +3,26 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_connector/my-g-drive-connector/_filtering",
|
||||
body: {
|
||||
rules: [
|
||||
{
|
||||
field: "file_extension",
|
||||
id: "exclude-txt-files",
|
||||
order: 0,
|
||||
policy: "exclude",
|
||||
rule: "equals",
|
||||
value: "txt",
|
||||
},
|
||||
{
|
||||
field: "_",
|
||||
id: "DEFAULT",
|
||||
order: 1,
|
||||
policy: "include",
|
||||
rule: "regex",
|
||||
value: ".*",
|
||||
},
|
||||
],
|
||||
},
|
||||
const response = await client.connector.updateFiltering({
|
||||
connector_id: "my-g-drive-connector",
|
||||
rules: [
|
||||
{
|
||||
field: "file_extension",
|
||||
id: "exclude-txt-files",
|
||||
order: 0,
|
||||
policy: "exclude",
|
||||
rule: "equals",
|
||||
value: "txt",
|
||||
},
|
||||
{
|
||||
field: "_",
|
||||
id: "DEFAULT",
|
||||
order: 1,
|
||||
policy: "include",
|
||||
rule: "regex",
|
||||
value: ".*",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -10,7 +10,7 @@ const response = await client.search({
|
||||
type: "keyword",
|
||||
script: {
|
||||
source:
|
||||
"emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ROOT))",
|
||||
"emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
|
||||
},
|
||||
},
|
||||
},
|
||||
docs/doc_examples/0bc6155e0c88062a4d8490da49db3aa8.asciidoc (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example_nested",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
nested: {
|
||||
path: "nested_field",
|
||||
inner_hits: {
|
||||
name: "nested_vector",
|
||||
_source: false,
|
||||
fields: ["nested_field.paragraph_id"],
|
||||
},
|
||||
query: {
|
||||
knn: {
|
||||
field: "nested_field.nested_vector",
|
||||
query_vector: [1, 0, 0.5],
|
||||
k: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "ai",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: ["topic"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/0bee07a581c5776e068f6f4efad5a399.asciidoc (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_query/async",
|
||||
querystring: {
|
||||
format: "json",
|
||||
},
|
||||
body: {
|
||||
query:
|
||||
"\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
|
||||
include_ccs_metadata: true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: false,
|
||||
remove_binary: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
docs/doc_examples/0c8be7aec84ea86b243904f5d4162f5a.asciidoc (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "cooking_blog",
|
||||
query: {
|
||||
match: {
|
||||
title: {
|
||||
query: "fluffy pancakes breakfast",
|
||||
minimum_should_match: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/0d30077cd34e93377a3a86f2ebd69415.asciidoc (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.connector.put({
|
||||
connector_id: "my-connector",
|
||||
index_name: "search-google-drive",
|
||||
name: "My Connector",
|
||||
description: "My Connector to sync data to Elastic index from Google Drive",
|
||||
service_type: "google_drive",
|
||||
language: "en",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/0d689ac6e78be5d438f9b5d441be2b44.asciidoc (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "elastic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
query_string: {
|
||||
query:
|
||||
"(information retrieval) OR (artificial intelligence)",
|
||||
default_field: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
knn: {
|
||||
field: "vector",
|
||||
query_vector: [0.23, 0.67, 0.89],
|
||||
k: 3,
|
||||
num_candidates: 5,
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
size: 1,
|
||||
explain: true,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,8 +3,24 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.searchApplication.renderQuery({
|
||||
name: "my-app",
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_application/search_application/my-app/_render_query",
|
||||
body: {
|
||||
params: {
|
||||
query_string: "my first query",
|
||||
text_fields: [
|
||||
{
|
||||
name: "title",
|
||||
boost: 5,
|
||||
},
|
||||
{
|
||||
name: "description",
|
||||
boost: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
docs/doc_examples/0e31b8ad176b31028becf9500989bcbd.asciidoc (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "watsonx-embeddings",
|
||||
inference_config: {
|
||||
service: "watsonxai",
|
||||
service_settings: {
|
||||
api_key: "<api_key>",
|
||||
url: "<url>",
|
||||
model_id: "ibm/slate-30m-english-rtrvr",
|
||||
project_id: "<project_id>",
|
||||
api_version: "2024-03-14",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,15 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.putSettings({
|
||||
persistent: {
|
||||
"cluster.routing.allocation.disk.watermark.low": "100gb",
|
||||
"cluster.routing.allocation.disk.watermark.high": "50gb",
|
||||
"cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
|
||||
"cluster.info.update.interval": "1m",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/0fbca60a487f5f22a4d51d73b2434cc4.asciidoc (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "elser-embeddings",
|
||||
mappings: {
|
||||
properties: {
|
||||
content_embedding: {
|
||||
type: "sparse_vector",
|
||||
},
|
||||
content: {
|
||||
type: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,6 +5,11 @@
|
||||
----
|
||||
const response = await client.security.queryUser({
|
||||
with_profile_uid: "true",
|
||||
query: {
|
||||
prefix: {
|
||||
roles: "other",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
docs/doc_examples/11be807bdeaeecc8174dec88e0851ea7.asciidoc (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "GET",
|
||||
path: "/_connector/_sync_job",
|
||||
querystring: {
|
||||
connector_id: "my-connector-id",
|
||||
size: "1",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/120fcf9f55128d6a81d5e87a9c235bbd.asciidoc (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_inference/chat_completion/openai-completion/_stream",
|
||||
body: {
|
||||
model: "gpt-4o",
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: "What is Elastic?",
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/12e9e758f7f18a6cbf27e9d0aea57a19.asciidoc (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.update({
|
||||
index: ".elastic-connectors",
|
||||
id: "connector_id",
|
||||
doc: {
|
||||
features: {
|
||||
native_connector_api_keys: {
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,13 +3,11 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_inference/sparse_embedding/my-elser-model",
|
||||
body: {
|
||||
input:
|
||||
"The sky above the port was the color of television tuned to a dead channel.",
|
||||
},
|
||||
const response = await client.inference.inference({
|
||||
task_type: "sparse_embedding",
|
||||
inference_id: "my-elser-model",
|
||||
input:
|
||||
"The sky above the port was the color of television tuned to a dead channel.",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
docs/doc_examples/13fe12cdb73bc89f07a83f1e6b127511.asciidoc (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "google-vertex-ai-embeddings",
|
||||
mappings: {
|
||||
properties: {
|
||||
content_embedding: {
|
||||
type: "dense_vector",
|
||||
dims: 768,
|
||||
element_type: "float",
|
||||
similarity: "dot_product",
|
||||
},
|
||||
content: {
|
||||
type: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/141ef0ebaa3b0772892b79b9bb85efb0.asciidoc (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "my-inference-endpoint",
|
||||
inference_id: "_update",
|
||||
inference_config: {
|
||||
service_settings: {
|
||||
api_key: "<API_KEY>",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/14a33c364873c2f930ca83d0a3005389.asciidoc (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.allocationExplain({
|
||||
index: "my-index",
|
||||
shard: 0,
|
||||
primary: false,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/1522a9297151d7046e6345b9b27539ca.asciidoc (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.connector.updateConfiguration({
|
||||
connector_id: "my-connector-id",
|
||||
values: {
|
||||
host: "127.0.0.1",
|
||||
port: 5432,
|
||||
username: "myuser",
|
||||
password: "mypassword",
|
||||
database: "chinook",
|
||||
schema: "public",
|
||||
tables: "album,artist",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/15f769bbd7b5fddeb3353ae726b71b14.asciidoc (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-bit-vectors",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "dotProduct(params.query_vector, 'my_dense_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
0.23, 1.45, 3.67, 4.89, -0.56, 2.34, 3.21, 1.78, -2.45, 0.98, -0.12,
|
||||
3.45, 4.56, 2.78, 1.23, 0.67, 3.89, 4.12, -2.34, 1.56, 0.78, 3.21,
|
||||
4.12, 2.45, -1.67, 0.34, -3.45, 4.56, -2.78, 1.23, -0.67, 3.89,
|
||||
-4.34, 2.12, -1.56, 0.78, -3.21, 4.45, 2.12, 1.67,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
|
||||
script: {
|
||||
lang: "mustache",
|
||||
source:
|
||||
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
|
||||
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
|
||||
params: {
|
||||
query: "",
|
||||
_es_filters: {},
|
||||
|
||||
docs/doc_examples/16a7ce08b4a6b3af269f27eecc71d664.asciidoc (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.delete({
|
||||
index: "books",
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.indices.delete({
|
||||
index: "my-explicit-mappings-books",
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
docs/doc_examples/17316a81c9dbdd120b7754116bf0461c.asciidoc (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.createApiKey({
|
||||
name: "my-connector-api-key",
|
||||
role_descriptors: {
|
||||
"my-connector-connector-role": {
|
||||
cluster: ["monitor", "manage_connector"],
|
||||
indices: [
|
||||
{
|
||||
names: [
|
||||
"my-index_name",
|
||||
".search-acl-filter-my-index_name",
|
||||
".elastic-connectors*",
|
||||
],
|
||||
privileges: ["all"],
|
||||
allow_restricted_indices: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -16,7 +16,7 @@ const response = await client.search({
|
||||
},
|
||||
},
|
||||
field: "text",
|
||||
inference_id: "my-cohere-rerank-model",
|
||||
inference_id: "elastic-rerank",
|
||||
inference_text: "How often does the moon hide the sun?",
|
||||
rank_window_size: 100,
|
||||
min_score: 0.5,
|
||||
@ -3,8 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.reroute({
|
||||
metric: "none",
|
||||
const response = await client.indices.create({
|
||||
index: "books",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,6 +3,27 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.simulate.ingest({});
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_ingest/_simulate",
|
||||
body: {
|
||||
docs: [
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "123",
|
||||
_source: {
|
||||
foo: "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "456",
|
||||
_source: {
|
||||
foo: "rab",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -3,10 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_inference/text_embedding/openai_embeddings",
|
||||
body: {
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "openai_embeddings",
|
||||
inference_config: {
|
||||
service: "openai",
|
||||
service_settings: {
|
||||
api_key: "<api_key>",
|
||||
|
||||
docs/doc_examples/1a7483796087053ba55029d0dc2ab356.asciidoc (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.index({
|
||||
index: "mv",
|
||||
refresh: "true",
|
||||
document: {
|
||||
a: [2, null, 1],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.esql.query({
|
||||
query: "FROM mv | LIMIT 1",
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
@ -3,10 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_inference/rerank/google_vertex_ai_rerank",
|
||||
body: {
|
||||
const response = await client.inference.put({
|
||||
task_type: "rerank",
|
||||
inference_id: "google_vertex_ai_rerank",
|
||||
inference_config: {
|
||||
service: "googlevertexai",
|
||||
service_settings: {
|
||||
service_account_json: "<service_account_json>",
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "hugging_face_embeddings",
|
||||
id: "hugging_face_embeddings_pipeline",
|
||||
processors: [
|
||||
{
|
||||
inference: {
|
||||
@ -4,7 +4,7 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.delete({
|
||||
index: "my-index",
|
||||
index: "music",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/1cbecd19be22979aefb45b4f160e77ea.asciidoc (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "google_vertex_ai_embeddings_pipeline",
|
||||
processors: [
|
||||
{
|
||||
inference: {
|
||||
model_id: "google_vertex_ai_embeddings",
|
||||
input_output: {
|
||||
input_field: "content",
|
||||
output_field: "content_embedding",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,10 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_inference/text_embedding/azure_ai_studio_embeddings",
|
||||
body: {
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "azure_ai_studio_embeddings",
|
||||
inference_config: {
|
||||
service: "azureaistudio",
|
||||
service_settings: {
|
||||
api_key: "<api_key>",
|
||||
|
||||
@ -3,12 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "GET",
|
||||
path: "/_connector",
|
||||
querystring: {
|
||||
service_type: "sharepoint_online",
|
||||
},
|
||||
const response = await client.connector.list({
|
||||
service_type: "sharepoint_online",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
docs/doc_examples/1fb2c77c0988bc6545040b20e3afa7e9.asciidoc (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.createApiKey({
|
||||
name: "john-api-key",
|
||||
expiration: "1d",
|
||||
role_descriptors: {
|
||||
"sharepoint-online-role": {
|
||||
index: [
|
||||
{
|
||||
names: ["sharepoint-search-application"],
|
||||
privileges: ["read"],
|
||||
query: {
|
||||
template: {
|
||||
params: {
|
||||
access_control: ["john@example.co", "Engineering Members"],
|
||||
},
|
||||
source:
|
||||
'\n {\n "bool": {\n "should": [\n {\n "bool": {\n "must_not": {\n "exists": {\n "field": "_allow_access_control"\n }\n }\n }\n },\n {\n "terms": {\n "_allow_access_control.enum": {{#toJson}}access_control{{/toJson}}\n }\n }\n ]\n }\n }\n ',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
restriction: {
|
||||
workflows: ["search_application_query"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/20179a8889e949d6a8ee5fbf2ba35c96.asciidoc (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "google-vertex-ai-embeddings",
|
||||
knn: {
|
||||
field: "content_embedding",
|
||||
query_vector_builder: {
|
||||
text_embedding: {
|
||||
model_id: "google_vertex_ai_embeddings",
|
||||
model_text: "Calculate fuel cost",
|
||||
},
|
||||
},
|
||||
k: 10,
|
||||
num_candidates: 100,
|
||||
},
|
||||
_source: ["id", "content"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,7 +5,7 @@
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "geoip",
|
||||
description: "Add geoip info",
|
||||
description: "Add ip geolocation info",
|
||||
processors: [
|
||||
{
|
||||
geoip: {
|
||||
@ -3,10 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_inference/text_embedding/azure_ai_studio_embeddings",
|
||||
body: {
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "azure_ai_studio_embeddings",
|
||||
inference_config: {
|
||||
service: "azureaistudio",
|
||||
service_settings: {
|
||||
api_key: "<api_key>",
|
||||
|
||||
@ -5,10 +5,16 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
properties: {
|
||||
card: {
|
||||
type: "wildcard",
|
||||
@ -3,8 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.getSettings({
|
||||
flat_settings: "true",
|
||||
const response = await client.indices.rollover({
|
||||
alias: "datastream",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/246763219ec06172f7aa57bba28d344a.asciidoc (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-rank-vectors-bit",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_vector: {
|
||||
type: "rank_vectors",
|
||||
element_type: "bit",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.bulk({
|
||||
index: "my-rank-vectors-bit",
|
||||
refresh: "true",
|
||||
operations: [
|
||||
{
|
||||
index: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: [127, -127, 0, 1, 42],
|
||||
},
|
||||
{
|
||||
index: {
|
||||
_id: "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: "8100012a7f",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.search({
|
||||
index: "my-rank-vectors-bit",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[
|
||||
0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
|
||||
0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
|
||||
0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
|
||||
0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
@ -10,7 +10,7 @@ const response = await client.search({
|
||||
"date.day_of_week": {
|
||||
type: "keyword",
|
||||
script:
|
||||
"emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
|
||||
"emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
|
||||
},
|
||||
},
|
||||
aggs: {
|
||||
@ -1,28 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.query({
|
||||
format: "txt",
|
||||
query:
|
||||
"\n FROM library\n | SORT page_count DESC\n | KEEP name, author\n | LOOKUP era ON author\n | LIMIT 5\n ",
|
||||
tables: {
|
||||
era: {
|
||||
author: {
|
||||
keyword: [
|
||||
"Frank Herbert",
|
||||
"Peter F. Hamilton",
|
||||
"Vernor Vinge",
|
||||
"Alastair Reynolds",
|
||||
"James S.A. Corey",
|
||||
],
|
||||
},
|
||||
era: {
|
||||
keyword: ["The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -11,7 +11,7 @@ const response = await client.reindex({
|
||||
},
|
||||
dest: {
|
||||
index: "azure-ai-studio-embeddings",
|
||||
pipeline: "azure_ai_studio_embeddings",
|
||||
pipeline: "azure_ai_studio_embeddings_pipeline",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -3,6 +3,14 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.oidcLogout({});
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_security/oidc/logout",
|
||||
body: {
|
||||
token:
|
||||
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
|
||||
refresh_token: "vLBPvmAB6KvwvJZr27cS",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
docs/doc_examples/2a21674c40f9b182a8944769d20b2357.asciidoc (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-rank-vectors-float",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[0.5, 10, 6],
|
||||
[-0.5, 10, 10],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/2a67608dadbf220a2f040f3a79d3677d.asciidoc (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "attachment",
|
||||
description: "Extract attachment information including original binary",
|
||||
processors: [
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
pipeline: "attachment",
|
||||
document: {
|
||||
data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.get({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
docs/doc_examples/2acf75803494fef29f9ca70671aa6be1.asciidoc (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.bulkDeleteRole({
|
||||
names: ["my_admin_role", "superuser"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,9 +3,12 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.asyncQueryGet({
|
||||
id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
|
||||
wait_for_completion_timeout: "30s",
|
||||
const response = await client.transport.request({
|
||||
method: "GET",
|
||||
path: "/_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
|
||||
querystring: {
|
||||
wait_for_completion_timeout: "30s",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
docs/doc_examples/2c079d1ae4819a0c206b9e1aa5623523.asciidoc (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000001",
|
||||
mappings: {
|
||||
properties: {
|
||||
attributes: {
|
||||
type: "passthrough",
|
||||
priority: 10,
|
||||
properties: {
|
||||
id: {
|
||||
type: "keyword",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: 1,
|
||||
document: {
|
||||
attributes: {
|
||||
id: "foo",
|
||||
zone: 10,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.search({
|
||||
index: "my-index-000001",
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
id: "foo",
|
||||
},
|
||||
},
|
||||
{
|
||||
match: {
|
||||
zone: 10,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
|
||||
const response3 = await client.search({
|
||||
index: "my-index-000001",
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
"attributes.id": "foo",
|
||||
},
|
||||
},
|
||||
{
|
||||
match: {
|
||||
"attributes.zone": 10,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response3);
|
||||
----
|
||||
docs/doc_examples/2c86840a46242a38cf82024a9321be46.asciidoc (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-explicit-mappings-books",
|
||||
mappings: {
|
||||
dynamic: false,
|
||||
properties: {
|
||||
name: {
|
||||
type: "text",
|
||||
},
|
||||
author: {
|
||||
type: "text",
|
||||
},
|
||||
release_date: {
|
||||
type: "date",
|
||||
format: "yyyy-MM-dd",
|
||||
},
|
||||
page_count: {
|
||||
type: "integer",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/2d0244c020075595acb625aa5ba8f455.asciidoc (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.index({
|
||||
index: "idx_keep",
|
||||
id: 1,
|
||||
document: {
|
||||
path: {
|
||||
to: [
|
||||
{
|
||||
foo: [3, 2, 1],
|
||||
},
|
||||
{
|
||||
foo: [30, 20, 10],
|
||||
},
|
||||
],
|
||||
bar: "baz",
|
||||
},
|
||||
ids: [200, 100, 300, 100],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/2e09666d3ad5ad9afc22763ee6e97a2b.asciidoc (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.slm.putLifecycle({
|
||||
policy_id: "hourly-snapshots",
|
||||
schedule: "1h",
|
||||
name: "<hourly-snap-{now/d}>",
|
||||
repository: "my_repository",
|
||||
config: {
|
||||
indices: ["data-*", "important"],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/2e7844477b41fcfa9efefee4ec0e7101.asciidoc (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-000001",
|
||||
retriever: {
|
||||
rule: {
|
||||
match_criteria: {
|
||||
query_string: "puggles",
|
||||
user_country: "us",
|
||||
},
|
||||
ruleset_ids: ["my-ruleset"],
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
query_string: {
|
||||
query: "pugs",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
query_string: {
|
||||
query: "puggles",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/30d051f534aeb884176eedb2c11dac85.asciidoc (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "rerank",
|
||||
inference_id: "my-elastic-rerank",
|
||||
inference_config: {
|
||||
service: "elasticsearch",
|
||||
service_settings: {
|
||||
model_id: ".rerank-v1",
|
||||
num_threads: 1,
|
||||
adaptive_allocations: {
|
||||
enabled: true,
|
||||
min_number_of_allocations: 1,
|
||||
max_number_of_allocations: 4,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
docs/doc_examples/30fa37c9575fe81a0ea7c12cfc08e277.asciidoc (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "bad_example_index",
|
||||
mappings: {
|
||||
properties: {
|
||||
field_1: {
|
||||
type: "text",
|
||||
copy_to: "field_2",
|
||||
},
|
||||
field_2: {
|
||||
type: "text",
|
||||
copy_to: "field_3",
|
||||
},
|
||||
field_3: {
|
||||
type: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,7 +4,7 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "azure_ai_studio_embeddings",
|
||||
id: "azure_ai_studio_embeddings_pipeline",
|
||||
processors: [
|
||||
{
|
||||
inference: {
|
||||
@ -14,6 +14,7 @@ const response = await client.indices.putSettings({
|
||||
"index.search.slowlog.threshold.fetch.info": "800ms",
|
||||
"index.search.slowlog.threshold.fetch.debug": "500ms",
|
||||
"index.search.slowlog.threshold.fetch.trace": "200ms",
|
||||
"index.search.slowlog.include.user": true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -5,7 +5,7 @@
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "geoip",
|
||||
description: "Add geoip info",
|
||||
description: "Add ip geolocation info",
|
||||
processors: [
|
||||
{
|
||||
geoip: {
|
||||
docs/doc_examples/339c4e5af9f9069ad9912aa574488b59.asciidoc (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-bit-vectors",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_dense_vector: {
|
||||
type: "dense_vector",
|
||||
index: false,
|
||||
element_type: "bit",
|
||||
dims: 40,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-bit-vectors",
|
||||
id: 1,
|
||||
document: {
|
||||
my_dense_vector: [8, 5, -15, 1, -7],
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.index({
|
||||
index: "my-index-bit-vectors",
|
||||
id: 2,
|
||||
document: {
|
||||
my_dense_vector: [-1, 115, -3, 4, -128],
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
|
||||
const response3 = await client.index({
|
||||
index: "my-index-bit-vectors",
|
||||
id: 3,
|
||||
document: {
|
||||
my_dense_vector: [2, 18, -5, 0, -124],
|
||||
},
|
||||
});
|
||||
console.log(response3);
|
||||
|
||||
const response4 = await client.indices.refresh({
|
||||
index: "my-index-bit-vectors",
|
||||
});
|
||||
console.log(response4);
|
||||
----
|
||||
@ -3,14 +3,11 @@
[source, js]
----
const response = await client.transport.request({
  method: "PUT",
  path: "/_connector/my-connector",
  body: {
    index_name: "search-google-drive",
    name: "My Connector",
    service_type: "google_drive",
  },
const response = await client.connector.put({
  connector_id: "my-connector",
  index_name: "search-google-drive",
  name: "My Connector",
  service_type: "google_drive",
});
console.log(response);
----

@ -11,7 +11,7 @@ const response = await client.reindex({
  },
  dest: {
    index: "openai-embeddings",
    pipeline: "openai_embeddings",
    pipeline: "openai_embeddings_pipeline",
  },
});
console.log(response);
67 docs/doc_examples/37f367ca81a16d3aef4ef7126ec33a2e.asciidoc Normal file
@ -0,0 +1,67 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "movies",
  size: 10,
  retriever: {
    rescorer: {
      rescore: {
        query: {
          window_size: 50,
          rescore_query: {
            script_score: {
              script: {
                source:
                  "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
                params: {
                  queryVector: [-0.5, 90, -10, 14.8, -156],
                },
              },
            },
          },
        },
      },
      retriever: {
        rrf: {
          rank_window_size: 100,
          retrievers: [
            {
              standard: {
                query: {
                  sparse_vector: {
                    field: "plot_embedding",
                    inference_id: "my-elser-model",
                    query: "films that explore psychological depths",
                  },
                },
              },
            },
            {
              standard: {
                query: {
                  multi_match: {
                    query: "crime",
                    fields: ["plot", "title"],
                  },
                },
              },
            },
            {
              knn: {
                field: "vector",
                query_vector: [10, 22, 77],
                k: 10,
                num_candidates: 10,
              },
            },
          ],
        },
      },
    },
  },
});
console.log(response);
----
21 docs/doc_examples/398389933901b572a06a752bc780af7c.asciidoc Normal file
@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "completion",
  inference_id: "anthropic_completion",
  inference_config: {
    service: "anthropic",
    service_settings: {
      api_key: "<api_key>",
      model_id: "<model_id>",
    },
    task_settings: {
      max_tokens: 1024,
    },
  },
});
console.log(response);
----
@ -0,0 +1,8 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.nodes.hotThreads();
console.log(response);
----
@ -11,7 +11,7 @@ const response = await client.reindex({
  },
  dest: {
    index: "amazon-bedrock-embeddings",
    pipeline: "amazon_bedrock_embeddings",
    pipeline: "amazon_bedrock_embeddings_pipeline",
  },
});
console.log(response);
@ -29,6 +29,7 @@ const response = await client.indices.create({
            "arabic_normalization",
            "persian_normalization",
            "persian_stop",
            "persian_stem",
          ],
        },
      },
20 docs/doc_examples/3c0d0c38e1c819a35a68cdba5ae8ccc4.asciidoc Normal file
@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "text_embedding",
  inference_id: "alibabacloud_ai_search_embeddings",
  inference_config: {
    service: "alibabacloud-ai-search",
    service_settings: {
      api_key: "<api_key>",
      service_id: "<service_id>",
      host: "<host>",
      workspace: "<workspace>",
    },
  },
});
console.log(response);
----
@ -6,15 +6,8 @@
const response = await client.indices.updateAliases({
  actions: [
    {
      remove: {
        index: "my-index-000001",
        alias: "my-index",
      },
    },
    {
      add: {
        index: "my-index-000002",
        alias: "my-index",
      remove_index: {
        index: "my-index-2099.05.06-000001",
      },
    },
  ],
@ -3,6 +3,14 @@
[source, js]
----
const response = await client.esql.asyncQuery({});
const response = await client.transport.request({
  method: "POST",
  path: "/_query/async",
  body: {
    query:
      "\n FROM library\n | EVAL year = DATE_TRUNC(1 YEARS, release_date)\n | STATS MAX(page_count) BY year\n | SORT year\n | LIMIT 5\n ",
    wait_for_completion_timeout: "2s",
  },
});
console.log(response);
----

18 docs/doc_examples/3f9dcf2aa42f3ecfb5ebfe48c1774103.asciidoc Normal file
@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "kibana_sample_data_ecommerce",
  size: 0,
  aggs: {
    order_stats: {
      stats: {
        field: "taxful_total_price",
      },
    },
  },
});
console.log(response);
----
@ -3,8 +3,9 @@
[source, js]
----
const response = await client.esql.asyncQueryGet({
  id: "FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
const response = await client.transport.request({
  method: "GET",
  path: "/_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
});
console.log(response);
----

16 docs/doc_examples/40f287bf733420bbab134b74c7d0ea5d.asciidoc Normal file
@ -0,0 +1,16 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.index({
  index: "amazon-reviews",
  id: 1,
  document: {
    review_text:
      "This product is lifechanging! I'm telling all my friends about it.",
    review_vector: [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8],
  },
});
console.log(response);
----
@ -3,16 +3,13 @@
[source, js]
----
const response = await client.transport.request({
  method: "PUT",
  path: "/_connector/my-connector/_pipeline",
  body: {
    pipeline: {
      extract_binary_content: true,
      name: "my-connector-pipeline",
      reduce_whitespace: true,
      run_ml_inference: true,
    },
const response = await client.connector.updatePipeline({
  connector_id: "my-connector",
  pipeline: {
    extract_binary_content: true,
    name: "my-connector-pipeline",
    reduce_whitespace: true,
    run_ml_inference: true,
  },
});
console.log(response);

Some files were not shown because too many files have changed in this diff.