Compare commits

21 Commits

| SHA1 |
|---|
| 1ef1754623 |
| be2fe317f2 |
| 1d2d934b50 |
| d0f5ada03d |
| 47de886973 |
| 1e103baec1 |
| 7d5f622506 |
| 8377b58af3 |
| c150efbd21 |
| e7663aabde |
| c9615dc0ef |
| bb5fb24d73 |
| 38358e20ab |
| 9479d82644 |
| 18df52feb4 |
| f72f9e9a5a |
| c4151ceb35 |
| f3aedc7ad0 |
| 586c42161d |
| 9947b0e365 |
| 52b7264b45 |
@@ -1,20 +1,17 @@
---
agents:
  provider: "gcp"
  image: family/core-ubuntu-2204
  memory: "8G"
  cpu: "2"

steps:
  - label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }})"
  - label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}"
    agents:
      provider: "gcp"
    env:
      NODE_VERSION: "{{ matrix.nodejs }}"
      TEST_SUITE: "platinum"
      STACK_VERSION: 9.0.0
      GITHUB_TOKEN_PATH: "secret/ci/elastic-elasticsearch-js/github-token"
      TEST_ES_STACK: "1"
      TEST_SUITE: "{{ matrix.suite }}"
      STACK_VERSION: 8.16.0
    matrix:
      setup:
        suite:
          - "free"
          - "platinum"
        nodejs:
          - "18"
          - "20"
@@ -24,6 +21,9 @@ steps:
  - wait: ~
    continue_on_failure: true
  - label: ":junit: Test results"
    agents:
      provider: "gcp"
      image: family/core-ubuntu-2204
    plugins:
      - junit-annotate#v2.4.1:
          artifacts: "junit-output/junit-*.xml"

@@ -10,29 +10,22 @@ export NODE_VERSION=${NODE_VERSION:-18}

echo "--- :javascript: Building Docker image"
docker build \
  --file "$script_path/Dockerfile" \
  --tag elastic/elasticsearch-js \
  --build-arg NODE_VERSION="$NODE_VERSION" \
  .
  --file "$script_path/Dockerfile" \
  --tag elastic/elasticsearch-js \
  --build-arg NODE_VERSION="$NODE_VERSION" \
  .

GITHUB_TOKEN=$(vault read -field=token "$GITHUB_TOKEN_PATH")
export GITHUB_TOKEN

echo "--- :javascript: Running tests"
echo "--- :javascript: Running $TEST_SUITE tests"
mkdir -p "$repo/junit-output"
docker run \
  --network="${network_name}" \
  --env TEST_ES_STACK \
  --env STACK_VERSION \
  --env GITHUB_TOKEN \
  --env "TEST_ES_SERVER=${elasticsearch_url}" \
  --env "ELASTIC_PASSWORD=${elastic_password}" \
  --env "ELASTIC_USER=elastic" \
  --env "BUILDKITE=true" \
  --volume "/usr/src/app/node_modules" \
  --volume "$repo:/usr/src/app" \
  --volume "$repo/junit-output:/junit-output" \
  --name elasticsearch-js \
  --rm \
  elastic/elasticsearch-js \
  bash -c "npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"
  --network="${network_name}" \
  --env "TEST_ES_SERVER=${elasticsearch_url}" \
  --env "ELASTIC_PASSWORD=${elastic_password}" \
  --env "TEST_SUITE=${TEST_SUITE}" \
  --env "ELASTIC_USER=elastic" \
  --env "BUILDKITE=true" \
  --volume "$repo/junit-output:/junit-output" \
  --name elasticsearch-js \
  --rm \
  elastic/elasticsearch-js \
  bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"

@@ -6,6 +6,3 @@ elasticsearch
lib
junit-output
.tap
rest-api-spec
yaml-rest-tests
generated-tests

3 .github/CODEOWNERS (vendored)
@@ -1,3 +0,0 @@
package.json @joshmock
renovate.json @joshmock
catalog-info.yaml @joshmock

14 .github/make.sh (vendored)
@@ -65,7 +65,7 @@ codegen)
    if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
      # fall back to branch name or `main` if no VERSION is set
      branch_name=$(git rev-parse --abbrev-ref HEAD)
      if [[ "$branch_name" =~ ^[0-9]+\.([0-9]+|x) ]]; then
      if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
        echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
        VERSION="$branch_name"
      else
@@ -150,7 +150,7 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
    -u "$(id -u):$(id -g)" \
    --volume "$repo:/usr/src/elasticsearch-js" \
    --volume /usr/src/elasticsearch-js/node_modules \
    --volume "$(realpath "$repo/../elastic-client-generator-js"):/usr/src/elastic-client-generator-js" \
    --volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
    --env "WORKFLOW=$WORKFLOW" \
    --name make-elasticsearch-js \
    --rm \
@@ -159,14 +159,6 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
    node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
else
  echo -e "\033[34;1mINFO: Running in CI mode"

  # determine branch to clone
  GENERATOR_BRANCH="main"
  if [[ "$VERSION" == 8.* ]]; then
    GENERATOR_BRANCH="8.x"
  fi
  echo -e "\033[34;1mINFO: Generator branch: $GENERATOR_BRANCH"

  docker run \
    --volume "$repo:/usr/src/elasticsearch-js" \
    --volume /usr/src/elasticsearch-js/node_modules \
@@ -176,7 +168,7 @@ else
    --rm \
    $product \
    /bin/bash -c "cd /usr/src && \
      git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
      git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
      mkdir -p /usr/src/elastic-client-generator-js/output && \
      cd /usr/src/elasticsearch-js && \
      node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"

26 .github/workflows/nodejs.yml (vendored)
@@ -11,10 +11,10 @@ jobs:
    outputs:
      src-only: "${{ steps.changes.outputs.src-only }}"
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: dorny/paths-filter/@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
      - uses: dorny/paths-filter/@v3.0.2
        id: changes
        with:
          filters: |
@@ -36,12 +36,12 @@ jobs:
        os: [ubuntu-latest, windows-latest, macOS-latest]

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

@@ -66,12 +66,12 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Use Node.js
        uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
        uses: actions/setup-node@v4
        with:
          node-version: 22.x

@@ -96,12 +96,12 @@ jobs:
        os: [ubuntu-latest, windows-latest, macOS-latest]

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Use Bun
        uses: oven-sh/setup-bun@4bc047ad259df6fc24a6c9b0f9a0cb08cf17fbe5 # v2
        uses: oven-sh/setup-bun@v2

      - name: Install
        run: |
@@ -118,3 +118,13 @@ jobs:
      - name: ECMAScript module test
        run: |
          bun run test:esm

  auto-approve:
    name: Auto-approve
    needs: [test, license]
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    if: github.actor == 'elasticmachine'
    steps:
      - uses: hmarr/auto-approve-action@v4

4 .github/workflows/npm-publish.yml (vendored)
@@ -12,11 +12,11 @@ jobs:
      contents: write
      id-token: write
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.branch }}
      - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
      - uses: actions/setup-node@v4
        with:
          node-version: "22.x"
          registry-url: "https://registry.npmjs.org"

6 .github/workflows/serverless-patch.yml (vendored)
@@ -26,14 +26,14 @@ jobs:
        )
      )
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
          repository: elastic/elasticsearch-js
          ref: main
          path: stack
          fetch-depth: 0
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
          repository: elastic/elasticsearch-serverless-js
@@ -42,7 +42,7 @@ jobs:
      - name: Apply patch from stack to serverless
        id: apply-patch
        run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
      - uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
      - uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GH_TOKEN }}
          path: serverless

2 .github/workflows/stale.yml (vendored)
@@ -8,7 +8,7 @@ jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@1160a2240286f5da8ec72b1c0816ce2481aabf84 # v8
      - uses: actions/stale@v8
        with:
          stale-issue-label: stale
          stale-pr-label: stale

4 .gitignore (vendored)
@@ -68,7 +68,3 @@ bun.lockb
test-results
processinfo
.tap
rest-api-spec
yaml-rest-tests
generated-tests
schema

@@ -74,6 +74,3 @@ CONTRIBUTING.md
src
bun.lockb
.tap
rest-api-spec
yaml-rest-tests
generated-tests

@@ -42,9 +42,6 @@ spec:
        main:
          branch: "main"
          cronline: "@daily"
        8_x:
          branch: "8.x"
          cronline: "@daily"
        8_14:
          branch: "8.16"
          branch: "8.14"
          cronline: "@daily"

@@ -167,19 +167,16 @@ const client = new Client({
----

|`nodeFilter`
a|`function` - Takes a `Connection` and returns `true` if it can be sent a request, otherwise `false`. +
a|`function` - Filters which node not to use for a request. +
_Default:_
[source,js]
----
function defaultNodeFilter (conn) {
  if (conn.roles != null) {
    if (
      // avoid master-only nodes
      conn.roles.master &&
      !conn.roles.data &&
      !conn.roles.ingest &&
      !conn.roles.ml
    ) return false
function defaultNodeFilter (node) {
  // avoid master only nodes
  if (node.roles.master === true &&
      node.roles.data === false &&
      node.roles.ingest === false) {
    return false
  }
  return true
}
----
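For illustration only, here is a minimal sketch of supplying a custom `nodeFilter` at instantiation; the node URL and the exact filtering rule are placeholders and are not part of the change above.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200', // placeholder
  // skip nodes whose roles mark them as master-only
  nodeFilter (node) {
    const roles = node.roles ?? {}
    if (roles.master && !roles.data && !roles.ingest && !roles.ml) return false
    return true
  }
})
----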
@@ -1,71 +1,6 @@
[[changelog-client]]
== Release notes

[discrete]
=== 8.18.2

[discrete]
==== Fixes

[discrete]
===== Ensure Apache Arrow ES|QL helper uses async iterator

The `esql.toArrowReader()` helper function was trying to return `RecordBatchStreamReader`, a synchronous iterator, despite the fact that the `apache-arrow` package was, in most cases, automatically coercing it to `AsyncRecordBatchStreamReader`, its asynchronous counterpart. It is now always returned as an async iterator.
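A hedged usage sketch of the async reader described above; the index name and query are placeholders, and it assumes the helper is reached through `client.helpers.esql()` as in recent client versions.

[source,js]
----
// placeholder ES|QL query
const result = client.helpers.esql({ query: 'FROM my-index | LIMIT 10' })
const reader = await result.toArrowReader()

// the reader now always behaves as an async iterator of Arrow record batches
for await (const batch of reader) {
  console.log(batch.numRows)
}
----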
[discrete]
=== 8.18.1

[discrete]
==== Fixes

[discrete]
===== Fix broken node roles and node filter

The docs note a `nodeFilter` option on the client that will, by default, filter the nodes based on any `roles` values that are set at instantiation. At some point, this functionality was partially disabled. This brings the feature back, ensuring that it matches what the documentation has said it does all along.
[discrete]
=== 8.18.0

[discrete]
==== Features

[discrete]
===== Support for Elasticsearch `v8.18`

You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.18/release-notes-8.18.0.html[here].

[discrete]
==== Fixes

[discrete]
===== Improved Cloud ID parsing

When using a Cloud ID as the `cloud` parameter to instantiate the client, that ID was assumed to be in the correct format. New assertions have been added to verify that format and throw a `ConfigurationError` if it is invalid. See https://github.com/elastic/elasticsearch-js/issues/2694[#2694].
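A short sketch of the stricter parsing described above; the malformed ID and the API key are placeholders.

[source,js]
----
const { Client, errors } = require('@elastic/elasticsearch')

try {
  // deliberately malformed cloud ID; valid IDs have the form "name:<base64 data>"
  const client = new Client({
    cloud: { id: 'not-a-valid-cloud-id' },
    auth: { apiKey: 'placeholder-api-key' }
  })
} catch (err) {
  if (err instanceof errors.ConfigurationError) {
    console.error('Invalid cloud ID:', err.message)
  }
}
----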
[discrete]
=== 8.17.0

[discrete]
==== Features

[discrete]
===== Support for Elasticsearch `v8.17`

You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.17/release-notes-8.17.0.html[here].

[discrete]
=== 8.16.3

[discrete]
==== Fixes

[discrete]
===== Improved support for Elasticsearch `v8.16`

Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.

[discrete]
=== 8.16.2

@@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getDataStream({
  name: "my-data-stream",
  filter_path: "data_streams.indices.index_name",
});
console.log(response);
----

@ -1,46 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
range: {
|
||||
year: {
|
||||
gt: 2023,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "elastic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
aggs: {
|
||||
topics: {
|
||||
terms: {
|
||||
field: "topic",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "rerank",
|
||||
inference_id: "my-rerank-model",
|
||||
inference_config: {
|
||||
service: "cohere",
|
||||
service_settings: {
|
||||
model_id: "rerank-english-v3.0",
|
||||
api_key: "{{COHERE_API_KEY}}",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,42 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-rank-vectors-bit",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_vector: {
|
||||
type: "rank_vectors",
|
||||
element_type: "bit",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.bulk({
|
||||
index: "my-rank-vectors-bit",
|
||||
refresh: "true",
|
||||
operations: [
|
||||
{
|
||||
index: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: [127, -127, 0, 1, 42],
|
||||
},
|
||||
{
|
||||
index: {
|
||||
_id: "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: "8100012a7f",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
@ -1,49 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example_nested",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
nested: {
|
||||
path: "nested_field",
|
||||
inner_hits: {
|
||||
name: "nested_vector",
|
||||
_source: false,
|
||||
fields: ["nested_field.paragraph_id"],
|
||||
},
|
||||
query: {
|
||||
knn: {
|
||||
field: "nested_field.nested_vector",
|
||||
query_vector: [1, 0, 0.5],
|
||||
k: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "ai",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: ["topic"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@@ -5,9 +5,11 @@
----
const response = await client.esql.asyncQuery({
  format: "json",
  query:
    "\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
  include_ccs_metadata: true,
  body: {
    query:
      "\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
    include_ccs_metadata: true,
  },
});
console.log(response);
----

@ -1,57 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "elastic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
query_string: {
|
||||
query:
|
||||
"(information retrieval) OR (artificial intelligence)",
|
||||
default_field: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
knn: {
|
||||
field: "vector",
|
||||
query_vector: [0.23, 0.67, 0.89],
|
||||
k: 3,
|
||||
num_candidates: 5,
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
size: 1,
|
||||
explain: true,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@@ -5,18 +5,20 @@
----
const response = await client.searchApplication.renderQuery({
  name: "my-app",
  params: {
    query_string: "my first query",
    text_fields: [
      {
        name: "title",
        boost: 5,
      },
      {
        name: "description",
        boost: 1,
      },
    ],
  body: {
    params: {
      query_string: "my first query",
      text_fields: [
        {
          name: "title",
          boost: 5,
        },
        {
          name: "description",
          boost: 1,
        },
      ],
    },
  },
});
console.log(response);

15 docs/doc_examples/0e83f140237d75469a428ff403564bb5.asciidoc (Normal file)
@@ -0,0 +1,15 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.cluster.putSettings({
  persistent: {
    "cluster.routing.allocation.disk.watermark.low": "100gb",
    "cluster.routing.allocation.disk.watermark.high": "50gb",
    "cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
    "cluster.info.update.interval": "1m",
  },
});
console.log(response);
----

@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.streamInference({
|
||||
task_type: "chat_completion",
|
||||
inference_id: "openai-completion",
|
||||
model: "gpt-4o",
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: "What is Elastic?",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,11 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.addBlock({
|
||||
index: ".ml-anomalies-custom-example",
|
||||
block: "read_only",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,25 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.createFrom({
|
||||
source: "my-index",
|
||||
dest: "my-new-index",
|
||||
create_from: {
|
||||
settings_override: {
|
||||
index: {
|
||||
number_of_shards: 5,
|
||||
},
|
||||
},
|
||||
mappings_override: {
|
||||
properties: {
|
||||
field2: {
|
||||
type: "boolean",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,15 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.update({
|
||||
inference_id: "my-inference-endpoint",
|
||||
inference_config: {
|
||||
service_settings: {
|
||||
api_key: "<API_KEY>",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -6,15 +6,14 @@
|
||||
const response = await client.search({
|
||||
index: "test-index",
|
||||
query: {
|
||||
match: {
|
||||
my_semantic_field: "Which country is Paris in?",
|
||||
},
|
||||
},
|
||||
highlight: {
|
||||
fields: {
|
||||
my_semantic_field: {
|
||||
number_of_fragments: 2,
|
||||
order: "score",
|
||||
nested: {
|
||||
path: "inference_field.inference.chunks",
|
||||
query: {
|
||||
sparse_vector: {
|
||||
field: "inference_field.inference.chunks.embeddings",
|
||||
inference_id: "my-inference-id",
|
||||
query: "mountain lake",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1,10 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.cancelMigrateReindex({
|
||||
index: "my-data-stream",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: true,
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
|
||||
script: {
|
||||
lang: "mustache",
|
||||
source:
|
||||
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
|
||||
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
|
||||
params: {
|
||||
query: "",
|
||||
_es_filters: {},
|
||||
|
||||
@ -1,12 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.delegatePki({
|
||||
x509_certificate_chain: [
|
||||
"MIIDeDCCAmCgAwIBAgIUBzj/nGGKxP2iXawsSquHmQjCJmMwDQYJKoZIhvcNAQELBQAwUzErMCkGA1UEAxMiRWxhc3RpY3NlYXJjaCBUZXN0IEludGVybWVkaWF0ZSBDQTEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMB4XDTIzMDcxODE5MjkwNloXDTQzMDcxMzE5MjkwNlowSjEiMCAGA1UEAxMZRWxhc3RpY3NlYXJjaCBUZXN0IENsaWVudDEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAllHL4pQkkfwAm/oLkxYYO+r950DEy1bjH+4viCHzNADLCTWO+lOZJVlNx7QEzJE3QGMdif9CCBBxQFMapA7oUFCLq84fPSQQu5AnvvbltVD9nwVtCs+9ZGDjMKsz98RhSLMFIkxdxi6HkQ3Lfa4ZSI4lvba4oo+T/GveazBDS+NgmKyq00EOXt3tWi1G9vEVItommzXWfv0agJWzVnLMldwkPqsw0W7zrpyT7FZS4iLbQADGceOW8fiauOGMkscu9zAnDR/SbWl/chYioQOdw6ndFLn1YIFPd37xL0WsdsldTpn0vH3YfzgLMffT/3P6YlwBegWzsx6FnM/93Ecb4wIDAQABo00wSzAJBgNVHRMEAjAAMB0GA1UdDgQWBBQKNRwjW+Ad/FN1Rpoqme/5+jrFWzAfBgNVHSMEGDAWgBRcya0c0x/PaI7MbmJVIylWgLqXNjANBgkqhkiG9w0BAQsFAAOCAQEACZ3PF7Uqu47lplXHP6YlzYL2jL0D28hpj5lGtdha4Muw1m/BjDb0Pu8l0NQ1z3AP6AVcvjNDkQq6Y5jeSz0bwQlealQpYfo7EMXjOidrft1GbqOMFmTBLpLA9SvwYGobSTXWTkJzonqVaTcf80HpMgM2uEhodwTcvz6v1WEfeT/HMjmdIsq4ImrOL9RNrcZG6nWfw0HR3JNOgrbfyEztEI471jHznZ336OEcyX7gQuvHE8tOv5+oD1d7s3Xg1yuFp+Ynh+FfOi3hPCuaHA+7F6fLmzMDLVUBAllugst1C3U+L/paD7tqIa4ka+KNPCbSfwazmJrt4XNiivPR4hwH5g==",
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,27 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
retriever: {
|
||||
text_similarity_reranker: {
|
||||
retriever: {
|
||||
standard: {
|
||||
query: {
|
||||
match: {
|
||||
text: "How often does the moon hide the sun?",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
field: "text",
|
||||
inference_id: "elastic-rerank",
|
||||
inference_text: "How often does the moon hide the sun?",
|
||||
rank_window_size: 100,
|
||||
min_score: 0.5,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -8,6 +8,11 @@ const response = await client.search({
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
term: {
|
||||
"category.keyword": "Main Course",
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
tags: "vegetarian",
|
||||
@ -22,11 +27,6 @@ const response = await client.search({
|
||||
},
|
||||
],
|
||||
should: [
|
||||
{
|
||||
term: {
|
||||
category: "Main Course",
|
||||
},
|
||||
},
|
||||
{
|
||||
multi_match: {
|
||||
query: "curry spicy",
|
||||
@ -4,22 +4,24 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.simulate.ingest({
|
||||
docs: [
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "123",
|
||||
_source: {
|
||||
foo: "bar",
|
||||
body: {
|
||||
docs: [
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "123",
|
||||
_source: {
|
||||
foo: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "456",
|
||||
_source: {
|
||||
foo: "rab",
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "456",
|
||||
_source: {
|
||||
foo: "rab",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -9,6 +9,7 @@ const response = await client.indices.create({
|
||||
properties: {
|
||||
inference_field: {
|
||||
type: "semantic_text",
|
||||
inference_id: "my-elser-endpoint",
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1,19 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.queryRole({
|
||||
query: {
|
||||
bool: {
|
||||
must_not: {
|
||||
term: {
|
||||
"metadata._reserved": true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
sort: ["name"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -14,7 +14,6 @@ const response = await client.indices.putSettings({
|
||||
"index.search.slowlog.threshold.fetch.info": "800ms",
|
||||
"index.search.slowlog.threshold.fetch.debug": "500ms",
|
||||
"index.search.slowlog.threshold.fetch.trace": "200ms",
|
||||
"index.search.slowlog.include.user": true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -1,67 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-rank-vectors-bit",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_vector: {
|
||||
type: "rank_vectors",
|
||||
element_type: "bit",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.bulk({
|
||||
index: "my-rank-vectors-bit",
|
||||
refresh: "true",
|
||||
operations: [
|
||||
{
|
||||
index: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: [127, -127, 0, 1, 42],
|
||||
},
|
||||
{
|
||||
index: {
|
||||
_id: "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: "8100012a7f",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.search({
|
||||
index: "my-rank-vectors-bit",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[
|
||||
0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
|
||||
0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
|
||||
0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
|
||||
0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
28 docs/doc_examples/2577acb462b95bd4394523cf2f8a661f.asciidoc (Normal file)
@ -0,0 +1,28 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.query({
|
||||
format: "txt",
|
||||
query:
|
||||
"\n FROM library\n | SORT page_count DESC\n | KEEP name, author\n | LOOKUP era ON author\n | LIMIT 5\n ",
|
||||
tables: {
|
||||
era: {
|
||||
author: {
|
||||
keyword: [
|
||||
"Frank Herbert",
|
||||
"Peter F. Hamilton",
|
||||
"Vernor Vinge",
|
||||
"Alastair Reynolds",
|
||||
"James S.A. Corey",
|
||||
],
|
||||
},
|
||||
era: {
|
||||
keyword: ["The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,11 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.addBlock({
|
||||
index: ".ml-anomalies-custom-example",
|
||||
block: "write",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,9 +4,11 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.oidcLogout({
|
||||
token:
|
||||
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
|
||||
refresh_token: "vLBPvmAB6KvwvJZr27cS",
|
||||
body: {
|
||||
token:
|
||||
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
|
||||
refresh_token: "vLBPvmAB6KvwvJZr27cS",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -1,26 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-rank-vectors-float",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[0.5, 10, 6],
|
||||
[-0.5, 10, 10],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,35 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "attachment",
|
||||
description: "Extract attachment information including original binary",
|
||||
processors: [
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
pipeline: "attachment",
|
||||
document: {
|
||||
data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.get({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
@ -1,24 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "test-index",
|
||||
query: {
|
||||
match: {
|
||||
my_field: "Which country is Paris in?",
|
||||
},
|
||||
},
|
||||
highlight: {
|
||||
fields: {
|
||||
my_field: {
|
||||
type: "semantic",
|
||||
number_of_fragments: 2,
|
||||
order: "score",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@@ -6,6 +6,7 @@
const response = await client.esql.asyncQueryGet({
  id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
  wait_for_completion_timeout: "30s",
  body: null,
});
console.log(response);
----

@ -5,8 +5,11 @@
|
||||
----
|
||||
const response = await client.cluster.putSettings({
|
||||
persistent: {
|
||||
"cluster.routing.allocation.disk.watermark.low": "90%",
|
||||
"cluster.routing.allocation.disk.watermark.high": "95%",
|
||||
"cluster.indices.close.enable": false,
|
||||
"indices.recovery.max_bytes_per_sec": "50mb",
|
||||
},
|
||||
transient: {
|
||||
"*": null,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -4,10 +4,9 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putSettings({
|
||||
index: "*",
|
||||
index: "my-index-000001",
|
||||
settings: {
|
||||
"index.indexing.slowlog.include.user": true,
|
||||
"index.indexing.slowlog.threshold.index.warn": "30s",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -1,23 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "test-index",
|
||||
mappings: {
|
||||
properties: {
|
||||
source_field: {
|
||||
type: "text",
|
||||
fields: {
|
||||
infer_field: {
|
||||
type: "semantic_text",
|
||||
inference_id: ".elser-2-elasticsearch",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,22 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.createFrom({
|
||||
source: "my-index",
|
||||
dest: "my-new-index",
|
||||
create_from: {
|
||||
settings_override: {
|
||||
index: {
|
||||
"blocks.write": null,
|
||||
"blocks.read": null,
|
||||
"blocks.read_only": null,
|
||||
"blocks.read_only_allow_delete": null,
|
||||
"blocks.metadata": null,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -45,7 +45,7 @@ console.log(response);
|
||||
|
||||
const response1 = await client.indices.putIndexTemplate({
|
||||
name: 2,
|
||||
index_patterns: ["k9s*"],
|
||||
index_patterns: ["k8s*"],
|
||||
composed_of: ["destination_template"],
|
||||
data_stream: {},
|
||||
});
|
||||
@ -1,23 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "rerank",
|
||||
inference_id: "my-elastic-rerank",
|
||||
inference_config: {
|
||||
service: "elasticsearch",
|
||||
service_settings: {
|
||||
model_id: ".rerank-v1",
|
||||
num_threads: 1,
|
||||
adaptive_allocations: {
|
||||
enabled: true,
|
||||
min_number_of_allocations: 1,
|
||||
max_number_of_allocations: 4,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,28 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-*",
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
"user.id": "kimchy",
|
||||
},
|
||||
},
|
||||
],
|
||||
must_not: [
|
||||
{
|
||||
terms: {
|
||||
_index: ["my-index-01"],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@@ -3,6 +3,8 @@

[source, js]
----
const response = await client.indices.resolveCluster();
const response = await client.indices.unfreeze({
  index: "my-index-000001",
});
console.log(response);
----

@ -1,31 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ilm.putLifecycle({
|
||||
name: "my_policy",
|
||||
policy: {
|
||||
phases: {
|
||||
hot: {
|
||||
actions: {
|
||||
rollover: {
|
||||
max_primary_shard_size: "50gb",
|
||||
},
|
||||
searchable_snapshot: {
|
||||
snapshot_repository: "backing_repo",
|
||||
replicate_for: "14d",
|
||||
},
|
||||
},
|
||||
},
|
||||
delete: {
|
||||
min_age: "28d",
|
||||
actions: {
|
||||
delete: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,11 +4,9 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putSettings({
|
||||
index: ".reindexed-v9-ml-anomalies-custom-example",
|
||||
index: "my-index-000001",
|
||||
settings: {
|
||||
index: {
|
||||
number_of_replicas: 0,
|
||||
},
|
||||
"index.search.slowlog.include.user": true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -6,7 +6,6 @@
|
||||
const response = await client.indices.resolveCluster({
|
||||
name: "not-present,clust*:my-index*,oldcluster:*",
|
||||
ignore_unavailable: "false",
|
||||
timeout: "5s",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: true,
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
@ -1,70 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "movies",
|
||||
size: 10,
|
||||
retriever: {
|
||||
rescorer: {
|
||||
rescore: {
|
||||
window_size: 50,
|
||||
query: {
|
||||
rescore_query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source:
|
||||
"cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
|
||||
params: {
|
||||
queryVector: [-0.5, 90, -10, 14.8, -156],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
retriever: {
|
||||
rrf: {
|
||||
rank_window_size: 100,
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
sparse_vector: {
|
||||
field: "plot_embedding",
|
||||
inference_id: "my-elser-model",
|
||||
query: "films that explore psychological depths",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
multi_match: {
|
||||
query: "crime",
|
||||
fields: ["plot", "title"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
knn: {
|
||||
field: "vector",
|
||||
query_vector: [10, 22, 77],
|
||||
k: 10,
|
||||
num_candidates: 10,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,23 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index",
|
||||
settings: {
|
||||
index: {
|
||||
number_of_shards: 3,
|
||||
"blocks.write": true,
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
properties: {
|
||||
field1: {
|
||||
type: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
23 docs/doc_examples/38ba93890494bfa7beece58dffa44f98.asciidoc (Normal file)
@ -0,0 +1,23 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.bulk({
|
||||
index: "test-index",
|
||||
operations: [
|
||||
{
|
||||
update: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
doc: {
|
||||
infer_field: "updated inference field",
|
||||
source_field: "updated source field",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,19 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: ".ml-anomalies-custom-example",
|
||||
size: 0,
|
||||
aggs: {
|
||||
job_ids: {
|
||||
terms: {
|
||||
field: "job_id",
|
||||
size: 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,61 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
linear: {
|
||||
retrievers: [
|
||||
{
|
||||
retriever: {
|
||||
standard: {
|
||||
query: {
|
||||
function_score: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "ai",
|
||||
},
|
||||
},
|
||||
functions: [
|
||||
{
|
||||
script_score: {
|
||||
script: {
|
||||
source: "doc['timestamp'].value.millis",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
boost_mode: "replace",
|
||||
},
|
||||
},
|
||||
sort: {
|
||||
timestamp: {
|
||||
order: "asc",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
weight: 2,
|
||||
normalizer: "minmax",
|
||||
},
|
||||
{
|
||||
retriever: {
|
||||
knn: {
|
||||
field: "vector",
|
||||
query_vector: [0.23, 0.67, 0.89],
|
||||
k: 3,
|
||||
num_candidates: 5,
|
||||
},
|
||||
},
|
||||
weight: 1.5,
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,16 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.updateAliases({
|
||||
actions: [
|
||||
{
|
||||
remove_index: {
|
||||
index: "my-index-2099.05.06-000001",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,9 +4,11 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.asyncQuery({
|
||||
query:
|
||||
"\n FROM library\n | EVAL year = DATE_TRUNC(1 YEARS, release_date)\n | STATS MAX(page_count) BY year\n | SORT year\n | LIMIT 5\n ",
|
||||
wait_for_completion_timeout: "2s",
|
||||
body: {
|
||||
query:
|
||||
"\n FROM library\n | EVAL year = DATE_TRUNC(1 YEARS, release_date)\n | STATS MAX(page_count) BY year\n | SORT year\n | LIMIT 5\n ",
|
||||
wait_for_completion_timeout: "2s",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "kibana_sample_data_ecommerce",
|
||||
size: 0,
|
||||
aggs: {
|
||||
order_stats: {
|
||||
stats: {
|
||||
field: "taxful_total_price",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,6 +5,7 @@
|
||||
----
|
||||
const response = await client.esql.asyncQueryGet({
|
||||
id: "FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
|
||||
body: null,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "jinaai-index",
|
||||
mappings: {
|
||||
properties: {
|
||||
content: {
|
||||
type: "semantic_text",
|
||||
inference_id: "jinaai-embeddings",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -6,11 +6,15 @@
|
||||
const response = await client.update({
|
||||
index: "test",
|
||||
id: 1,
|
||||
doc: {
|
||||
product_price: 100,
|
||||
script: {
|
||||
source: "ctx._source.counter += params.count",
|
||||
lang: "painless",
|
||||
params: {
|
||||
count: 4,
|
||||
},
|
||||
},
|
||||
upsert: {
|
||||
product_price: 50,
|
||||
counter: 1,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -1,45 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.streamInference({
|
||||
task_type: "chat_completion",
|
||||
inference_id: "openai-completion",
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "What's the price of a scarf?",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
tools: [
|
||||
{
|
||||
type: "function",
|
||||
function: {
|
||||
name: "get_current_price",
|
||||
description: "Get the current price of a item",
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: {
|
||||
item: {
|
||||
id: "123",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
tool_choice: {
|
||||
type: "function",
|
||||
function: {
|
||||
name: "get_current_price",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,10 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getMigrateReindexStatus({
|
||||
index: "my-data-stream",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -6,11 +6,6 @@
|
||||
const response = await client.ml.startTrainedModelDeployment({
|
||||
model_id: "my_model",
|
||||
deployment_id: "my_model_for_search",
|
||||
adaptive_allocations: {
|
||||
enabled: true,
|
||||
min_number_of_allocations: 3,
|
||||
max_number_of_allocations: 10,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -3,18 +3,15 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "image-index",
|
||||
const response = await client.knnSearch({
|
||||
index: "my-index",
|
||||
knn: {
|
||||
field: "image-vector",
|
||||
query_vector: [-5, 9, -12],
|
||||
field: "image_vector",
|
||||
query_vector: [0.3, 0.1, 1.2],
|
||||
k: 10,
|
||||
num_candidates: 100,
|
||||
rescore_vector: {
|
||||
oversample: 2,
|
||||
},
|
||||
},
|
||||
fields: ["title", "file-type"],
|
||||
_source: ["name", "file_type"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -6,7 +6,9 @@
|
||||
const response = await client.inference.streamInference({
|
||||
task_type: "completion",
|
||||
inference_id: "openai-completion",
|
||||
input: "What is Elastic?",
|
||||
body: {
|
||||
input: "What is Elastic?",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -3,8 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.migration.deprecations({
|
||||
index: ".ml-anomalies-*",
|
||||
const response = await client.cluster.getSettings({
|
||||
flat_settings: "true",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,17 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "sparse_embedding",
|
||||
inference_id: "elser-model-eis",
|
||||
inference_config: {
|
||||
service: "elastic",
|
||||
service_settings: {
|
||||
model_name: "elser",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.simulate({
|
||||
id: "query_helper_pipeline",
|
||||
docs: [
|
||||
{
|
||||
_source: {
|
||||
content:
|
||||
"artificial intelligence in medicine articles published in the last 12 months",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,9 +4,11 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.oidcPrepareAuthentication({
|
||||
realm: "oidc1",
|
||||
state: "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO",
|
||||
nonce: "zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5",
|
||||
body: {
|
||||
realm: "oidc1",
|
||||
state: "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO",
|
||||
nonce: "zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -1,16 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "jinaai-index",
|
||||
query: {
|
||||
semantic: {
|
||||
field: "content",
|
||||
query: "who inspired taking care of the sea?",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,7 +4,7 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000003",
|
||||
index: "my-index-000002",
|
||||
mappings: {
|
||||
properties: {
|
||||
inference_field: {
|
||||
@ -1,10 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getSettings({
|
||||
index: ".reindexed-v9-ml-anomalies-custom-example",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -9,6 +9,7 @@ const response = await client.indices.create({
|
||||
properties: {
|
||||
semantic_text: {
|
||||
type: "semantic_text",
|
||||
inference_id: "my-elser-endpoint",
|
||||
},
|
||||
content: {
|
||||
type: "text",
|
||||
@ -11,7 +11,7 @@ const response = await client.ingest.putPipeline({
|
||||
attachment: {
|
||||
field: "data",
|
||||
properties: ["content", "title"],
|
||||
remove_binary: true,
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
@ -5,9 +5,11 @@
|
||||
----
|
||||
const response = await client.esql.asyncQuery({
|
||||
format: "json",
|
||||
query:
|
||||
"\n FROM cluster_one:my-index*,cluster_two:logs*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
|
||||
include_ccs_metadata: true,
|
||||
body: {
|
||||
query:
|
||||
"\n FROM cluster_one:my-index*,cluster_two:logs*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
|
||||
include_ccs_metadata: true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -1,11 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getDataLifecycleStats({
|
||||
human: "true",
|
||||
pretty: "true",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,12 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.createFrom({
|
||||
source: "my-index",
|
||||
dest: "my-new-index",
|
||||
create_from: null,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,10 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.asyncQueryDelete({
|
||||
id: "FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI=",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,15 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putSettings({
|
||||
index: "*",
|
||||
settings: {
|
||||
"index.search.slowlog.include.user": true,
|
||||
"index.search.slowlog.threshold.fetch.warn": "30s",
|
||||
"index.search.slowlog.threshold.query.warn": "30s",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
script: {
lang: "mustache",
source:
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
params: {
query: "",
_es_filters: {},
@ -1,16 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.reindex({
wait_for_completion: "false",
source: {
index: ".ml-anomalies-custom-example",
},
dest: {
index: ".reindexed-v9-ml-anomalies-custom-example",
},
});
console.log(response);
----
@ -4,7 +4,9 @@
[source, js]
----
const response = await client.security.bulkUpdateApiKeys({
ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
body: {
ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
},
});
console.log(response);
----
@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
index: "my-index-000001",
query: {
prefix: {
full_name: {
value: "ki",
},
},
},
highlight: {
fields: {
full_name: {
matched_fields: ["full_name._index_prefix"],
},
},
},
});
console.log(response);
----
@ -1,33 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
index: "kibana_sample_data_ecommerce",
size: 0,
aggs: {
daily_sales: {
date_histogram: {
field: "order_date",
calendar_interval: "day",
},
aggs: {
daily_revenue: {
sum: {
field: "taxful_total_price",
},
},
smoothed_revenue: {
moving_fn: {
buckets_path: "daily_revenue",
window: 3,
script: "MovingFunctions.unweightedAvg(values)",
},
},
},
},
},
});
console.log(response);
----
26 docs/doc_examples/74b229a6e020113e5749099451979c89.asciidoc Normal file
@ -0,0 +1,26 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
index: "test-index",
query: {
nested: {
path: "inference_field.inference.chunks",
query: {
knn: {
field: "inference_field.inference.chunks.embeddings",
query_vector_builder: {
text_embedding: {
model_id: "my_inference_id",
model_text: "mountain lake",
},
},
},
},
},
},
});
console.log(response);
----
@ -1,44 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
{
text_similarity_reranker: {
retriever: {
standard: {
query: {
term: {
topic: "ai",
},
},
},
},
field: "text",
inference_id: "my-rerank-model",
inference_text:
"Can I use generative AI to identify user intent and improve search relevance?",
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
_source: false,
});
console.log(response);
----
@ -4,30 +4,32 @@
[source, js]
----
const response = await client.textStructure.findMessageStructure({
messages: [
"[2024-03-05T10:52:36,256][INFO ][o.a.l.u.VectorUtilPanamaProvider] [laptop] Java vector incubator API enabled; uses preferredBitSize=128",
"[2024-03-05T10:52:41,038][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-url]",
"[2024-03-05T10:52:41,042][INFO ][o.e.p.PluginsService ] [laptop] loaded module [rest-root]",
"[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-core]",
"[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-redact]",
"[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [ingest-user-agent]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-monitoring]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-s3]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-analytics]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-ent-search]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-autoscaling]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-painless]]",
"[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-expression]",
"[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-eql]",
"[2024-03-05T10:52:43,291][INFO ][o.e.e.NodeEnvironment ] [laptop] heap size [16gb], compressed ordinary object pointers [true]",
"[2024-03-05T10:52:46,098][INFO ][o.e.x.s.Security ] [laptop] Security is enabled",
"[2024-03-05T10:52:47,227][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] Profiling is enabled",
"[2024-03-05T10:52:47,259][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] profiling index templates will not be installed or reinstalled",
"[2024-03-05T10:52:47,755][INFO ][o.e.i.r.RecoverySettings ] [laptop] using rate limit [40mb] with [default=40mb, read=0b, write=0b, max=0b]",
"[2024-03-05T10:52:47,787][INFO ][o.e.d.DiscoveryModule ] [laptop] using discovery type [multi-node] and seed hosts providers [settings]",
"[2024-03-05T10:52:49,188][INFO ][o.e.n.Node ] [laptop] initialized",
"[2024-03-05T10:52:49,199][INFO ][o.e.n.Node ] [laptop] starting ...",
],
body: {
messages: [
"[2024-03-05T10:52:36,256][INFO ][o.a.l.u.VectorUtilPanamaProvider] [laptop] Java vector incubator API enabled; uses preferredBitSize=128",
"[2024-03-05T10:52:41,038][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-url]",
"[2024-03-05T10:52:41,042][INFO ][o.e.p.PluginsService ] [laptop] loaded module [rest-root]",
"[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-core]",
"[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-redact]",
"[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [ingest-user-agent]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-monitoring]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-s3]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-analytics]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-ent-search]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-autoscaling]",
"[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-painless]]",
"[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-expression]",
"[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-eql]",
"[2024-03-05T10:52:43,291][INFO ][o.e.e.NodeEnvironment ] [laptop] heap size [16gb], compressed ordinary object pointers [true]",
"[2024-03-05T10:52:46,098][INFO ][o.e.x.s.Security ] [laptop] Security is enabled",
"[2024-03-05T10:52:47,227][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] Profiling is enabled",
"[2024-03-05T10:52:47,259][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] profiling index templates will not be installed or reinstalled",
"[2024-03-05T10:52:47,755][INFO ][o.e.i.r.RecoverySettings ] [laptop] using rate limit [40mb] with [default=40mb, read=0b, write=0b, max=0b]",
"[2024-03-05T10:52:47,787][INFO ][o.e.d.DiscoveryModule ] [laptop] using discovery type [multi-node] and seed hosts providers [settings]",
"[2024-03-05T10:52:49,188][INFO ][o.e.n.Node ] [laptop] initialized",
"[2024-03-05T10:52:49,199][INFO ][o.e.n.Node ] [laptop] starting ...",
],
},
});
console.log(response);
----
@ -1,46 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
text_similarity_reranker: {
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
query_string: {
query:
"(information retrieval) OR (artificial intelligence)",
default_field: "text",
},
},
},
},
{
knn: {
field: "vector",
query_vector: [0.23, 0.67, 0.89],
k: 3,
num_candidates: 5,
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
field: "text",
inference_id: "my-rerank-model",
inference_text:
"What are the state of the art applications of AI in information retrieval?",
},
},
_source: false,
});
console.log(response);
----
@ -1,35 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
query: {
intervals: {
my_text: {
all_of: {
ordered: false,
max_gaps: 1,
intervals: [
{
match: {
query: "my favorite food",
max_gaps: 0,
ordered: true,
},
},
{
match: {
query: "cold porridge",
max_gaps: 4,
ordered: true,
},
},
],
},
},
},
},
});
console.log(response);
----
@ -12,7 +12,7 @@ const response = await client.inference.put({
adaptive_allocations: {
enabled: true,
min_number_of_allocations: 1,
max_number_of_allocations: 4,
max_number_of_allocations: 10,
},
num_threads: 1,
model_id: ".elser_model_2",
@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.deleteIpLocationDatabase({
id: "my-database-id",
});
console.log(response);
----
Some files were not shown because too many files have changed in this diff