Compare commits
122 Commits
v8.17.0 ... v9.0.0-alp
SHA1:
dea4db1736, d5bd34fc23, b2a490718d, e8dc747c61, c5f9625463, a9a5aca736, 64ef5359e7, 1abb4e3c9f,
d9e9906c4e, 1519963dd9, 867ceda5a3, 88270bf354, 0f09faefbd, b775456296, 9de4dc5009, 461f9b7f66,
afc83338b0, 85396ddc67, 16b51c2315, 3ed94d71e0, e2eb6ef586, 6836a3f1c7, 0eaeb78c96, c713e599d1,
8ca68a4178, 3e5e568c07, ac231c859e, a22c4622d9, 6f9e1062f3, 7449adbd1f, d5a0f1171f, d86eb82e82,
a411cc7c7d, c99abab058, c5b2915b5a, 20fb610d82, c8504fe616, 4795a8c0d5, 9139662bcc, 9e4572f4e1,
a25bc97297, 172180cb21, 947e09e62a, 86169003b4, 11ff146ae8, d2c63b4c5f, 0ad42ff1a2, 6dbf91a9c3,
39b2700add, 869174f953, 5eb3554083, 26ce906b5b, 5f9561d566, 3676c8229a, 2bcbd36d75, 63104b5e5e,
2b2a9947c7, b95aa986b7, 0ee486bc9c, f835fa3b12, e688f36396, b8c3ac446e, 3a288cf86b, 100be27ad1,
c490dd0821, e992c329c3, bfdae66333, ed3cace127, f33aa8cccd, 7cb973a206, a4315a905e, 6447fc10bf,
e9c2f8b0af, 15b9ee2f06, e30e964131, 0f187f47c4, 101f34bd5e, ec0c561e36, c1e90b12f0, 5cb670256e,
86f488f68f, 6009fab7fe, 26ae260058, fbbbece711, a30c3dca2d, 36cfacc409, 6dc83cd33e, 7c7ce29127,
2b890af355, 421f953b00, c5e4107181, 5880c84c13, 290639d168, 0b90613694, 1ad057abcc, 44d890ec57,
2b2a2f03e6, 7bcd75bdb0, 2455dac4e5, edb5563bf8, 11939fd22c, e0c613f898, 20f2c740cd, 97bdca22d8,
a7123f807d, 20ac2a637e, e287c1edd9, 90d43f4f28, 572927b4f1, 86b4d4e2f9, 8e79bf847a, cef328c93d,
c3247d0c66, e9fdcb0647, 82acfc33a9, 661caf8422, 3430734fe0, 810e009202, c274b1b32f, 428a7b023d,
aad41df231, 34704b2e5c
```diff
@@ -12,5 +12,3 @@ WORKDIR /usr/src/app
 
 COPY package.json .
 RUN npm install
-
-COPY . .
```

```diff
@@ -25,6 +25,3 @@ USER ${BUILDER_UID}:${BUILDER_GID}
 # install dependencies
 COPY package.json .
 RUN npm install
-
-# copy project files
-COPY . .
```
```diff
@@ -1,20 +1,6 @@
 /*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
+ * Copyright Elasticsearch B.V. and contributors
+ * SPDX-License-Identifier: Apache-2.0
  */
 
 /* global $ argv */
```

```diff
@@ -123,7 +109,7 @@ async function codegen (args) {
   await $`rm -rf ${join(import.meta.url, '..', 'src', 'api')}`
   await $`mkdir ${join(import.meta.url, '..', 'src', 'api')}`
   await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
-  await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
+  await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.md')} ${join(import.meta.url, '..', 'docs', 'reference', 'api-reference.md')}`
   await $`npm run build`
 
   // run docs example generation
```
```diff
@@ -6,7 +6,7 @@ steps:
     env:
       NODE_VERSION: "{{ matrix.nodejs }}"
       TEST_SUITE: "{{ matrix.suite }}"
-      STACK_VERSION: 8.15.0
+      STACK_VERSION: 8.16.0
     matrix:
       setup:
         suite:
```

```diff
@@ -25,7 +25,7 @@ steps:
       provider: "gcp"
       image: family/core-ubuntu-2204
     plugins:
-      - junit-annotate#v2.4.1:
+      - junit-annotate#v2.6.0:
          artifacts: "junit-output/junit-*.xml"
          job-uuid-file-pattern: "junit-(.*).xml"
          fail-build-on-error: true
```
```diff
@@ -5,3 +5,4 @@ elasticsearch
 .git
 lib
 junit-output
+.tap
```
.github/make.sh (vendored): 14 changes

```diff
@@ -65,7 +65,7 @@ codegen)
   if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
     # fall back to branch name or `main` if no VERSION is set
     branch_name=$(git rev-parse --abbrev-ref HEAD)
-    if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
+    if [[ "$branch_name" =~ ^[0-9]+\.([0-9]+|x) ]]; then
       echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
       VERSION="$branch_name"
     else
@@ -150,7 +150,7 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
     -u "$(id -u):$(id -g)" \
     --volume "$repo:/usr/src/elasticsearch-js" \
     --volume /usr/src/elasticsearch-js/node_modules \
-    --volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
+    --volume "$(realpath "$repo/../elastic-client-generator-js"):/usr/src/elastic-client-generator-js" \
     --env "WORKFLOW=$WORKFLOW" \
     --name make-elasticsearch-js \
     --rm \
@@ -159,6 +159,14 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
       node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
 else
   echo -e "\033[34;1mINFO: Running in CI mode"
+
+  # determine branch to clone
+  GENERATOR_BRANCH="main"
+  if [[ "$VERSION" == 8.* ]]; then
+    GENERATOR_BRANCH="8.x"
+  fi
+  echo -e "\033[34;1mINFO: Generator branch: $GENERATOR_BRANCH"
+
   docker run \
     --volume "$repo:/usr/src/elasticsearch-js" \
     --volume /usr/src/elasticsearch-js/node_modules \
@@ -168,7 +176,7 @@ else
     --rm \
     $product \
     /bin/bash -c "cd /usr/src && \
-      git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
+      git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
       mkdir -p /usr/src/elastic-client-generator-js/output && \
       cd /usr/src/elasticsearch-js && \
       node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
```
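For reference, a small JavaScript sketch (illustrative only, not part of the repo) of the two behaviour changes in `.github/make.sh` above: the branch-name regex now also accepts `8.x`-style branches, and CI clones the matching `elastic-client-generator-js` branch.

```js
// Illustrative only: mirrors logic from the updated .github/make.sh.

// 1) The relaxed branch-name check now also accepts "8.x"-style branches.
const versionBranch = /^[0-9]+\.([0-9]+|x)/
console.log(versionBranch.test('8.17')) // true
console.log(versionBranch.test('8.x'))  // true (newly accepted)
console.log(versionBranch.test('main')) // false -> VERSION falls back to `main`

// 2) The generator branch cloned in CI: 8.* versions use the generator's
//    "8.x" branch, everything else uses "main".
function generatorBranch (version) {
  return version.startsWith('8.') ? '8.x' : 'main'
}
console.log(generatorBranch('8.18'))        // "8.x"
console.log(generatorBranch('9.0.0-alpha')) // "main"
```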
.github/stale.yml (vendored): 26 changes (file deleted)

```diff
@@ -1,26 +0,0 @@
-# Number of days of inactivity before an issue becomes stale
-daysUntilStale: 15
-
-# Number of days of inactivity before a stale issue is closed
-daysUntilClose: 7
-
-# Issues with these labels will never be considered stale
-exemptLabels:
-  - "discussion"
-  - "feature request"
-  - "bug"
-  - "todo"
-  - "good first issue"
-
-# Label to use when marking an issue as stale
-staleLabel: stale
-
-# Comment to post when marking an issue as stale. Set to `false` to disable
-markComment: |
-  We understand that this might be important for you, but this issue has been automatically marked as stale because it has not had recent activity either from our end or yours.
-  It will be closed if no further activity occurs, please write a comment if you would like to keep this going.
-
-  Note: in the past months we have built a new client, that has just landed in master. If you want to open an issue or a pr for the legacy client, you should do that in https://github.com/elastic/elasticsearch-js-legacy
-
-# Comment to post when closing a stale issue. Set to `false` to disable
-closeComment: false
```
.github/workflows/auto-merge.yml (vendored): 18 changes (file deleted)

```diff
@@ -1,18 +0,0 @@
-name: Automerge
-
-on:
-  pull_request_review:
-    types:
-      - submitted
-
-jobs:
-  automerge:
-    runs-on: ubuntu-latest
-    if: github.event.review.state == 'approved'
-    steps:
-      - uses: reitermarkus/automerge@v2
-        with:
-          token: ${{ secrets.GH_TOKEN }}
-          merge-method: squash
-          pull-request-author-associations: OWNER
-          review-author-associations: OWNER,CONTRIBUTOR
```
.github/workflows/docs-build.yml (vendored, new file): 19 changes

```diff
@@ -0,0 +1,19 @@
+name: docs-build
+
+on:
+  push:
+    branches:
+      - main
+  pull_request_target: ~
+  merge_group: ~
+
+jobs:
+  docs-preview:
+    uses: elastic/docs-builder/.github/workflows/preview-build.yml@main
+    with:
+      path-pattern: docs/**
+    permissions:
+      deployments: write
+      id-token: write
+      contents: read
+      pull-requests: read
```
.github/workflows/docs-cleanup.yml (vendored, new file): 14 changes

```diff
@@ -0,0 +1,14 @@
+name: docs-cleanup
+
+on:
+  pull_request_target:
+    types:
+      - closed
+
+jobs:
+  docs-preview:
+    uses: elastic/docs-builder/.github/workflows/preview-cleanup.yml@main
+    permissions:
+      contents: none
+      id-token: write
+      deployments: write
```
.github/workflows/nodejs.yml (vendored): 45 changes

```diff
@@ -11,8 +11,10 @@ jobs:
     outputs:
       src-only: "${{ steps.changes.outputs.src-only }}"
     steps:
-      - uses: actions/checkout@v4
-      - uses: dorny/paths-filter/@v3.0.2
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
+      - uses: dorny/paths-filter/@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
         id: changes
         with:
           filters: |
@@ -30,14 +32,16 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node-version: [18.x, 20.x, 22.x]
+        node-version: [18.x, 20.x, 22.x, 23.x]
         os: [ubuntu-latest, windows-latest, macOS-latest]
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version: ${{ matrix.node-version }}
 
@@ -53,15 +57,21 @@ jobs:
         run: |
           npm run test:unit
 
+      - name: ECMAScript module test
+        run: |
+          npm run test:esm
+
   license:
     name: License check
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Use Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version: 22.x
 
@@ -73,6 +83,9 @@ jobs:
         run: |
           npm run license-checker
 
+      - name: SPDX header check
+        run: npm run license-header
+
   test-bun:
     name: Test Bun
     runs-on: ${{ matrix.os }}
@@ -86,10 +99,12 @@ jobs:
         os: [ubuntu-latest, windows-latest, macOS-latest]
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Use Bun
-        uses: oven-sh/setup-bun@v2
+        uses: oven-sh/setup-bun@4bc047ad259df6fc24a6c9b0f9a0cb08cf17fbe5 # v2
 
       - name: Install
         run: |
@@ -103,12 +118,6 @@ jobs:
         run: |
           bun run test:unit-bun
 
-  auto-approve:
-    name: Auto-approve
-    needs: [test, license]
-    runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
-    if: github.actor == 'elasticmachine'
-    steps:
-      - uses: hmarr/auto-approve-action@v4
+      - name: ECMAScript module test
+        run: |
+          bun run test:esm
```
.github/workflows/npm-publish.yml (vendored): 41 changes

```diff
@@ -12,26 +12,49 @@ jobs:
       contents: write
       id-token: write
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
+          persist-credentials: false
           ref: ${{ github.event.inputs.branch }}
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version: "22.x"
           registry-url: "https://registry.npmjs.org"
       - run: npm install -g npm
       - run: npm install
       - run: npm test
-      - run: npm publish --provenance --access public
+      - name: npm publish
+        run: |
+          version=$(jq -r .version package.json)
+          tag_meta=$(echo "$version" | cut -s -d '-' -f2)
+          if [[ -z "$tag_meta" ]]; then
+            npm publish --provenance --access public
+          else
+            tag=$(echo "$tag_meta" | cut -d '.' -f1)
+            npm publish --provenance --access public --tag "$tag"
+          fi
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-      - run: |
+      - name: Publish version on GitHub
+        run: |
           version=$(jq -r .version package.json)
-          gh release create \
-          -n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
-          --target "$BRANCH_NAME" \
-          -t "v$version" \
-          "v$version"
+          tag_meta=$(echo "$version" | cut -s -d '-' -f2)
+          if [[ -z "$tag_meta" ]]; then
+            gh release create \
+              -n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
+              --target "$BRANCH_NAME" \
+              --title "v$version" \
+              "v$version"
+          else
+            tag_main=$(echo "$version" | cut -d '-' -f1)
+            gh release create \
+              -n "This is a $tag_main pre-release. Changes may not be stable." \
+              --latest=false \
+              --prerelease \
+              --target "$BRANCH_NAME" \
+              --title "v$version" \
+              "v$version"
+          fi
         env:
           BRANCH_NAME: ${{ github.event.inputs.branch }}
           GH_TOKEN: ${{ github.token }}
```
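The rewritten publish step derives an npm dist-tag from any pre-release suffix in the `package.json` version and switches `gh release create` into pre-release mode. A small JavaScript sketch of that decision (the `publishPlan` helper is hypothetical, for illustration only):

```js
// Illustrative only: what the updated npm-publish workflow does with the
// version read from package.json via `jq -r .version`.
function publishPlan (version) {
  // mirrors `tag_meta=$(echo "$version" | cut -s -d '-' -f2)`
  const [main, meta] = version.split(/-(.+)/)
  if (!meta) {
    // stable release: plain `npm publish` plus a normal GitHub release
    return { npmArgs: ['--provenance', '--access', 'public'], prerelease: false, title: `v${version}` }
  }
  // pre-release: publish under a dist-tag like "alpha" and mark the GitHub release
  const tag = meta.split('.')[0]
  return {
    npmArgs: ['--provenance', '--access', 'public', '--tag', tag],
    prerelease: true,
    title: `v${version}`,
    note: `This is a ${main} pre-release. Changes may not be stable.`
  }
}

console.log(publishPlan('8.17.0'))        // stable: no --tag, regular release
console.log(publishPlan('9.0.0-alpha.1')) // pre-release: --tag alpha, GitHub prerelease
```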
.github/workflows/serverless-patch.sh (vendored): 43 changes (file deleted)

```diff
@@ -1,43 +0,0 @@
-#!/usr/bin/env bash
-
-set -exuo pipefail
-
-merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
-pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
-pr_shortcode="elastic/elasticsearch-js#$pull_request_id"
-
-# generate patch file
-cd "$GITHUB_WORKSPACE/stack"
-git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff
-
-# set committer info
-git config --global user.email "elasticmachine@users.noreply.github.com"
-git config --global user.name "Elastic Machine"
-
-# apply patch file
-cd "$GITHUB_WORKSPACE/serverless"
-git am -C1 --reject /tmp/patch.diff || git am --quit
-
-# generate PR body comment
-comment="Patch applied from $pr_shortcode"
-
-# enumerate rejected patches in PR comment
-has_rejects='false'
-for f in ./**/*.rej; do
-  has_rejects='true'
-  comment="$comment
-
-## Rejected patch \`$f\` must be resolved:
-
-\`\`\`diff
-$(cat "$f")
-\`\`\`
-"
-done
-
-# delete .rej files
-rm -fv ./**/*.rej
-
-# send data to output parameters
-echo "$comment" > /tmp/pr_body
-echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"
```
.github/workflows/serverless-patch.yml (vendored): 51 changes (file deleted)

```diff
@@ -1,51 +0,0 @@
----
-name: Apply PR changes to serverless
-on:
-  pull_request_target:
-    types:
-      - closed
-      - labeled
-
-jobs:
-  apply-patch:
-    name: Apply patch
-    runs-on: ubuntu-latest
-    # Only react to merged PRs for security reasons.
-    # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
-    if: >
-      github.event.pull_request.merged
-      && (
-        (
-          github.event.action == 'closed'
-          && contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
-        )
-        ||
-        (
-          github.event.action == 'labeled'
-          && github.event.label.name == 'apply-to-serverless'
-        )
-      )
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          repository: elastic/elasticsearch-js
-          ref: main
-          path: stack
-          fetch-depth: 0
-      - uses: actions/checkout@v4
-        with:
-          repository: elastic/elasticsearch-serverless-js
-          ref: main
-          path: serverless
-      - name: Apply patch from stack to serverless
-        id: apply-patch
-        run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
-      - uses: peter-evans/create-pull-request@v6
-        with:
-          token: ${{ secrets.GH_TOKEN }}
-          path: serverless
-          title: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
-          commit-message: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
-          body-path: /tmp/pr_body
-          draft: '${{ steps.apply-patch.outputs.PR_DRAFT }}'
-          add-paths: ':!*.rej'
```
.github/workflows/stale.yml (vendored): 12 changes

```diff
@@ -1,21 +1,21 @@
 ---
-name: 'Close stale issues and PRs'
+name: "Close stale issues and PRs"
 on:
   schedule:
-    - cron: '30 1 * * *'
+    - cron: "30 1 * * *"
 
 jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v8
+      - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9
         with:
           stale-issue-label: stale
           stale-pr-label: stale
           days-before-stale: 90
           days-before-close: 14
-          exempt-issue-labels: 'good first issue'
+          exempt-issue-labels: "good first issue,tracking"
           close-issue-label: closed-stale
           close-pr-label: closed-stale
-          stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
-          stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
+          stale-issue-message: "This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
+          stale-pr-message: "This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
```
.gitignore (vendored): 3 changes

```diff
@@ -65,3 +65,6 @@ test/bundlers/parcel-test/.parcel-cache
 lib
 junit-output
 bun.lockb
+test-results
+processinfo
+.tap
```
```diff
@@ -73,3 +73,4 @@ CONTRIBUTING.md
 
 src
 bun.lockb
+.tap
```
```diff
@@ -28,6 +28,9 @@ spec:
   spec:
     repository: elastic/elasticsearch-js
     pipeline_file: .buildkite/pipeline.yml
+    env:
+      ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true"
+      SLACK_NOTIFICATIONS_CHANNEL: "#devtools-notify-javascript"
     teams:
       devtools-team:
         access_level: MANAGE_BUILD_AND_READ
@@ -42,6 +45,12 @@ spec:
       main:
         branch: "main"
         cronline: "@daily"
-      8_14:
-        branch: "8.14"
+      8_x:
+        branch: "8.x"
+        cronline: "@daily"
+      8_17:
+        branch: "8.17"
+        cronline: "@daily"
+      8_18:
+        branch: "8.18"
         cronline: "@daily"
```
Deleted file (@@ -1,270 +0,0 @@): the "Basic configuration" AsciiDoc page.

```asciidoc
[[basic-config]]
=== Basic configuration

This page shows you the possible basic configuration options that the clients
offers.


[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  maxRetries: 5,
  requestTimeout: 60000,
  sniffOnStart: true
})
----


[cols=2*]
|===
|`node` or `nodes`
a|The Elasticsearch endpoint to use. +
It can be a single string or an array of strings:
[source,js]
----
node: 'http://localhost:9200'
----
Or it can be an object (or an array of objects) that represents the node:
[source,js]
----
node: {
  url: new URL('http://localhost:9200'),
  tls: 'tls options',
  agent: 'http agent options',
  id: 'custom node id',
  headers: { 'custom': 'headers' }
  roles: {
    master: true,
    data: true,
    ingest: true,
    ml: false
  }
}
----

|`auth`
a|Your authentication data. You can use both basic authentication and
{ref}/security-api-create-api-key.html[ApiKey]. +
See <<authentication,Authentication>> for more details. +
_Default:_ `null`

Basic authentication:
[source,js]
----
auth: {
  username: 'elastic',
  password: 'changeme'
}
----
{ref}/security-api-create-api-key.html[ApiKey] authentication:
[source,js]
----
auth: {
  apiKey: 'base64EncodedKey'
}
----
Bearer authentication, useful for https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html[service account tokens]. Be aware that it does not handle automatic token refresh:
[source,js]
----
auth: {
  bearer: 'token'
}
----


|`maxRetries`
|`number` - Max number of retries for each request. +
_Default:_ `3`

|`requestTimeout`
|`number` - Max request timeout in milliseconds for each request. +
_Default:_ `30000`

|`pingTimeout`
|`number` - Max ping request timeout in milliseconds for each request. +
_Default:_ `3000`

|`sniffInterval`
|`number, boolean` - Perform a sniff operation every `n` milliseconds. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
_Default:_ `false`

|`sniffOnStart`
|`boolean` - Perform a sniff once the client is started. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
_Default:_ `false`

|`sniffEndpoint`
|`string` - Endpoint to ping during a sniff. +
_Default:_ `'_nodes/_all/http'`

|`sniffOnConnectionFault`
|`boolean` - Perform a sniff on connection fault. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
_Default:_ `false`

|`resurrectStrategy`
|`string` - Configure the node resurrection strategy. +
_Options:_ `'ping'`, `'optimistic'`, `'none'` +
_Default:_ `'ping'`

|`suggestCompression`
|`boolean` - Adds `accept-encoding` header to every request. +
_Default:_ `false`

|`compression`
|`string, boolean` - Enables gzip request body compression. +
_Options:_ `'gzip'`, `false` +
_Default:_ `false`

|`tls`
|`http.SecureContextOptions` - tls https://nodejs.org/api/tls.html[configuraton]. +
_Default:_ `null`

|`proxy`
a|`string, URL` - If you are using an http(s) proxy, you can put its url here.
The client will automatically handle the connection to it. +
_Default:_ `null`
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://localhost:8080'
})

// Proxy with basic authentication
const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://user:pwd@localhost:8080'
})
----

|`agent`
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options],
or a function that returns an actual http agent instance. If you want to disable the http agent use entirely
(and disable the `keep-alive` feature), set the agent to `false`. +
_Default:_ `null`
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  agent: { agent: 'options' }
})

const client = new Client({
  node: 'http://localhost:9200',
  // the function takes as parameter the option
  // object passed to the Connection constructor
  agent: (opts) => new CustomAgent()
})

const client = new Client({
  node: 'http://localhost:9200',
  // Disable agent and keep-alive
  agent: false
})
----

|`nodeFilter`
a|`function` - Filters which node not to use for a request. +
_Default:_
[source,js]
----
function defaultNodeFilter (node) {
  // avoid master only nodes
  if (node.roles.master === true &&
      node.roles.data === false &&
      node.roles.ingest === false) {
    return false
  }
  return true
}
----

|`nodeSelector`
a|`function` - custom selection strategy. +
_Options:_ `'round-robin'`, `'random'`, custom function +
_Default:_ `'round-robin'` +
_Custom function example:_
[source,js]
----
function nodeSelector (connections) {
  const index = calculateIndex()
  return connections[index]
}
----

|`generateRequestId`
a|`function` - function to generate the request id for every request, it takes
two parameters, the request parameters and options. +
By default it generates an incremental integer for every request. +
_Custom function example:_
[source,js]
----
function generateRequestId (params, options) {
  // your id generation logic
  // must be syncronous
  return 'id'
}
----

|`name`
|`string, symbol` - The name to identify the client instance in the events. +
_Default:_ `elasticsearch-js`

|`opaqueIdPrefix`
|`string` - A string that will be use to prefix any `X-Opaque-Id` header. +
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html#_x-opaque-id_support[`X-Opaque-Id` support] for more details. +
_Default:_ `null`

|`headers`
|`object` - A set of custom headers to send in every request. +
_Default:_ `{}`

|`context`
|`object` - A custom object that you can use for observability in your events.
It will be merged with the API level context option. +
_Default:_ `null`

|`enableMetaHeader`
|`boolean` - If true, adds an header named `'x-elastic-client-meta'`, containing some minimal telemetry data,
such as the client and platform version. +
_Default:_ `true`

|`cloud`
a|`object` - Custom configuration for connecting to
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
for more details. +
_Default:_ `null` +
_Cloud configuration example:_
[source,js]
----
const client = new Client({
  cloud: {
    id: '<cloud-id>'
  },
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----

|`disablePrototypePoisoningProtection`
|`boolean`, `'proto'`, `'constructor'` - By the default the client will protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more. If needed you can disable prototype poisoning protection entirely or one of the two checks. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
_Default:_ `false`

|`caFingerprint`
|`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
_Default:_ `null`

|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
_Default:_ `null`

|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH +
_Default:_ `null`

|===
```
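The deleted page above documents several transport-level options individually but has no combined example. A short sketch pulling a few of them together (all values are placeholders; option names come from the page above):

```js
// Illustrative combination of options documented in the deleted page above.
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',          // placeholder endpoint
  auth: { apiKey: 'base64EncodedKey' },    // placeholder credentials
  maxRetries: 5,                           // default is 3
  requestTimeout: 60000,                   // default is 30000 ms
  compression: 'gzip',                     // gzip request bodies (default false)
  caFingerprint: '<sha256-ca-fingerprint>',// placeholder SHA256 CA fingerprint
  maxResponseSize: 100 * 1024 * 1024       // abort uncompressed responses over ~100 MB
})
```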
@ -1,804 +0,0 @@
|
|||||||
[[changelog-client]]
|
|
||||||
== Release notes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.15.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.15.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== OpenTelemetry zero-code instrumentation support
|
|
||||||
|
|
||||||
For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
|
|
||||||
See {jsclient}/observability.html#_opentelemetry[the docs]
|
|
||||||
for more information.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.14.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Improved support for Elasticsearch `8.14`
|
|
||||||
|
|
||||||
Updated types based on fixes and changes to the Elasticsearch specification.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.14.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.14.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== ES|QL object API helper
|
|
||||||
|
|
||||||
A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
|
|
||||||
A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== `onSuccess` callback added to bulk helper
|
|
||||||
|
|
||||||
The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Request retries are more polite
|
|
||||||
|
|
||||||
https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
|
|
||||||
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.13.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Pin @elastic/transport to `~8.4.1`
|
|
||||||
|
|
||||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.13.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.13.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
|
|
||||||
|
|
||||||
When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.12.3
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to `~8.4.1`
|
|
||||||
|
|
||||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.12 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.12.2
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Upgrade transport to 8.4.1 https://github.com/elastic/elasticsearch-js/pull/2137[#2137]
|
|
||||||
|
|
||||||
Upgrades `@elastic/transport` to 8.4.1 to resolve https://github.com/elastic/elastic-transport-js/pull/83[a bug] where arrays in error diagnostics were unintentionally transformed into objects.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.12.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Fix hang in bulk helper semaphore https://github.com/elastic/elasticsearch-js/pull/2027[#2027]
|
|
||||||
|
|
||||||
The failing state could be reached when a server's response times are slower than flushInterval.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.12.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.12.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.11.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to `~8.4.0`
|
|
||||||
|
|
||||||
Switching from `^8.4.0` to `~8.4.0` ensures 8.11 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.11.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.11.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.11/release-notes-8.11.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Enhanced support for redacting potentially sensitive data https://github.com/elastic/elasticsearch-js/pull/2095[#2095]
|
|
||||||
|
|
||||||
`@elastic/transport` https://github.com/elastic/elastic-transport-js/releases/tag/v8.4.0[version 8.4.0] introduces enhanced measures for ensuring that request metadata attached to some `Error` objects is redacted. This functionality is primarily to address custom logging solutions that don't use common serialization methods like `JSON.stringify`, `console.log`, or `util.inspect`, which were already accounted for.
|
|
||||||
|
|
||||||
See <<redaction>> for more information.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.10.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to `~8.3.4`
|
|
||||||
|
|
||||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.10.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.10.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.10/release-notes-8.10.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.9.2
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to `~8.3.4`
|
|
||||||
|
|
||||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.9 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.9.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Upgrade Transport https://github.com/elastic/elasticsearch-js/pull/1968[#1968]
|
|
||||||
|
|
||||||
Upgrades `@elastic/transport` to the latest patch release to fix https://github.com/elastic/elastic-transport-js/pull/69[a bug] that could cause the process to exit when handling malformed `HEAD` requests.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.9.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.9.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
|
|
||||||
|
|
||||||
In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Updated `user-agent` header https://github.com/elastic/elasticsearch-js/pull/1954[#1954]
|
|
||||||
|
|
||||||
The `user-agent` header the client used to connect to Elasticsearch was using a non-standard format that has been improved.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.8.2
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to `~8.3.2`
|
|
||||||
|
|
||||||
Switching from `^8.3.2` to `~8.3.2` ensures 8.8 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.8.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.8.1`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.1.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Fix index drift bug in bulk helper https://github.com/elastic/elasticsearch-js/pull/1759[#1759]
|
|
||||||
|
|
||||||
Fixes a bug in the bulk helper that would cause `onDrop` to send back the wrong JSON document or error on a nonexistent document when an error occurred on a bulk HTTP request that contained a `delete` action.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Fix a memory leak caused by an outdated version of Undici https://github.com/elastic/elasticsearch-js/pull/1902[#1902]
|
|
||||||
|
|
||||||
Undici 5.5.1, used by https://github.com/elastic/elastic-transport-js[elastic-transport-js], could create a memory leak when a high volume of requests created too many HTTP `abort` listeners. Upgrading Undici to 5.22.1 removed the memory leak.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.8.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.8.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Fix type declarations for legacy types with a body key https://github.com/elastic/elasticsearch-js/pull/1784[#1784]
|
|
||||||
|
|
||||||
Prior releases contained a bug where type declarations for legacy types that include a `body` key were not actually importing the type that includes the `body` key.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.7.3
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to `~8.3.1`
|
|
||||||
|
|
||||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.7.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.7.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.6.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to `~8.3.1`
|
|
||||||
|
|
||||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.6.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Bump @elastic/transport to 8.3.1+ https://github.com/elastic/elasticsearch-js/pull/1802[#1802]
|
|
||||||
|
|
||||||
The `@elastic/transport` dependency has been bumped to `~8.3.1` to ensure
|
|
||||||
fixes to the `maxResponseSize` option are available in the client.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.6.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.6/release-notes-8.6.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.5.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.5.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.5/release-notes-8.5.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.4.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.4.0`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.4/release-notes-8.4.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.2.1
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.2.1`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.1.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Fix ndjson APIs https://github.com/elastic/elasticsearch-js/pull/1688[#1688]
|
|
||||||
|
|
||||||
The previous release contained a bug that broken ndjson APIs.
|
|
||||||
We have released `v8.2.0-patch.1` to address this.
|
|
||||||
This fix is the same as the one we have released and we strongly recommend upgrading to this version.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Fix node shutdown apis https://github.com/elastic/elasticsearch-js/pull/1697[#1697]
|
|
||||||
|
|
||||||
The shutdown APIs wheren't complete, this fix completes them.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Types: move query keys to body https://github.com/elastic/elasticsearch-js/pull/1693[#1693]
|
|
||||||
|
|
||||||
The types definitions where wrongly representing the types of fields present in both query and body.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.2.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Breaking changes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Drop Node.js v12 https://github.com/elastic/elasticsearch-js/pull/1670[#1670]
|
|
||||||
|
|
||||||
According to our https://github.com/elastic/elasticsearch-js#nodejs-support[Node.js support matrix].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.2`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== More lenient parameter checks https://github.com/elastic/elasticsearch-js/pull/1662[#1662]
|
|
||||||
|
|
||||||
When creating a new client, an `undefined` `caFingerprint` no longer trigger an error for a http connection.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Update TypeScript docs and export estypes https://github.com/elastic/elasticsearch-js/pull/1675[#1675]
|
|
||||||
|
|
||||||
You can import the full TypeScript requests & responses definitions as it follows:
|
|
||||||
[source,ts]
|
|
||||||
----
|
|
||||||
import { estypes } from '@elastic/elasticsearch'
|
|
||||||
----
|
|
||||||
|
|
||||||
If you need the legacy definitions with the body, you can do the following:
|
|
||||||
|
|
||||||
[source,ts]
|
|
||||||
----
|
|
||||||
import { estypesWithBody } from '@elastic/elasticsearch'
|
|
||||||
----
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Updated hpagent to the latest version https://github.com/elastic/elastic-transport-js/pull/49[transport/#49]
|
|
||||||
|
|
||||||
You can fing the related changes https://github.com/delvedor/hpagent/releases/tag/v1.0.0[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.1.0
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Features
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Support for Elasticsearch `v8.1`
|
|
||||||
|
|
||||||
You can find all the API changes
|
|
||||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.1/release-notes-8.1.0.html[here].
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Export SniffingTransport https://github.com/elastic/elasticsearch-js/pull/1653[#1653]
|
|
||||||
|
|
||||||
Now the client exports the SniffingTransport class.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
==== Fixes
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Fix onFlushTimeout timer not being cleared when upstream errors https://github.com/elastic/elasticsearch-js/pull/1616[#1616]
|
|
||||||
|
|
||||||
Fixes a memory leak caused by an error in the upstream dataset of the bulk helper.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Cleanup abort listener https://github.com/elastic/elastic-transport-js/pull/42[transport/#42]
|
|
||||||
|
|
||||||
The legacy http client was not cleaning up the abort listener, which could cause a memory leak.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
===== Improve undici performances https://github.com/elastic/elastic-transport-js/pull/41[transport/#41]
|
|
||||||
|
|
||||||
Improve the stream body collection and keep alive timeout.
|
|
||||||
|
|
||||||
[discrete]
|
|
||||||
=== 8.0.0

[discrete]
==== Features

[discrete]
===== Support for Elasticsearch `v8.0`

You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.0/release-notes-8.0.0.html[here].

[discrete]
===== Drop old typescript definitions

*Breaking: Yes* | *Migration effort: Medium*

The current TypeScript definitions will be removed from the client, and the new definitions, which contain request and response definitions as well, will be shipped by default.

[discrete]
===== Drop callback-style API

*Breaking: Yes* | *Migration effort: Large*

Maintaining both API styles is not a problem per se, but it makes error handling more convoluted due to async stack traces.
Moving to a full-promise API will solve this issue.

[source,js]
----
// callback-style api
client.search({ params }, { options }, (err, result) => {
  console.log(err || result)
})

// promise-style api
client.search({ params }, { options })
  .then(console.log)
  .catch(console.log)

// async-style (sugar syntax on top of promises)
const response = await client.search({ params }, { options })
console.log(response)
----

If you are already using the promise-style API, this won't be a breaking change for you.

[discrete]
===== Remove the current abort API and use the new AbortController standard

*Breaking: Yes* | *Migration effort: Small*

The old abort API makes sense for callbacks, but it's annoying to use with promises.

[source,js]
----
// callback-style api
const request = client.search({ params }, { options }, (err, result) => {
  console.log(err) // RequestAbortedError
})

request.abort()

// promise-style api
const promise = client.search({ params }, { options })

promise
  .then(console.log)
  .catch(console.log) // RequestAbortedError

promise.abort()
----

Node v12 added the standard https://nodejs.org/api/globals.html#globals_class_abortcontroller[`AbortController`] API, which is designed to work well with both callbacks and promises.

[source,js]
----
const ac = new AbortController()
client.search({ params }, { signal: ac.signal })
  .then(console.log)
  .catch(console.log) // RequestAbortedError

ac.abort()
----

[discrete]
===== Remove the body key from the request

*Breaking: Yes* | *Migration effort: Small*

Thanks to the new types we are developing, we now know exactly where a parameter should go.
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.

This could be a rather big breaking change, so a double solution could be used during the 8.x lifecycle (accepting body keys without them being wrapped in the body, as well as the current solution).

To convert code from 7.x, you need to remove the `body` parameter in all the endpoint requests.
For instance, this is an example for the `search` endpoint:

[source,js]
----
// from
const response = await client.search({
  index: 'test',
  body: {
    query: {
      match_all: {}
    }
  }
})

// to
const response = await client.search({
  index: 'test',
  query: {
    match_all: {}
  }
})
----

[discrete]
===== Migrate to new separate transport

*Breaking: Yes* | *Migration effort: Small to none*

The separated transport has been rewritten in TypeScript and has already dropped the callback-style API.
Given that it is now separated, most of the Elasticsearch-specific concepts have been removed, and the client will likely need to extend parts of it to reintroduce them.
If you weren't extending the internals of the client, this won't be a breaking change for you.

[discrete]
===== The returned value of API calls is the body and not the HTTP related keys

*Breaking: Yes* | *Migration effort: Small*

The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
The client will expose a new request-specific option to still get the full response details.

The new behaviour returns the `body` value directly as response.
If you want to have the 7.x response format, you need to add `meta: true` in the request.
This will return all the HTTP meta information, including the `body`.

For instance, this is an example for the `search` endpoint:

[source,js]
----
// from
const response = await client.search({
  index: 'test',
  body: {
    query: {
      match_all: {}
    }
  }
})
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }

// to
const response = await client.search({
  index: 'test',
  query: {
    match_all: {}
  }
})
console.log(response) // SearchResponse

// with a bit of TypeScript and JavaScript magic...
const response = await client.search({
  index: 'test',
  query: {
    match_all: {}
  }
}, {
  meta: true
})
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }
----

[discrete]
===== Use a weighted connection pool

*Breaking: Yes* | *Migration effort: Small to none*

Move from the current cluster connection pool to a weight-based implementation.
This new implementation offers better performance and runs less code in the background; the old connection pool can still be used.
If you weren't extending the internals of the client, this won't be a breaking change for you.

[discrete]
===== Migrate to the "undici" http client

*Breaking: Yes* | *Migration effort: Small to none*

By default, the HTTP client will no longer be the default Node.js HTTP client, but https://github.com/nodejs/undici[undici] instead.
Undici is a brand new HTTP client written from scratch; it offers vastly improved performance and has better support for promises.
Furthermore, it offers comprehensive and predictable error handling. The old HTTP client can still be used.
If you weren't extending the internals of the client, this won't be a breaking change for you.
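
If you want to keep using the Node.js `http` based connection, a minimal sketch of opting back into it looks like this (it assumes `@elastic/transport`, which the client depends on, is resolvable in your project):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
// HttpConnection is the legacy Node.js http-based connection class
const { HttpConnection } = require('@elastic/transport')

const client = new Client({
  node: 'http://localhost:9200',
  // opt out of the undici-based default connection
  Connection: HttpConnection
})
----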

[discrete]
===== Drop support for old camelCased keys

*Breaking: Yes* | *Migration effort: Medium*

Currently, every path or query parameter can be expressed in both `snake_case` and `camelCase`; internally the client converts everything to `snake_case`.
This was done in an effort to reduce the friction of migrating from the legacy to the new client, but now it no longer makes sense.
If you are already using `snake_case` keys, this won't be a breaking change for you.
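
For example, a request that relied on the camelCase spelling now has to use the `snake_case` key (a minimal sketch; the parameter is just an illustration):

[source,js]
----
// 7.x: both spellings were accepted
await client.search({ index: 'test', ignoreUnavailable: true })

// 8.x: only the snake_case key is accepted
await client.search({ index: 'test', ignore_unavailable: true })
----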

[discrete]
===== Rename `ssl` option to `tls`

*Breaking: Yes* | *Migration effort: Small*

People usually refer to this as `tls`; furthermore, we use the tls API internally and Node.js refers to it as tls everywhere.

[source,js]
----
// before
const client = new Client({
  node: 'https://localhost:9200',
  ssl: {
    rejectUnauthorized: false
  }
})

// after
const client = new Client({
  node: 'https://localhost:9200',
  tls: {
    rejectUnauthorized: false
  }
})
----

[discrete]
===== Remove prototype poisoning protection

*Breaking: Yes* | *Migration effort: Small*

Prototype poisoning protection is very useful, but it can cause performance issues with big payloads.
In v8 it will be removed, and the documentation will show how to add it back with a custom serializer.
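
A rough sketch of what such a custom serializer could look like, assuming you use the `secure-json-parse` package and that the client keeps accepting a custom `Serializer` class (both are assumptions here, not the official recipe):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const { Serializer } = require('@elastic/transport')
const sjs = require('secure-json-parse')

// reject payloads carrying __proto__ or constructor.prototype keys
class SecureSerializer extends Serializer {
  deserialize (json) {
    return sjs.parse(json, null, { protoAction: 'error', constructorAction: 'error' })
  }
}

const client = new Client({
  node: 'http://localhost:9200',
  Serializer: SecureSerializer
})
----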

[discrete]
===== Remove client extensions API

*Breaking: Yes* | *Migration effort: Large*

Nowadays the client supports the entire Elasticsearch API, and the `transport.request` method can be used if necessary. The client extensions API no longer has a reason to exist.

[source,js]
----
client.extend('utility.index', ({ makeRequest }) => {
  return function _index (params, options) {
    // your code
  }
})

client.utility.index(...)
----

If you weren't using client extensions, this won't be a breaking change for you.

[discrete]
===== Move to TypeScript

*Breaking: No* | *Migration effort: None*

The new separated transport is already written in TypeScript, and it makes sense that the client v8 will be fully written in TypeScript as well.

[discrete]
===== Move from emitter-like interface to a diagnostic method

*Breaking: Yes* | *Migration effort: Small*

Currently, the client offers a subset of methods of the `EventEmitter` class; v8 will ship with a `diagnostic` property which will be a proper event emitter.

[source,js]
----
// from
client.on('request', console.log)

// to
client.diagnostic.on('request', console.log)
----

[discrete]
===== Remove username & password properties from Cloud configuration

*Breaking: Yes* | *Migration effort: Small*

The Cloud configuration does not support ApiKey and Bearer auth, while the `auth` option does.
There is no need to keep the legacy basic auth support in the cloud configuration.

[source,js]
----
// before
const client = new Client({
  cloud: {
    id: '<cloud-id>',
    username: 'elastic',
    password: 'changeme'
  }
})

// after
const client = new Client({
  cloud: {
    id: '<cloud-id>'
  },
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----

If you are already passing the basic auth options in the `auth` configuration, this won't be a breaking change for you.

[discrete]
===== Calling `client.close` will reject new requests

Once you call `client.close`, every new request after that will be rejected with a `NoLivingConnectionsError`. In-flight requests will be executed normally unless an in-flight request requires a retry, in which case it will be rejected.
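
A small sketch of the new behaviour (the index name and query are illustrative):

[source,js]
----
await client.close()

try {
  // any request issued after close() is rejected
  await client.search({ index: 'test', query: { match_all: {} } })
} catch (err) {
  console.log(err.name) // NoLivingConnectionsError
}
----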

[discrete]
===== Parameters rename

The following parameters have been renamed (see the sketch after this list for an example):

- `ilm.delete_lifecycle`: `policy` parameter has been renamed to `name`
- `ilm.get_lifecycle`: `policy` parameter has been renamed to `name`
- `ilm.put_lifecycle`: `policy` parameter has been renamed to `name`
- `snapshot.cleanup_repository`: `repository` parameter has been renamed to `name`
- `snapshot.create_repository`: `repository` parameter has been renamed to `name`
- `snapshot.delete_repository`: `repository` parameter has been renamed to `name`
- `snapshot.get_repository`: `repository` parameter has been renamed to `name`
- `snapshot.verify_repository`: `repository` parameter has been renamed to `name`
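
For example, an ILM call that used `policy` in 7.x now takes `name` (a minimal sketch; the policy body is illustrative):

[source,js]
----
// 7.x
await client.ilm.putLifecycle({ policy: 'my-policy', body: { policy: { phases: {} } } })

// 8.x
await client.ilm.putLifecycle({ name: 'my-policy', policy: { phases: {} } })
----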

[discrete]
===== Removal of snake_cased methods

The v7 client provided snake_cased methods, such as `client.delete_by_query`. This is no longer supported; now only camelCased methods are present.
So `client.delete_by_query` can be accessed with `client.deleteByQuery`.
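
For example (a minimal sketch):

[source,js]
----
// 7.x
await client.delete_by_query({ index: 'test', body: { query: { match_all: {} } } })

// 8.x
await client.deleteByQuery({ index: 'test', query: { match_all: {} } })
----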

@@ -1,36 +0,0 @@
[[child]]
=== Creating a child client

There are some use cases where you may need multiple instances of the client.
You can easily do that by calling `new Client()` as many times as you need, but
you will lose all the benefits of using one single client, such as the long
living connections and the connection pool handling. To avoid this problem, the
client offers a `child` API, which returns a new client instance that shares the
connection pool with the parent client.

NOTE: The event emitter is shared between the parent and the child(ren). If you
extend the parent client, the child client will have the same extensions, while
if the child client adds an extension, the parent client will not be extended.

You can pass to the `child` every client option you would pass to a normal
client, except the connection pool specific options (`ssl`, `agent`, `pingTimeout`,
`Connection`, and `resurrectStrategy`).

CAUTION: If you call `close` in any of the parent/child clients, every client
will be closed.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})
const child = client.child({
  headers: { 'x-foo': 'bar' },
  requestTimeout: 1000
})

client.info().then(console.log, console.log)
child.info().then(console.log, console.log)
----

@@ -1,11 +0,0 @@
[[client-configuration]]
== Configuration


The client is designed to be easily configured for your needs. In the following
section, you can see the possible options that you can use to configure it.

* <<basic-config>>
* <<advanced-config>>
* <<child>>
* <<client-testing>>

@@ -1,719 +0,0 @@
[[client-connecting]]
== Connecting

This page contains the information you need to connect and use the Client with
{es}.

**On this page**

* <<authentication, Authentication options>>
* <<client-usage, Using the client>>
* <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
* <<client-connect-proxy, Connecting through a proxy>>
* <<client-error-handling, Handling errors>>
* <<keep-alive, Keep-alive connections>>
* <<close-connections, Closing a client's connections>>
* <<product-check, Automatic product check>>

[[authentication]]
[discrete]
=== Authentication

This document contains code snippets to show you how to connect to various {es}
providers.


[discrete]
[[auth-ec]]
==== Elastic Cloud

If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers
an easy way to connect to it via the `cloud` option. You must pass the Cloud ID
that you can find in the cloud console, then your username and password inside
the `auth` option.

NOTE: When connecting to Elastic Cloud, the client will automatically enable
both request and response compression by default, since it yields significant
throughput improvements. Moreover, the client will also set the tls option
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
override this option by configuring it.

IMPORTANT: Do not enable sniffing when using Elastic Cloud, since the nodes are
behind a load balancer; Elastic Cloud will take care of everything for you.
Take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here]
to know more.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: {
    id: '<cloud-id>'
  },
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----

[discrete]
[[connect-self-managed-new]]
=== Connecting to a self-managed cluster

By default {es} will start with security features like authentication and TLS
enabled. To connect to the {es} cluster you'll need to configure the Node.js {es}
client to use HTTPS with the generated CA certificate in order to make requests
successfully.

If you're just getting started with {es} we recommend reading the documentation
on https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html[configuring]
and
https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html[starting {es}]
to ensure your cluster is running as expected.

When you start {es} for the first time you'll see a distinct block like the one
below in the output from {es} (you may have to scroll up if it's been a while):

[source,sh]
----

-> Elasticsearch security features have been automatically configured!
-> Authentication is enabled and cluster connections are encrypted.

-> Password for the elastic user (reset with `bin/elasticsearch-reset-password -u elastic`):
  lhQpLELkjkrawaBoaz0Q

-> HTTP CA certificate SHA-256 fingerprint:
  a52dd93511e8c6045e21f16654b77c9ee0f34aea26d9f40320b531c474676228
...

----

Depending on the circumstances there are two options for verifying the HTTPS
connection, either verifying with the CA certificate itself or via the HTTP CA
certificate fingerprint.

[discrete]
[[auth-tls]]
==== TLS configuration

The generated root CA certificate can be found in the `certs` directory in your
{es} config location (`$ES_CONF_PATH/certs/http_ca.crt`). If you're running {es}
in Docker there is
https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html[additional documentation for retrieving the CA certificate].

Without any additional configuration you can specify `https://` node urls, and
the certificates used to sign these requests will be verified. To turn off
certificate verification, you must specify a `tls` object in the top level
config and set `rejectUnauthorized: false`. The default `tls` values are the
same that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
uses.

[source,js]
----
const fs = require('fs')
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'changeme'
  },
  tls: {
    ca: fs.readFileSync('./http_ca.crt'),
    rejectUnauthorized: false
  }
})
----

[discrete]
[[auth-ca-fingerprint]]
==== CA fingerprint

You can configure the client to only trust certificates that are signed by a specific CA certificate
(CA certificate pinning) by providing a `caFingerprint` option.
This will verify that the fingerprint of the CA certificate that has signed
the certificate of the server matches the supplied value.
You must configure a SHA256 digest.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://example.com',
  auth: { ... },
  // the fingerprint (SHA256) of the CA certificate that is used to sign
  // the certificate that the Elasticsearch node presents for TLS.
  caFingerprint: '20:0D:CA:FA:76:...',
  tls: {
    // might be required if it's a self-signed certificate
    rejectUnauthorized: false
  }
})
----

The certificate fingerprint can be calculated using `openssl x509` with the
certificate file:

[source,sh]
----
openssl x509 -fingerprint -sha256 -noout -in /path/to/http_ca.crt
----

If you don't have access to the generated CA file from {es} you can use the
following script to output the root CA fingerprint of the {es} instance with
`openssl s_client`:

[source,sh]
----
# Replace the values of 'localhost' and '9200' with the
# corresponding host and port values for the cluster.
openssl s_client -connect localhost:9200 -servername localhost -showcerts </dev/null 2>/dev/null \
  | openssl x509 -fingerprint -sha256 -noout -in /dev/stdin
----

The output of `openssl x509` will look something like this:

[source,sh]
----
SHA256 Fingerprint=A5:2D:D9:35:11:E8:C6:04:5E:21:F1:66:54:B7:7C:9E:E0:F3:4A:EA:26:D9:F4:03:20:B5:31:C4:74:67:62:28
----


[discrete]
[[connect-no-security]]
=== Connecting without security enabled

WARNING: Running {es} without security enabled is not recommended.

If your cluster is configured with
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html[security explicitly disabled]
then you can connect via HTTP:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'http://example.com'
})
----

[discrete]
[[auth-strategies]]
=== Authentication strategies

Below you can find all the supported authentication strategies.

[discrete]
[[auth-apikey]]
==== ApiKey authentication

You can use the
{ref-7x}/security-api-create-api-key.html[ApiKey]
authentication by passing the `apiKey` parameter via the `auth` option. The
`apiKey` parameter can be either a base64 encoded string or an object with the
values that you can obtain from the
{ref-7x}/security-api-create-api-key.html[create api key endpoint].

NOTE: If you provide both basic authentication credentials and the ApiKey
configuration, the ApiKey takes precedence.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    apiKey: 'base64EncodedKey'
  }
})
----

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    apiKey: {
      id: 'foo',
      api_key: 'bar'
    }
  }
})
----

[discrete]
[[auth-bearer]]
==== Bearer authentication

You can provide your credentials by passing the `bearer` token
parameter via the `auth` option.
Useful for https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html[service account tokens].
Be aware that it does not handle automatic token refresh.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    bearer: 'token'
  }
})
----


[discrete]
[[auth-basic]]
==== Basic authentication

You can provide your credentials by passing the `username` and `password`
parameters via the `auth` option.

NOTE: If you provide both basic authentication credentials and the Api Key
configuration, the Api Key will take precedence.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----


Otherwise, you can provide your credentials in the node(s) URL.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://username:password@localhost:9200'
})
----


[discrete]
[[client-usage]]
=== Usage

Using the client is straightforward; it supports all the public APIs of {es},
and every method exposes the same signature.


[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

const result = await client.search({
  index: 'my-index',
  query: {
    match: { hello: 'world' }
  }
})
----

The returned value of every API call is the response body from {es}.
If you need to access additional metadata, such as the status code or headers,
you must specify `meta: true` in the request options:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

const result = await client.search({
  index: 'my-index',
  query: {
    match: { hello: 'world' }
  }
}, { meta: true })
----

In this case, the result will be:

[source,ts]
----
{
  body: object | boolean
  statusCode: number
  headers: object
  warnings: [string],
  meta: object
}
----

NOTE: The body is a boolean value when you use `HEAD` APIs.

[discrete]
==== Aborting a request

If needed, you can abort a running request by using the `AbortController` standard.

CAUTION: If you abort a request, the request will fail with a
`RequestAbortedError`.


[source,js]
----
const AbortController = require('node-abort-controller')
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

const abortController = new AbortController()
setImmediate(() => abortController.abort())

const result = await client.search({
  index: 'my-index',
  query: {
    match: { hello: 'world' }
  }
}, { signal: abortController.signal })
----

[discrete]
==== Request specific options

If needed you can pass request specific options in a second object:

[source,js]
----
const result = await client.search({
  index: 'my-index',
  body: {
    query: {
      match: { hello: 'world' }
    }
  }
}, {
  ignore: [404],
  maxRetries: 3
})
----


The supported request specific options are:

[cols=2*]
|===
|`ignore`
|`[number]` - HTTP status codes which should not be considered errors for this request. +
_Default:_ `null`

|`requestTimeout`
|`number` - Max request timeout for the request in milliseconds; it overrides the client default. +
_Default:_ `30000`

|`maxRetries`
|`number` - Max number of retries for the request; it overrides the client default. +
_Default:_ `3`

|`compression`
|`string, boolean` - Enables body compression for the request. +
_Options:_ `false`, `'gzip'` +
_Default:_ `false`

|`asStream`
|`boolean` - Instead of getting the parsed body back, you get the raw Node.js stream of data. +
_Default:_ `false`

|`headers`
|`object` - Custom headers for the request. +
_Default:_ `null`

|`querystring`
|`object` - Custom querystring for the request. +
_Default:_ `null`

|`id`
|`any` - Custom request id. _(overrides the top level request id generator)_ +
_Default:_ `null`

|`context`
|`any` - Custom object per request. _(you can use it to pass data to the client's events)_ +
_Default:_ `null`

|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it's higher, it will abort the request. It cannot be higher than `buffer.constants.MAX_STRING_LENGTH`. +
_Default:_ `null`

|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it's higher, it will abort the request. It cannot be higher than `buffer.constants.MAX_LENGTH`. +
_Default:_ `null`

|`signal`
|`AbortSignal` - The AbortSignal instance to allow request abortion. +
_Default:_ `null`

|===
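
For instance, a single search could be sent with a shorter timeout, gzip body compression, and a custom header (the values are illustrative):

[source,js]
----
const result = await client.search({
  index: 'my-index',
  query: {
    match: { hello: 'world' }
  }
}, {
  requestTimeout: 5000,
  compression: 'gzip',
  headers: { 'x-custom-header': 'my-search' }
})
----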

[discrete]
[[client-faas-env]]
=== Using the Client in a Function-as-a-Service Environment

This section illustrates the best practices for leveraging the {es} client in a Function-as-a-Service (FaaS) environment.
The most influential optimization is to initialize the client outside of the function, in the global scope.
This practice does not only improve performance but also enables background functionality such as https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[sniffing].
The following examples provide a skeleton for the best practices.

[discrete]
==== GCP Cloud Functions

[source,js]
----
'use strict'

const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // client initialisation
})

exports.testFunction = async function (req, res) {
  // use the client
}
----

[discrete]
==== AWS Lambda

[source,js]
----
'use strict'

const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // client initialisation
})

exports.handler = async function (event, context) {
  // use the client
}
----

[discrete]
==== Azure Functions

[source,js]
----
'use strict'

const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // client initialisation
})

module.exports = async function (context, req) {
  // use the client
}
----

Resources used to assess these recommendations:

- https://cloud.google.com/functions/docs/bestpractices/tips#use_global_variables_to_reuse_objects_in_future_invocations[GCP Cloud Functions: Tips & Tricks]
- https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html[Best practices for working with AWS Lambda functions]
- https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python?tabs=azurecli-linux%2Capplication-level#global-variables[Azure Functions Python developer guide]
- https://docs.aws.amazon.com/lambda/latest/operatorguide/global-scope.html[AWS Lambda: Comparing the effect of global scope]


[discrete]
[[client-connect-proxy]]
=== Connecting through a proxy

~Added~ ~in~ ~`v7.10.0`~

If you need to pass through an http(s) proxy for connecting to {es}, the client
offers out of the box a handy configuration for helping you with it. Under the
hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.

IMPORTANT: In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations.
To use a proxy, you will need to use the `HttpConnection` class from `@elastic/transport` instead.

[source,js]
----
import { HttpConnection } from '@elastic/transport'

const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://localhost:8080',
  Connection: HttpConnection,
})
----

Basic authentication is supported as well:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://user:pwd@localhost:8080',
  Connection: HttpConnection,
})
----

If you are connecting through a non-http(s) proxy, such as a `socks5` or `pac`,
you can use the `agent` option to configure it.

[source,js]
----
const SocksProxyAgent = require('socks-proxy-agent')
const client = new Client({
  node: 'http://localhost:9200',
  agent () {
    return new SocksProxyAgent('socks://127.0.0.1:1080')
  },
  Connection: HttpConnection,
})
----


[discrete]
[[client-error-handling]]
=== Error handling

The client exposes a variety of error objects that you can use to enhance your
error handling. You can find all the error objects inside the `errors` key in
the client.

[source,js]
----
const { errors } = require('@elastic/elasticsearch')
console.log(errors)
----


You can find the errors exported by the client in the table below.

[cols=3*]
|===
|*Error*
|*Description*
|*Properties*

|`ElasticsearchClientError`
|Every error inherits from this class; it is the basic error generated by the client.
a|* `name` - `string`
* `message` - `string`

|`TimeoutError`
|Generated when a request exceeds the `requestTimeout` option.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`ConnectionError`
|Generated when an error occurs during the request; it can be a connection error or a malformed stream of data.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`RequestAbortedError`
|Generated if the user calls the `request.abort()` method.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`NoLivingConnectionsError`
|Given the configuration, the ConnectionPool was not able to find a usable Connection for this request.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`SerializationError`
|Generated if the serialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `object`, the object to serialize

|`DeserializationError`
|Generated if the deserialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `string`, the string to deserialize

|`ConfigurationError`
|Generated if there is a malformed configuration or parameter.
a|* `name` - `string`
* `message` - `string`

|`ResponseError`
|Generated in case of a `4xx` or `5xx` response.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
* `body` - `object`, the response body
* `statusCode` - `number`, the response status code
* `headers` - `object`, the response headers

|===
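
Putting this together, a common pattern is to check for a `ResponseError` and inspect its `statusCode` and `body` (a minimal sketch):

[source,js]
----
const { Client, errors } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

try {
  await client.search({ index: 'my-index', query: { match_all: {} } })
} catch (err) {
  if (err instanceof errors.ResponseError) {
    // a 4xx or 5xx response returned by Elasticsearch
    console.log(err.statusCode, err.body)
  } else if (err instanceof errors.ConnectionError) {
    console.log('network problem:', err.message)
  } else {
    throw err
  }
}
----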

[[keep-alive]]
[discrete]
=== Keep-alive connections

By default, the client uses persistent, keep-alive connections to reduce the overhead of creating a new HTTP connection for each Elasticsearch request.
If you are using the default `UndiciConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 10 minutes.
If you are using the legacy `HttpConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 1 minute.

If you need to disable keep-alive connections, you can override the HTTP agent with your preferred https://nodejs.org/api/http.html#http_new_agent_options[HTTP agent options]:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // the function takes as parameter the option
  // object passed to the Connection constructor
  agent: (opts) => new CustomAgent()
})
----

Or you can disable the HTTP agent entirely:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // Disable agent and keep-alive
  agent: false
})
----

[discrete]
[[close-connections]]
=== Closing a client's connections

If you would like to close all open connections being managed by an instance of the client, use the `close()` function:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200'
});
client.close();
----

[discrete]
[[product-check]]
=== Automatic product check

Since v7.14.0, the client performs a required product check before the first call.
This pre-flight product check allows the client to establish the version of Elasticsearch
that it is communicating with. The product check requires one additional HTTP request to
be sent to the server as part of the request pipeline before the main API call is sent.
In most cases, this will succeed during the very first API call that the client sends.
Once the product check completes, no further product check HTTP requests are sent for
subsequent API calls.
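
No extra code is needed for the product check; it happens transparently as part of the first request. If the configured endpoint does not identify itself as Elasticsearch, that first call is rejected (in recent transport versions with a `ProductNotSupportedError`; the exact error name is an assumption here, so check the `errors` export of your client version):

[source,js]
----
try {
  // the very first call triggers the pre-flight product check
  await client.info()
} catch (err) {
  // fails if the endpoint does not identify itself as Elasticsearch
  console.log(err.name, err.message)
}
----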

docs/doc_examples/00ad41bde67beac991534ae0e04b1296.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getDataStream({
  name: "my-data-stream",
  filter_path: "data_streams.indices.index_name",
});
console.log(response);
----

docs/doc_examples/015e6e6132b6d6d44bddb06bc3b316ed.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        { standard: { query: { range: { year: { gt: 2023 } } } } },
        { standard: { query: { term: { topic: "elastic" } } } },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: false,
  aggs: {
    topics: { terms: { field: "topic" } },
  },
});
console.log(response);
----

docs/doc_examples/0165d22da5f2fc7678392b31d8eb5566.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "rerank",
  inference_id: "my-rerank-model",
  inference_config: {
    service: "cohere",
    service_settings: {
      model_id: "rerank-english-v3.0",
      api_key: "{{COHERE_API_KEY}}",
    },
  },
});
console.log(response);
----

@@ -5,10 +5,16 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  mappings: {
-    _source: {
-      mode: "synthetic",
+  settings: {
+    index: {
+      mapping: {
+        source: {
+          mode: "synthetic",
+        },
+      },
     },
+  },
+  mappings: {
     properties: {
       kwd: {
         type: "keyword",

@@ -26,7 +26,7 @@ const response1 = await client.cluster.putComponentTemplate({
       type: "keyword",
       script: {
         source:
-          "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+          "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
       },
     },
   },
||||||
12
docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc
Normal file
12
docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
// This file is autogenerated, DO NOT EDIT
|
||||||
|
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||||
|
|
||||||
|
[source, js]
|
||||||
|
----
|
||||||
|
const response = await client.tasks.list({
|
||||||
|
human: "true",
|
||||||
|
detailed: "true",
|
||||||
|
actions: "indices:data/write/bulk",
|
||||||
|
});
|
||||||
|
console.log(response);
|
||||||
|
----
|
||||||
@ -4,7 +4,7 @@
|
|||||||
[source, js]
|
[source, js]
|
||||||
----
|
----
|
||||||
const response = await client.ingest.putPipeline({
|
const response = await client.ingest.putPipeline({
|
||||||
id: "alibabacloud_ai_search_embeddings",
|
id: "alibabacloud_ai_search_embeddings_pipeline",
|
||||||
processors: [
|
processors: [
|
||||||
{
|
{
|
||||||
inference: {
|
inference: {
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
// This file is autogenerated, DO NOT EDIT
|
||||||
|
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||||
|
|
||||||
|
[source, js]
|
||||||
|
----
|
||||||
|
const response = await client.cluster.reroute();
|
||||||
|
console.log(response);
|
||||||
|
----
|
||||||

docs/doc_examples/0722b302b2b3275a988d858044f99d5d.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getMapping({
  index: "kibana_sample_data_ecommerce",
});
console.log(response);
----

@@ -11,6 +11,8 @@ const response = await client.indices.putSettings({
     "index.indexing.slowlog.threshold.index.debug": "2s",
     "index.indexing.slowlog.threshold.index.trace": "500ms",
     "index.indexing.slowlog.source": "1000",
+    "index.indexing.slowlog.reformat": true,
+    "index.indexing.slowlog.include.user": true,
   },
 });
 console.log(response);

docs/doc_examples/082e78c7a2061a7c4a52b494e5ede0e8.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-rank-vectors-bit",
  mappings: {
    properties: {
      my_vector: {
        type: "rank_vectors",
        element_type: "bit",
      },
    },
  },
});
console.log(response);

const response1 = await client.bulk({
  index: "my-rank-vectors-bit",
  refresh: "true",
  operations: [
    { index: { _id: "1" } },
    { my_vector: [127, -127, 0, 1, 42] },
    { index: { _id: "2" } },
    { my_vector: "8100012a7f" },
  ],
});
console.log(response1);
----

@@ -14,6 +14,7 @@ const response = await client.inference.put({
       min_number_of_allocations: 3,
       max_number_of_allocations: 10,
     },
+    num_threads: 1,
     model_id: ".multilingual-e5-small",
   },
 },

@@ -10,7 +10,7 @@ const response = await client.search({
       type: "keyword",
       script: {
         source:
-          "emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ROOT))",
+          "emit(doc['@timestamp'].value.dayOfWeekEnum\n .getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
       },
     },
   },

docs/doc_examples/0bc6155e0c88062a4d8490da49db3aa8.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example_nested",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              nested: {
                path: "nested_field",
                inner_hits: {
                  name: "nested_vector",
                  _source: false,
                  fields: ["nested_field.paragraph_id"],
                },
                query: {
                  knn: {
                    field: "nested_field.nested_vector",
                    query_vector: [1, 0, 0.5],
                    k: 10,
                  },
                },
              },
            },
          },
        },
        {
          standard: {
            query: { term: { topic: "ai" } },
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: ["topic"],
});
console.log(response);
----

docs/doc_examples/0bee07a581c5776e068f6f4efad5a399.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.esql.asyncQuery({
  format: "json",
  query:
    "\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
  include_ccs_metadata: true,
});
console.log(response);
----

@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
     {
       attachment: {
         field: "data",
-        remove_binary: false,
+        remove_binary: true,
       },
     },
   ],

docs/doc_examples/0c8be7aec84ea86b243904f5d4162f5a.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "cooking_blog",
  query: {
    match: {
      title: {
        query: "fluffy pancakes breakfast",
        minimum_should_match: 2,
      },
    },
  },
});
console.log(response);
----

@@ -9,7 +9,7 @@ const response = await client.connector.put({
   name: "My Connector",
   description: "My Connector to sync data to Elastic index from Google Drive",
   service_type: "google_drive",
-  language: "english",
+  language: "en",
 });
 console.log(response);
 ----

docs/doc_examples/0d689ac6e78be5d438f9b5d441be2b44.asciidoc (new file)
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "retrievers_example",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: { term: { topic: "elastic" } },
          },
        },
        {
          rrf: {
            retrievers: [
              {
                standard: {
                  query: {
                    query_string: {
                      query: "(information retrieval) OR (artificial intelligence)",
                      default_field: "text",
                    },
                  },
                },
              },
              {
                knn: {
                  field: "vector",
                  query_vector: [0.23, 0.67, 0.89],
                  k: 3,
                  num_candidates: 5,
                },
              },
            ],
            rank_window_size: 10,
            rank_constant: 1,
          },
        },
      ],
      rank_window_size: 10,
      rank_constant: 1,
    },
  },
  _source: false,
  size: 1,
  explain: true,
});
console.log(response);
----

@@ -5,20 +5,18 @@
 ----
 const response = await client.searchApplication.renderQuery({
   name: "my-app",
-  body: {
-    params: {
-      query_string: "my first query",
-      text_fields: [
-        {
-          name: "title",
-          boost: 5,
-        },
-        {
-          name: "description",
-          boost: 1,
-        },
-      ],
-    },
+  params: {
+    query_string: "my first query",
+    text_fields: [
+      {
+        name: "title",
+        boost: 5,
+      },
+      {
+        name: "description",
+        boost: 1,
+      },
+    ],
   },
 });
 console.log(response);
docs/doc_examples/0e31b8ad176b31028becf9500989bcbd.asciidoc (Normal file, 21 lines)
@@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "text_embedding",
  inference_id: "watsonx-embeddings",
  inference_config: {
    service: "watsonxai",
    service_settings: {
      api_key: "<api_key>",
      url: "<url>",
      model_id: "ibm/slate-30m-english-rtrvr",
      project_id: "<project_id>",
      api_version: "2024-03-14",
    },
  },
});
console.log(response);
----
@@ -1,15 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.cluster.putSettings({
-  persistent: {
-    "cluster.routing.allocation.disk.watermark.low": "100gb",
-    "cluster.routing.allocation.disk.watermark.high": "50gb",
-    "cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
-    "cluster.info.update.interval": "1m",
-  },
-});
-console.log(response);
-----
@@ -4,9 +4,11 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: "my-index-000001",
+  index: ".reindexed-v9-ml-anomalies-custom-example",
   settings: {
-    "index.merge.policy.max_merge_at_once_explicit": null,
+    index: {
+      number_of_replicas: "<original_number_of_replicas>",
+    },
   },
 });
 console.log(response);
@@ -5,7 +5,11 @@
 ----
 const response = await client.transport.request({
   method: "GET",
-  path: "/_ingest/geoip/database/my-database-id",
+  path: "/_connector/_sync_job",
+  querystring: {
+    connector_id: "my-connector-id",
+    size: "1",
+  },
 });
 console.log(response);
 ----
docs/doc_examples/120fcf9f55128d6a81d5e87a9c235bbd.asciidoc (Normal file, 18 lines)
@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.streamInference({
  task_type: "chat_completion",
  inference_id: "openai-completion",
  model: "gpt-4o",
  messages: [
    {
      role: "user",
      content: "What is Elastic?",
    },
  ],
});
console.log(response);
----
docs/doc_examples/12adea5d76f73d94d80d42f53f67563f.asciidoc (Normal file, 11 lines)
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.addBlock({
  index: ".ml-anomalies-custom-example",
  block: "read_only",
});
console.log(response);
----
docs/doc_examples/12e9e758f7f18a6cbf27e9d0aea57a19.asciidoc (Normal file, 18 lines)
@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.update({
  index: ".elastic-connectors",
  id: "connector_id",
  doc: {
    features: {
      native_connector_api_keys: {
        enabled: true,
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/13d91782399ba1f291e103c18b5338cc.asciidoc (Normal file, 25 lines)
@@ -0,0 +1,25 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.createFrom({
  source: "my-index",
  dest: "my-new-index",
  create_from: {
    settings_override: {
      index: {
        number_of_shards: 5,
      },
    },
    mappings_override: {
      properties: {
        field2: {
          type: "boolean",
        },
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/141ef0ebaa3b0772892b79b9bb85efb0.asciidoc (Normal file, 16 lines)
@@ -0,0 +1,16 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "my-inference-endpoint",
  inference_id: "_update",
  inference_config: {
    service_settings: {
      api_key: "<API_KEY>",
    },
  },
});
console.log(response);
----
docs/doc_examples/1522a9297151d7046e6345b9b27539ca.asciidoc (Normal file, 19 lines)
@@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.connector.updateConfiguration({
  connector_id: "my-connector-id",
  values: {
    host: "127.0.0.1",
    port: 5432,
    username: "myuser",
    password: "mypassword",
    database: "chinook",
    schema: "public",
    tables: "album,artist",
  },
});
console.log(response);
----
docs/doc_examples/15ac33d641b376d9494075eb1f0d4066.asciidoc (Normal file, 10 lines)
@@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.cancelMigrateReindex({
  index: "my-data-stream",
});
console.log(response);
----
docs/doc_examples/15f769bbd7b5fddeb3353ae726b71b14.asciidoc (Normal file, 28 lines)
@@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-index-bit-vectors",
  query: {
    script_score: {
      query: {
        match_all: {},
      },
      script: {
        source: "dotProduct(params.query_vector, 'my_dense_vector')",
        params: {
          query_vector: [
            0.23, 1.45, 3.67, 4.89, -0.56, 2.34, 3.21, 1.78, -2.45, 0.98, -0.12,
            3.45, 4.56, 2.78, 1.23, 0.67, 3.89, 4.12, -2.34, 1.56, 0.78, 3.21,
            4.12, 2.45, -1.67, 0.34, -3.45, 4.56, -2.78, 1.23, -0.67, 3.89,
            -4.34, 2.12, -1.56, 0.78, -3.21, 4.45, 2.12, 1.67,
          ],
        },
      },
    },
  },
});
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
     script: {
       lang: "mustache",
       source:
-        '\n    {\n      "query": {\n        "bool": {\n          "must": [\n          {{#query}}\n          \n          {{/query}}\n          ],\n          "filter": {{#toJson}}_es_filters{{/toJson}}\n        }\n      },\n      "_source": {\n        "includes": ["title", "plot"]\n      },\n      "highlight": {\n        "fields": {\n          "title": { "fragment_size": 0 },\n          "plot": { "fragment_size": 200 }\n        }\n      },\n      "aggs": {{#toJson}}_es_aggs{{/toJson}},\n      "from": {{from}},\n      "size": {{size}},\n      "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n    }\n  ',
+        '\n    {\n      "query": {\n        "bool": {\n          "must": [\n          {{#query}}\n          {{/query}}\n          ],\n          "filter": {{#toJson}}_es_filters{{/toJson}}\n        }\n      },\n      "_source": {\n        "includes": ["title", "plot"]\n      },\n      "highlight": {\n        "fields": {\n          "title": { "fragment_size": 0 },\n          "plot": { "fragment_size": 200 }\n        }\n      },\n      "aggs": {{#toJson}}_es_aggs{{/toJson}},\n      "from": {{from}},\n      "size": {{size}},\n      "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n    }\n  ',
       params: {
         query: "",
         _es_filters: {},
docs/doc_examples/16a7ce08b4a6b3af269f27eecc71d664.asciidoc (Normal file, 15 lines)
@@ -0,0 +1,15 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.delete({
  index: "books",
});
console.log(response);

const response1 = await client.indices.delete({
  index: "my-explicit-mappings-books",
});
console.log(response1);
----
docs/doc_examples/17316a81c9dbdd120b7754116bf0461c.asciidoc (Normal file, 26 lines)
@@ -0,0 +1,26 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.security.createApiKey({
  name: "my-connector-api-key",
  role_descriptors: {
    "my-connector-connector-role": {
      cluster: ["monitor", "manage_connector"],
      indices: [
        {
          names: [
            "my-index_name",
            ".search-acl-filter-my-index_name",
            ".elastic-connectors*",
          ],
          privileges: ["all"],
          allow_restricted_indices: false,
        },
      ],
    },
  },
});
console.log(response);
----
docs/doc_examples/174b93c323aa8e9cc8ee2a3df5736810.asciidoc (Normal file, 12 lines)
@@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.security.delegatePki({
  x509_certificate_chain: [
    "MIIDeDCCAmCgAwIBAgIUBzj/nGGKxP2iXawsSquHmQjCJmMwDQYJKoZIhvcNAQELBQAwUzErMCkGA1UEAxMiRWxhc3RpY3NlYXJjaCBUZXN0IEludGVybWVkaWF0ZSBDQTEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMB4XDTIzMDcxODE5MjkwNloXDTQzMDcxMzE5MjkwNlowSjEiMCAGA1UEAxMZRWxhc3RpY3NlYXJjaCBUZXN0IENsaWVudDEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAllHL4pQkkfwAm/oLkxYYO+r950DEy1bjH+4viCHzNADLCTWO+lOZJVlNx7QEzJE3QGMdif9CCBBxQFMapA7oUFCLq84fPSQQu5AnvvbltVD9nwVtCs+9ZGDjMKsz98RhSLMFIkxdxi6HkQ3Lfa4ZSI4lvba4oo+T/GveazBDS+NgmKyq00EOXt3tWi1G9vEVItommzXWfv0agJWzVnLMldwkPqsw0W7zrpyT7FZS4iLbQADGceOW8fiauOGMkscu9zAnDR/SbWl/chYioQOdw6ndFLn1YIFPd37xL0WsdsldTpn0vH3YfzgLMffT/3P6YlwBegWzsx6FnM/93Ecb4wIDAQABo00wSzAJBgNVHRMEAjAAMB0GA1UdDgQWBBQKNRwjW+Ad/FN1Rpoqme/5+jrFWzAfBgNVHSMEGDAWgBRcya0c0x/PaI7MbmJVIylWgLqXNjANBgkqhkiG9w0BAQsFAAOCAQEACZ3PF7Uqu47lplXHP6YlzYL2jL0D28hpj5lGtdha4Muw1m/BjDb0Pu8l0NQ1z3AP6AVcvjNDkQq6Y5jeSz0bwQlealQpYfo7EMXjOidrft1GbqOMFmTBLpLA9SvwYGobSTXWTkJzonqVaTcf80HpMgM2uEhodwTcvz6v1WEfeT/HMjmdIsq4ImrOL9RNrcZG6nWfw0HR3JNOgrbfyEztEI471jHznZ336OEcyX7gQuvHE8tOv5+oD1d7s3Xg1yuFp+Ynh+FfOi3hPCuaHA+7F6fLmzMDLVUBAllugst1C3U+L/paD7tqIa4ka+KNPCbSfwazmJrt4XNiivPR4hwH5g==",
  ],
});
console.log(response);
----
@@ -16,7 +16,7 @@ const response = await client.search({
         },
       },
       field: "text",
-      inference_id: "my-cohere-rerank-model",
+      inference_id: "elastic-rerank",
       inference_text: "How often does the moon hide the sun?",
       rank_window_size: 100,
       min_score: 0.5,
@@ -3,8 +3,8 @@

 [source, js]
 ----
-const response = await client.cluster.reroute({
-  metric: "none",
+const response = await client.indices.create({
+  index: "books",
 });
 console.log(response);
 ----
@@ -4,24 +4,22 @@
 [source, js]
 ----
 const response = await client.simulate.ingest({
-  body: {
-    docs: [
-      {
-        _index: "my-index",
-        _id: "123",
-        _source: {
-          foo: "bar",
-        },
-      },
-      {
-        _index: "my-index",
-        _id: "456",
-        _source: {
-          foo: "rab",
-        },
-      },
-    ],
-  },
+  docs: [
+    {
+      _index: "my-index",
+      _id: "123",
+      _source: {
+        foo: "bar",
+      },
+    },
+    {
+      _index: "my-index",
+      _id: "456",
+      _source: {
+        foo: "rab",
+      },
+    },
+  ],
 });
 console.log(response);
 ----
docs/doc_examples/1a7483796087053ba55029d0dc2ab356.asciidoc (Normal file, 19 lines)
@@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.index({
  index: "mv",
  refresh: "true",
  document: {
    a: [2, null, 1],
  },
});
console.log(response);

const response1 = await client.esql.query({
  query: "FROM mv | LIMIT 1",
});
console.log(response1);
----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "hugging_face_embeddings",
+  id: "hugging_face_embeddings_pipeline",
   processors: [
     {
       inference: {
@@ -3,8 +3,8 @@

 [source, js]
 ----
-const response = await client.security.queryRole({
-  sort: ["name"],
+const response = await client.indices.delete({
+  index: "music",
 });
 console.log(response);
 ----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "google_vertex_ai_embeddings",
+  id: "google_vertex_ai_embeddings_pipeline",
   processors: [
     {
       inference: {
docs/doc_examples/1ead35c954963e83f89872048dabdbe9.asciidoc (Normal file, 19 lines)
@@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.security.queryRole({
  query: {
    bool: {
      must_not: {
        term: {
          "metadata._reserved": true,
        },
      },
    },
  },
  sort: ["name"],
});
console.log(response);
----
docs/doc_examples/1fb2c77c0988bc6545040b20e3afa7e9.asciidoc (Normal file, 33 lines)
@@ -0,0 +1,33 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.security.createApiKey({
  name: "john-api-key",
  expiration: "1d",
  role_descriptors: {
    "sharepoint-online-role": {
      index: [
        {
          names: ["sharepoint-search-application"],
          privileges: ["read"],
          query: {
            template: {
              params: {
                access_control: ["john@example.co", "Engineering Members"],
              },
              source:
                '\n {\n "bool": {\n "should": [\n {\n "bool": {\n "must_not": {\n "exists": {\n "field": "_allow_access_control"\n }\n }\n }\n },\n {\n "terms": {\n "_allow_access_control.enum": {{#toJson}}access_control{{/toJson}}\n }\n }\n ]\n }\n }\n ',
            },
          },
        },
      ],
      restriction: {
        workflows: ["search_application_query"],
      },
    },
  },
});
console.log(response);
----
@@ -5,7 +5,7 @@
 ----
 const response = await client.ingest.putPipeline({
   id: "geoip",
-  description: "Add geoip info",
+  description: "Add ip geolocation info",
   processors: [
     {
       geoip: {
@@ -5,10 +5,16 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  mappings: {
-    _source: {
-      mode: "synthetic",
-    },
+  settings: {
+    index: {
+      mapping: {
+        source: {
+          mode: "synthetic",
+        },
+      },
+    },
+  },
+  mappings: {
     properties: {
       card: {
         type: "wildcard",
@@ -3,8 +3,8 @@

 [source, js]
 ----
-const response = await client.indices.unfreeze({
-  index: "my-index-000001",
+const response = await client.indices.rollover({
+  alias: "datastream",
 });
 console.log(response);
 ----
docs/doc_examples/246763219ec06172f7aa57bba28d344a.asciidoc (Normal file, 67 lines)
@@ -0,0 +1,67 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-rank-vectors-bit",
  mappings: {
    properties: {
      my_vector: {
        type: "rank_vectors",
        element_type: "bit",
      },
    },
  },
});
console.log(response);

const response1 = await client.bulk({
  index: "my-rank-vectors-bit",
  refresh: "true",
  operations: [
    {
      index: {
        _id: "1",
      },
    },
    {
      my_vector: [127, -127, 0, 1, 42],
    },
    {
      index: {
        _id: "2",
      },
    },
    {
      my_vector: "8100012a7f",
    },
  ],
});
console.log(response1);

const response2 = await client.search({
  index: "my-rank-vectors-bit",
  query: {
    script_score: {
      query: {
        match_all: {},
      },
      script: {
        source: "maxSimDotProduct(params.query_vector, 'my_vector')",
        params: {
          query_vector: [
            [
              0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
              0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
              0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
              0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
            ],
          ],
        },
      },
    },
  },
});
console.log(response2);
----
@@ -10,7 +10,7 @@ const response = await client.search({
     "date.day_of_week": {
       type: "keyword",
       script:
-        "emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))",
+        "emit(doc['date'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
     },
   },
   aggs: {
@@ -1,28 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.esql.query({
-  format: "txt",
-  query:
-    "\n FROM library\n | SORT page_count DESC\n | KEEP name, author\n | LOOKUP era ON author\n | LIMIT 5\n ",
-  tables: {
-    era: {
-      author: {
-        keyword: [
-          "Frank Herbert",
-          "Peter F. Hamilton",
-          "Vernor Vinge",
-          "Alastair Reynolds",
-          "James S.A. Corey",
-        ],
-      },
-      era: {
-        keyword: ["The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"],
-      },
-    },
-  },
-});
-console.log(response);
-----
docs/doc_examples/272e27bf1fcc4fe5dbd4092679dd0342.asciidoc (Normal file, 11 lines)
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.addBlock({
  index: ".ml-anomalies-custom-example",
  block: "write",
});
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "azure-ai-studio-embeddings",
-    pipeline: "azure_ai_studio_embeddings",
+    pipeline: "azure_ai_studio_embeddings_pipeline",
   },
 });
 console.log(response);
docs/doc_examples/29aeabacb1fdf5b083d5f091b6d1bd44.asciidoc (Normal file, 15 lines)
@@ -0,0 +1,15 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.migrateReindex({
  reindex: {
    source: {
      index: "my-data-stream",
    },
    mode: "upgrade",
  },
});
console.log(response);
----
@@ -4,11 +4,9 @@
 [source, js]
 ----
 const response = await client.security.oidcLogout({
-  body: {
-    token:
-      "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
-    refresh_token: "vLBPvmAB6KvwvJZr27cS",
-  },
+  token:
+    "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
+  refresh_token: "vLBPvmAB6KvwvJZr27cS",
 });
 console.log(response);
 ----
docs/doc_examples/2a21674c40f9b182a8944769d20b2357.asciidoc (Normal file, 26 lines)
@@ -0,0 +1,26 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-rank-vectors-float",
  query: {
    script_score: {
      query: {
        match_all: {},
      },
      script: {
        source: "maxSimDotProduct(params.query_vector, 'my_vector')",
        params: {
          query_vector: [
            [0.5, 10, 6],
            [-0.5, 10, 10],
          ],
        },
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/2a67608dadbf220a2f040f3a79d3677d.asciidoc (Normal file, 35 lines)
@@ -0,0 +1,35 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "attachment",
  description: "Extract attachment information including original binary",
  processors: [
    {
      attachment: {
        field: "data",
        remove_binary: false,
      },
    },
  ],
});
console.log(response);

const response1 = await client.index({
  index: "my-index-000001",
  id: "my_id",
  pipeline: "attachment",
  document: {
    data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
  },
});
console.log(response1);

const response2 = await client.get({
  index: "my-index-000001",
  id: "my_id",
});
console.log(response2);
----
docs/doc_examples/2afd49985950cbcccf727fa858d00067.asciidoc (Normal file, 24 lines)
@@ -0,0 +1,24 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "test-index",
  query: {
    match: {
      my_field: "Which country is Paris in?",
    },
  },
  highlight: {
    fields: {
      my_field: {
        type: "semantic",
        number_of_fragments: 2,
        order: "score",
      },
    },
  },
});
console.log(response);
----
@@ -6,7 +6,6 @@
 const response = await client.esql.asyncQueryGet({
   id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
   wait_for_completion_timeout: "30s",
-  body: null,
 });
 console.log(response);
 ----
docs/doc_examples/2c079d1ae4819a0c206b9e1aa5623523.asciidoc (Normal file, 77 lines)
@@ -0,0 +1,77 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000001",
  mappings: {
    properties: {
      attributes: {
        type: "passthrough",
        priority: 10,
        properties: {
          id: {
            type: "keyword",
          },
        },
      },
    },
  },
});
console.log(response);

const response1 = await client.index({
  index: "my-index-000001",
  id: 1,
  document: {
    attributes: {
      id: "foo",
      zone: 10,
    },
  },
});
console.log(response1);

const response2 = await client.search({
  index: "my-index-000001",
  query: {
    bool: {
      must: [
        {
          match: {
            id: "foo",
          },
        },
        {
          match: {
            zone: 10,
          },
        },
      ],
    },
  },
});
console.log(response2);

const response3 = await client.search({
  index: "my-index-000001",
  query: {
    bool: {
      must: [
        {
          match: {
            "attributes.id": "foo",
          },
        },
        {
          match: {
            "attributes.zone": 10,
          },
        },
      ],
    },
  },
});
console.log(response3);
----
docs/doc_examples/2c86840a46242a38cf82024a9321be46.asciidoc (Normal file, 28 lines)
@@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-explicit-mappings-books",
  mappings: {
    dynamic: false,
    properties: {
      name: {
        type: "text",
      },
      author: {
        type: "text",
      },
      release_date: {
        type: "date",
        format: "yyyy-MM-dd",
      },
      page_count: {
        type: "integer",
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/2d0244c020075595acb625aa5ba8f455.asciidoc (Normal file, 25 lines)
@@ -0,0 +1,25 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.index({
  index: "idx_keep",
  id: 1,
  document: {
    path: {
      to: [
        {
          foo: [3, 2, 1],
        },
        {
          foo: [30, 20, 10],
        },
      ],
      bar: "baz",
    },
    ids: [200, 100, 300, 100],
  },
});
console.log(response);
----
docs/doc_examples/2e7844477b41fcfa9efefee4ec0e7101.asciidoc (Normal file, 43 lines)
@@ -0,0 +1,43 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-index-000001",
  retriever: {
    rule: {
      match_criteria: {
        query_string: "puggles",
        user_country: "us",
      },
      ruleset_ids: ["my-ruleset"],
      retriever: {
        rrf: {
          retrievers: [
            {
              standard: {
                query: {
                  query_string: {
                    query: "pugs",
                  },
                },
              },
            },
            {
              standard: {
                query: {
                  query_string: {
                    query: "puggles",
                  },
                },
              },
            },
          ],
        },
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/2f72a63c73dd672ac2dc3997ad15dd41.asciidoc (Normal file, 23 lines)
@@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "test-index",
  mappings: {
    properties: {
      source_field: {
        type: "text",
        fields: {
          infer_field: {
            type: "semantic_text",
            inference_id: ".elser-2-elasticsearch",
          },
        },
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/2f9ee29fe49f7d206a41212aa5945296.asciidoc (Normal file, 22 lines)
@@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.createFrom({
  source: "my-index",
  dest: "my-new-index",
  create_from: {
    settings_override: {
      index: {
        "blocks.write": null,
        "blocks.read": null,
        "blocks.read_only": null,
        "blocks.read_only_allow_delete": null,
        "blocks.metadata": null,
      },
    },
  },
});
console.log(response);
----
docs/doc_examples/30d051f534aeb884176eedb2c11dac85.asciidoc (Normal file, 23 lines)
@@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "rerank",
  inference_id: "my-elastic-rerank",
  inference_config: {
    service: "elasticsearch",
    service_settings: {
      model_id: ".rerank-v1",
      num_threads: 1,
      adaptive_allocations: {
        enabled: true,
        min_number_of_allocations: 1,
        max_number_of_allocations: 4,
      },
    },
  },
});
console.log(response);
----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.ingest.putPipeline({
-  id: "azure_ai_studio_embeddings",
+  id: "azure_ai_studio_embeddings_pipeline",
   processors: [
     {
       inference: {
docs/doc_examples/31832bd71c31c46a1ccf8d1c210d89d4.asciidoc (Normal file, 28 lines)
@@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-index-*",
  query: {
    bool: {
      must: [
        {
          match: {
            "user.id": "kimchy",
          },
        },
      ],
      must_not: [
        {
          terms: {
            _index: ["my-index-01"],
          },
        },
      ],
    },
  },
});
console.log(response);
----
docs/doc_examples/32c8c86702ccd68eb70f1573409c2a1f.asciidoc (Normal file, 31 lines)
@@ -0,0 +1,31 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ilm.putLifecycle({
  name: "my_policy",
  policy: {
    phases: {
      hot: {
        actions: {
          rollover: {
            max_primary_shard_size: "50gb",
          },
          searchable_snapshot: {
            snapshot_repository: "backing_repo",
            replicate_for: "14d",
          },
        },
      },
      delete: {
        min_age: "28d",
        actions: {
          delete: {},
        },
      },
    },
  },
});
console.log(response);
----
@@ -14,6 +14,7 @@ const response = await client.indices.putSettings({
     "index.search.slowlog.threshold.fetch.info": "800ms",
     "index.search.slowlog.threshold.fetch.debug": "500ms",
     "index.search.slowlog.threshold.fetch.trace": "200ms",
+    "index.search.slowlog.include.user": true,
   },
 });
 console.log(response);
@@ -5,7 +5,7 @@
 ----
 const response = await client.ingest.putPipeline({
   id: "geoip",
-  description: "Add geoip info",
+  description: "Add ip geolocation info",
   processors: [
     {
       geoip: {
docs/doc_examples/339c4e5af9f9069ad9912aa574488b59.asciidoc (Normal file, 52 lines)
@@ -0,0 +1,52 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-bit-vectors",
  mappings: {
    properties: {
      my_dense_vector: {
        type: "dense_vector",
        index: false,
        element_type: "bit",
        dims: 40,
      },
    },
  },
});
console.log(response);

const response1 = await client.index({
  index: "my-index-bit-vectors",
  id: 1,
  document: {
    my_dense_vector: [8, 5, -15, 1, -7],
  },
});
console.log(response1);

const response2 = await client.index({
  index: "my-index-bit-vectors",
  id: 2,
  document: {
    my_dense_vector: [-1, 115, -3, 4, -128],
  },
});
console.log(response2);

const response3 = await client.index({
  index: "my-index-bit-vectors",
  id: 3,
  document: {
    my_dense_vector: [2, 18, -5, 0, -124],
  },
});
console.log(response3);

const response4 = await client.indices.refresh({
  index: "my-index-bit-vectors",
});
console.log(response4);
----
@@ -9,7 +9,6 @@ const response = await client.indices.create({
     properties: {
       semantic_text: {
         type: "semantic_text",
-        inference_id: "my-elser-endpoint",
       },
       content: {
         type: "text",
docs/doc_examples/36792c81c053e0555407d1e83e7e054f.asciidoc (Normal file, 70 lines)
@@ -0,0 +1,70 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "movies",
  size: 10,
  retriever: {
    rescorer: {
      rescore: {
        window_size: 50,
        query: {
          rescore_query: {
            script_score: {
              query: {
                match_all: {},
              },
              script: {
                source:
                  "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
                params: {
                  queryVector: [-0.5, 90, -10, 14.8, -156],
                },
              },
            },
          },
        },
      },
      retriever: {
        rrf: {
          rank_window_size: 100,
          retrievers: [
            {
              standard: {
                query: {
                  sparse_vector: {
                    field: "plot_embedding",
                    inference_id: "my-elser-model",
                    query: "films that explore psychological depths",
                  },
                },
              },
            },
            {
              standard: {
                query: {
                  multi_match: {
                    query: "crime",
                    fields: ["plot", "title"],
                  },
                },
              },
            },
            {
              knn: {
                field: "vector",
                query_vector: [10, 22, 77],
                k: 10,
                num_candidates: 10,
              },
            },
          ],
        },
      },
    },
  },
});
console.log(response);
----
@@ -11,7 +11,7 @@ const response = await client.reindex({
   },
   dest: {
     index: "openai-embeddings",
-    pipeline: "openai_embeddings",
+    pipeline: "openai_embeddings_pipeline",
   },
 });
 console.log(response);
docs/doc_examples/3722dad876023e0757138dd5a6d3240e.asciidoc (Normal file, 23 lines)
@@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index",
  settings: {
    index: {
      number_of_shards: 3,
      "blocks.write": true,
    },
  },
  mappings: {
    properties: {
      field1: {
        type: "text",
      },
    },
  },
});
console.log(response);
----
@@ -1,23 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.bulk({
-  index: "test-index",
-  operations: [
-    {
-      update: {
-        _id: "1",
-      },
-    },
-    {
-      doc: {
-        infer_field: "updated inference field",
-        source_field: "updated source field",
-      },
-    },
-  ],
-});
-console.log(response);
-----
Some files were not shown because too many files have changed in this diff.