Compare commits


29 Commits
8.16 ... 8.11

Author SHA1 Message Date
f929d8f6c6 Switch 8.11 clients to only get patch updates to transport (#2215)
* Backport changelog from 8.10

* Pin transport to ~8.4.0
2024-04-09 12:55:49 -05:00
90cfe2a063 Auto-generated code for 8.11 (#2132) 2024-02-27 14:23:09 -06:00
5451452c16 Auto-generated API code (#2121) 2024-01-31 11:33:28 +04:00
3bf710d7ba Improved the body BC break description in request/response for 8.x documentation (#2117) (#2118)
* Improved the body bc break in 8.x documentation

* Removed just in the sentence

(cherry picked from commit 6eabf37097)

Co-authored-by: Enrico Zimuel <e.zimuel@gmail.com>
2024-01-04 13:57:58 +01:00
656550922e [Backport 8.11] Add missing snippets (#2115)
For https://github.com/elastic/clients-team/issues/728

(cherry picked from commit 5413eb5f35)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-14 17:21:09 -06:00
5ea47d84e0 Backport integration test fixes from #2109 (#2112) 2023-12-14 16:47:10 -06:00
345a255c2f Drop snapshot for 8.11.0 testing (#2110) 2023-12-14 16:34:41 -06:00
f75b83b1d8 [Backport 8.11] Add doc for closing connections (#2107)
(cherry picked from commit d3f22f1e14)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-14 09:47:39 -06:00
813c60fd81 Bump canary version (#2102) 2023-12-12 16:22:08 -06:00
0f54c56c56 [Backport 8.11] 8.11.0 changelog (#2101)
* Changelog for 8.11.0

* Add redaction docs link to changelog
2023-12-12 16:10:50 -06:00
d00cc52f4f [Backport 8.11] Bump transport to 8.4.0 (#2096)
* Support for transport 8.4.0 redaction functionality

* Docs for `redaction` options

(cherry picked from commit c2c417a9fd)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-12 15:42:35 -06:00
dda20a4d07 Auto-generated code for 8.11 (#2091) 2023-12-12 10:56:49 -06:00
7cc6824974 [Backport 8.11] [DOCS] Adds a signpost for downloading ES or signing-up for ESS (#2086)
Co-authored-by: István Zoltán Szabó <szabosteve@gmail.com>
2023-11-27 10:48:00 -06:00
dbb558cbf5 Auto-generated code for 8.11 (#2082) 2023-11-27 10:45:15 -06:00
7c4a9874ef Auto-generated code for 8.11 (#2076) 2023-11-16 10:50:39 -06:00
f9007d360f [Backport 8.11] Throw an explicit error when asStream is used with bulk helper (#2079)
Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-11-16 10:44:50 -06:00
a6cae9f625 Auto-generated code for 8.11 (#2075) 2023-11-15 13:09:10 -06:00
c1dc8bb6b0 Auto-generated code for 8.11 (#2065) 2023-11-13 13:43:09 -06:00
a79bfac7fb [Backport 8.11] Update how users/permissions are set up on CI Docker image (#2071)
Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-11-08 13:24:38 -06:00
ce1c029204 [Backport 8.11] Fix arg-parsing issue in codegen script (#2069)
Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-11-08 13:09:11 -06:00
e269443ecb Auto-generated code for 8.11 (#2062) 2023-11-06 11:00:26 -06:00
5c61c15bfa Bump version to 8.11.0 (#2063) 2023-11-06 10:58:15 -06:00
d64edd74da Auto-generated code for 8.11 (#2057) 2023-11-03 10:28:22 -05:00
272d04f36b [Backport 8.11] Stop supporting Node v14 and v16 (#2055) (#2056) 2023-11-02 13:50:56 -05:00
1c68dfdd8f [Backport 8.11] Add more docs about keep-alive connections (#2050)
(cherry picked from commit f1e83ae853)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-11-02 10:20:39 -05:00
e59d6c5985 Auto-generated code for 8.11 (#2046) 2023-11-01 11:14:32 -05:00
fae6e98da1 [Backport 8.11] Add important flag to proxy config docs (#2043)
(cherry picked from commit f7ed0394bf)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-10-24 15:40:41 -05:00
7048c323c5 [Backport 8.11] Improve proxy configuration docs (#2035) (#2037) 2023-10-24 14:10:31 -05:00
4fd11a9135 Auto-generated code for 8.11 (#2029) 2023-10-16 13:02:47 -05:00
3806 changed files with 11842 additions and 92476 deletions

View File

@@ -3,14 +3,14 @@ FROM node:$NODE_VERSION
# Install required tools
RUN apt-get clean -y && \
    apt-get -qy update && \
    apt-get -y install zip && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
WORKDIR /usr/src/app
COPY package.json .
-RUN npm install
+RUN npm install --production=false
COPY . .

View File

@@ -1,30 +0,0 @@
ARG NODE_JS_VERSION=${NODE_JS_VERSION:-18}
FROM node:${NODE_JS_VERSION}
ARG BUILDER_UID=1000
ARG BUILDER_GID=1000
ENV BUILDER_USER elastic
ENV BUILDER_GROUP elastic
# install zip util
RUN apt-get clean -y && \
apt-get update -y && \
apt-get install -y zip
# Set user permissions and directory
RUN (id -g ${BUILDER_GID} || groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP}) \
&& (id -u ${BUILDER_UID} || useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GID} -m elastic) \
&& mkdir -p /usr/src/elasticsearch-js \
&& chown -R ${BUILDER_UID}:${BUILDER_GID} /usr/src/
WORKDIR /usr/src/elasticsearch-js
# run remainder of commands as non-root user
USER ${BUILDER_UID}:${BUILDER_GID}
# install dependencies
COPY package.json .
RUN npm install
# copy project files
COPY . .

View File

@@ -2,7 +2,7 @@
#
# Shared cleanup routines between different steps
#
-# Please source .buildkite/functions/imports.sh as a whole not just this file
+# Please source .ci/functions/imports.sh as a whole not just this file
#
# Version 1.0.0
# - Initial version after refactor

View File

@@ -2,7 +2,7 @@
#
# Exposes a routine scripts can call to wait for a container if that container set up a health command
#
-# Please source .buildkite/functions/imports.sh as a whole not just this file
+# Please source .ci/functions/imports.sh as a whole not just this file
#
# Version 1.0.1
# - Initial version after refactor

View File

@@ -6,7 +6,7 @@ steps:
    env:
      NODE_VERSION: "{{ matrix.nodejs }}"
      TEST_SUITE: "{{ matrix.suite }}"
-      STACK_VERSION: 8.16.0
+      STACK_VERSION: 8.11.0
    matrix:
      setup:
        suite:
@@ -15,7 +15,6 @@ steps:
        nodejs:
          - "18"
          - "20"
-          - "22"
    command: ./.buildkite/run-tests.sh
    artifact_paths: "./junit-output/junit-*.xml"
  - wait: ~
@@ -27,6 +26,6 @@ steps:
    plugins:
      - junit-annotate#v2.4.1:
          artifacts: "junit-output/junit-*.xml"
-          job-uuid-file-pattern: "junit-(.*).xml"
+          job-uuid-file-pattern: 'junit-(.*).xml'
          fail-build-on-error: true
          failure-format: file

View File

@@ -9,6 +9,7 @@
    "\\.md$",
    "\\.asciidoc$",
    "^docs\\/",
+   "^\\.ci\\/",
    "^scripts\\/",
    "^catalog-info\\.yaml$",
    "^test\\/unit\\/",

30
.ci/Dockerfile Normal file
View File

@@ -0,0 +1,30 @@
ARG NODE_JS_VERSION=18
FROM node:${NODE_JS_VERSION}
ARG BUILDER_UID=1000
ARG BUILDER_GID=1000
ENV BUILDER_USER elastic
ENV BUILDER_GROUP elastic
# install zip util
RUN apt-get clean -y && \
apt-get update -y && \
apt-get install -y zip
# Set user permissions and directory
RUN (id -g ${BUILDER_GID} || groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP}) \
&& (id -u ${BUILDER_UID} || useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GID} -m elastic) \
&& mkdir -p /usr/src/elasticsearch-js \
&& chown -R ${BUILDER_UID}:${BUILDER_GID} /usr/src/
WORKDIR /usr/src/elasticsearch-js
# run remainder of commands as non-root user
USER ${BUILDER_UID}:${BUILDER_GID}
# install dependencies
COPY package.json .
RUN npm install --production=false
# copy project files
COPY . .

View File

@@ -74,15 +74,14 @@ async function release (args) {
async function bump (args) {
  assert(args.length === 1, 'Bump task expects one parameter')
- let [version] = args
+ const [version] = args
  const packageJson = JSON.parse(await readFile(
    join(import.meta.url, '..', 'package.json'),
    'utf8'
  ))
- if (version.split('.').length === 2) version = `${version}.0`
  const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
- assert(semver.valid(cleanVersion), `${cleanVersion} is not seen as a valid semver version. raw version: ${version}`)
+ assert(semver.valid(cleanVersion))
  packageJson.version = cleanVersion
  packageJson.versionCanary = `${cleanVersion}-canary.0`
@@ -92,10 +91,10 @@ async function bump (args) {
    'utf8'
  )
- const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'), 'utf8')
+ const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'))
  await writeFile(
    join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
-   pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}`),
+   pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}-SNAPSHOT`), // eslint-disable-line
    'utf8'
  )
}
@@ -125,13 +124,6 @@ async function codegen (args) {
  await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
  await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
  await $`npm run build`
- // run docs example generation
- if (version === 'main') {
-   await $`node ./scripts/generate-docs-examples.js`
- } else {
-   await $`node ./scripts/generate-docs-examples.js ${version.split('.').slice(0, 2).join('.')}`
- }
}
function onError (err) {

214
.ci/make.sh Executable file
View File

@@ -0,0 +1,214 @@
#!/usr/bin/env bash
# ------------------------------------------------------- #
#
# Build entry script for elasticsearch-js
#
# Must be called: ./.ci/make.sh <target> <params>
#
# Version: 1.1.0
#
# Targets:
# ---------------------------
# assemble <VERSION> : build client artifacts with version
# bump <VERSION> : bump client internals to version
# bumpmatrix <VERSION> : bump stack version in test matrix to version
# codegen <VERSION> : generate endpoints
# docsgen <VERSION> : generate documentation
# examplegen : generate the doc examples
# clean : clean workspace
#
# ------------------------------------------------------- #
# ------------------------------------------------------- #
# Bootstrap
# ------------------------------------------------------- #
script_path=$(dirname "$(realpath -s "$0")")
repo=$(realpath "$script_path/../")
# shellcheck disable=SC1090
CMD=$1
TASK=$1
TASK_ARGS=()
VERSION=$2
STACK_VERSION=$VERSION
set -euo pipefail
product="elastic/elasticsearch-js"
output_folder=".ci/output"
codegen_folder=".ci/output"
OUTPUT_DIR="$repo/${output_folder}"
NODE_JS_VERSION=18
WORKFLOW=${WORKFLOW-staging}
mkdir -p "$OUTPUT_DIR"
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
case $CMD in
clean)
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
rm -rf "$output_folder"
echo -e "\033[32;1mdone.\033[0m"
exit 0
;;
assemble)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: assemble artifact $VERSION\033[0m"
TASK=release
TASK_ARGS=("$VERSION" "$output_folder")
;;
codegen)
if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
# fall back to branch name or `main` if no VERSION is set
branch_name=$(git rev-parse --abbrev-ref HEAD)
if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
VERSION="$branch_name"
else
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using \`main\`\033[0m"
VERSION="main"
fi
fi
if [ "$VERSION" = 'main' ]; then
echo -e "\033[36;1mTARGET: codegen API $VERSION\033[0m"
else
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
fi
TASK=codegen
TASK_ARGS=("$VERSION")
;;
docsgen)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
examplesgen)
echo -e "\033[36;1mTARGET: generate examples\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
bump)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
TASK=bump
TASK_ARGS=("$VERSION")
;;
bumpmatrix)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
TASK=bumpmatrix
TASK_ARGS=("$VERSION")
;;
*)
echo -e "\n'$CMD' is not supported right now\n"
echo -e "\nUsage:"
echo -e "\t $0 release \$VERSION\n"
echo -e "\t $0 bump \$VERSION"
echo -e "\t $0 codegen \$VERSION"
exit 1
esac
# ------------------------------------------------------- #
# Build Container
# ------------------------------------------------------- #
echo -e "\033[34;1mINFO: building $product container\033[0m"
docker build \
--file .ci/Dockerfile \
--tag "$product" \
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
--build-arg "BUILDER_UID=$(id -u)" \
--build-arg "BUILDER_GID=$(id -g)" \
.
# ------------------------------------------------------- #
# Run the Container
# ------------------------------------------------------- #
echo -e "\033[34;1mINFO: running $product container\033[0m"
if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}" ]]; then
echo -e "\033[34;1mINFO: Running in local mode"
docker run \
-u "$(id -u):$(id -g)" \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
--volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
--env "WORKFLOW=$WORKFLOW" \
--name make-elasticsearch-js \
--rm \
$product \
/bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
else
echo -e "\033[34;1mINFO: Running in CI mode"
docker run \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
-u "$(id -u):$(id -g)" \
--env "WORKFLOW=$WORKFLOW" \
--name make-elasticsearch-js \
--rm \
$product \
/bin/bash -c "cd /usr/src && \
git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
mkdir -p /usr/src/elastic-client-generator-js/output && \
cd /usr/src/elasticsearch-js && \
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
fi
# ------------------------------------------------------- #
# Post Command tasks & checks
# ------------------------------------------------------- #
if [[ "$CMD" == "assemble" ]]; then
if compgen -G ".ci/output/*" > /dev/null; then
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "bump" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "codegen" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "docsgen" ]]; then
echo "TODO"
fi
if [[ "$CMD" == "examplesgen" ]]; then
echo "TODO"
fi

View File

@@ -5,4 +5,3 @@ elasticsearch
.git
lib
junit-output
-.tap

49
.github/ISSUE_TEMPLATE/bug.md vendored Normal file
View File

@@ -0,0 +1,49 @@
---
name: 🐛 Bug report
about: Create a report to help us improve
---
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `not reproducible` label.**
## 🐛 Bug Report
A clear and concise description of what the bug is.
## To Reproduce
Steps to reproduce the behavior:
Paste your code here:
```js
```
<!--
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed
to spin up a three nodes Elasticsearch cluster with security enabled.
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
https://github.com/delvedor/es-reproduce-issue
--->
## Expected behavior
A clear and concise description of what you expected to happen.
Paste the results here:
```js
```
## Your Environment
- *node version*: 6,8,10
- `@elastic/elasticsearch` *version*: >=7.0.0
- *os*: Mac, Windows, Linux
- *any other relevant information*

View File

@@ -1,66 +0,0 @@
---
name: 🐛 Bug report
description: Create a report to help us improve
labels: ["Category: Bug"]
body:
- type: markdown
attributes:
value: |
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**
- type: textarea
id: bug-report
attributes:
label: 🐛 Bug report
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: reproduction
attributes:
label: To reproduce
description: Steps to reproduce the behavior
validations:
required: true
- type: textarea
id: expected
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: input
id: node-js-version
attributes:
label: Node.js version
placeholder: 18.x, 20.x, etc.
validations:
required: true
- type: input
id: client-version
attributes:
label: "@elastic/elasticsearch version"
placeholder: 7.17.0, 8.14.1, etc.
validations:
required: true
- type: input
id: os
attributes:
label: Operating system
placeholder: Ubuntu 22.04, macOS, etc.
validations:
required: true
- type: input
id: env-info
attributes:
label: Any other relevant environment information

22
.github/ISSUE_TEMPLATE/feature.md vendored Normal file
View File

@@ -0,0 +1,22 @@
---
name: 🚀 Feature Proposal
about: Submit a proposal for a new feature
---
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `invalid` label.**
## 🚀 Feature Proposal
A clear and concise description of what the feature is.
## Motivation
Please outline the motivation for the proposal.
## Example
Please provide an example for how this feature would be used.

View File

@@ -1,33 +0,0 @@
---
name: 🚀 Feature Proposal
description: Submit a proposal for a new feature
labels: ["Category: Feature"]
body:
- type: markdown
attributes:
value: |
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**
- type: textarea
id: feature-proposal
attributes:
label: 🚀 Feature Proposal
description: A clear and concise description of what the feature is.
validations:
required: true
- type: textarea
id: motivation
attributes:
label: Motivation
description: Please outline the motivation for the proposal.
- type: textarea
id: example
attributes:
label: Example
description: Please provide an example for how this feature would be used.

10
.github/ISSUE_TEMPLATE/question.md vendored Normal file
View File

@@ -0,0 +1,10 @@
---
name: 💬 Questions / Help
about: If you have questions, please check our Gitter or Help repo
---
## 💬 Questions and Help
### Please note that this issue tracker is not a help forum and this issue may be closed.
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

View File

@@ -1,21 +0,0 @@
---
name: 💬 Questions / Help
description: If you have questions, please check our community forum or support
labels: ["Category: Question"]
body:
- type: markdown
attributes:
value: |
### Please note that this issue tracker is not a help forum and this issue may be closed.
Please check our [community forum](https://discuss.elastic.co/) or [contact Elastic support](https://www.elastic.co/support) if your issue is not specifically related to the documented functionality of this client library.
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
- type: textarea
id: question
attributes:
label: Question
description: Your question or comment
validations:
required: true

56
.github/ISSUE_TEMPLATE/regression.md vendored Normal file
View File

@@ -0,0 +1,56 @@
---
name: 💥 Regression Report
about: Report unexpected behavior that worked in previous versions
---
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `invalid` label.**
## 💥 Regression Report
A clear and concise description of what the regression is.
## Last working version
Worked up to version:
Stopped working in version:
## To Reproduce
Steps to reproduce the behavior:
Paste your code here:
```js
```
<!--
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed
to spin up a three nodes Elasticsearch cluster with security enabled.
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
https://github.com/delvedor/es-reproduce-issue
--->
## Expected behavior
A clear and concise description of what you expected to happen.
Paste the results here:
```js
```
## Your Environment
- *node version*: 6,8,10
- `@elastic/elasticsearch` *version*: >=7.0.0
- *typescript version*: 4.x (if applicable)
- *os*: Mac, Windows, Linux
- *any other relevant information*

View File

@@ -1,92 +0,0 @@
---
name: 💥 Regression Report
description: Report unexpected behavior that worked in previous versions
labels: ["Category: Bug"]
body:
- type: markdown
attributes:
value: |
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**
- type: textarea
id: report
attributes:
label: Regression report
description: A clear and concise description of what the regression is.
validations:
required: true
- type: input
id: last-working-version
attributes:
label: Last working version
description: Version of `@elastic/elasticsearch` where this last worked.
validations:
required: true
- type: textarea
id: to-reproduce
attributes:
label: To reproduce
description: |
Paste your code here that shows how to reproduce the behavior.
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed to spin up a three nodes Elasticsearch cluster with security enabled.
[This repository](https://github.com/delvedor/es-reproduce-issue) also contains a preconfigured client instance that you can use to reproduce the issue.
validations:
required: true
- type: textarea
id: expected-behavior
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: input
id: node-version
attributes:
label: Node.js version
description: What version of Node.js you are using (`node --version`).
validations:
required: true
- type: input
id: typescript-version
attributes:
label: TypeScript version
description: TypeScript version you are using, if applicable.
- type: input
id: elasticsearch-client-version
attributes:
label: Elasticsearch client version
description: What version of `@elastic/elasticsearch` and `@elastic/transport` you are using (`npm ls -a | grep '@elastic'`).
validations:
required: true
- type: input
id: elasticsearch-version
attributes:
label: Elasticsearch server version
description: What version of Elasticsearch you are using.
validations:
required: true
- type: input
id: operating-system
attributes:
label: Operating system
description: What operating system you are running.
placeholder: e.g. Linux, MacOS, Windows
- type: textarea
id: env-info
attributes:
label: Any other relevant environment information.

6
.github/ISSUE_TEMPLATE/security.md vendored Normal file
View File

@@ -0,0 +1,6 @@
---
name: 👮 Security Issue
about: Responsible Disclosure
---
If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).

View File

@@ -1,8 +0,0 @@
---
name: 👮 Security Issue
description: Responsible disclosure
body:
- type: markdown
attributes:
value: |
If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).

222
.github/make.sh vendored
View File

@@ -1,222 +0,0 @@
#!/usr/bin/env bash
# ------------------------------------------------------- #
#
# Build entry script for elasticsearch-js
#
# Must be called: ./.github/make.sh <target> <params>
#
# Version: 1.1.0
#
# Targets:
# ---------------------------
# assemble <VERSION> : build client artifacts with version
# bump <VERSION> : bump client internals to version
# bumpmatrix <VERSION> : bump stack version in test matrix to version
# codegen <VERSION> : generate endpoints
# docsgen <VERSION> : generate documentation
# examplegen : generate the doc examples
# clean : clean workspace
#
# ------------------------------------------------------- #
# ------------------------------------------------------- #
# Bootstrap
# ------------------------------------------------------- #
script_path=$(dirname "$(realpath -s "$0")")
repo=$(realpath "$script_path/../")
# shellcheck disable=SC1090
CMD=$1
TASK=$1
TASK_ARGS=()
VERSION=$2
STACK_VERSION=$VERSION
set -euo pipefail
product="elastic/elasticsearch-js"
output_folder=".buildkite/output"
codegen_folder=".buildkite/output"
OUTPUT_DIR="$repo/${output_folder}"
NODE_JS_VERSION=22
WORKFLOW=${WORKFLOW-staging}
mkdir -p "$OUTPUT_DIR"
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
case $CMD in
clean)
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
rm -rf "$output_folder"
echo -e "\033[32;1mdone.\033[0m"
exit 0
;;
assemble)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: assemble artifact $VERSION\033[0m"
TASK=release
TASK_ARGS=("$VERSION" "$output_folder")
;;
codegen)
if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
# fall back to branch name or `main` if no VERSION is set
branch_name=$(git rev-parse --abbrev-ref HEAD)
if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
VERSION="$branch_name"
else
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using \`main\`\033[0m"
VERSION="main"
fi
fi
if [ "$VERSION" = 'main' ]; then
echo -e "\033[36;1mTARGET: codegen API $VERSION\033[0m"
else
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
fi
TASK=codegen
TASK_ARGS=("$VERSION")
;;
docsgen)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
examplesgen)
echo -e "\033[36;1mTARGET: generate examples\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
bump)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
TASK=bump
TASK_ARGS=("$VERSION")
;;
bumpmatrix)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
TASK=bumpmatrix
TASK_ARGS=("$VERSION")
;;
*)
echo -e "\n'$CMD' is not supported right now\n"
echo -e "\nUsage:"
echo -e "\t $0 release \$VERSION\n"
echo -e "\t $0 bump \$VERSION"
echo -e "\t $0 codegen \$VERSION"
exit 1
;;
esac
# ------------------------------------------------------- #
# Build Container
# ------------------------------------------------------- #
echo -e "\033[34;1mINFO: building $product container\033[0m"
docker build \
--file .buildkite/Dockerfile-make \
--tag "$product" \
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
--build-arg "BUILDER_UID=$(id -u)" \
--build-arg "BUILDER_GID=$(id -g)" \
.
# ------------------------------------------------------- #
# Run the Container
# ------------------------------------------------------- #
echo -e "\033[34;1mINFO: running $product container\033[0m"
if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}" ]]; then
echo -e "\033[34;1mINFO: Running in local mode"
docker run \
-u "$(id -u):$(id -g)" \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
--volume "$(realpath "$repo/../elastic-client-generator-js"):/usr/src/elastic-client-generator-js" \
--env "WORKFLOW=$WORKFLOW" \
--name make-elasticsearch-js \
--rm \
$product \
/bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
else
echo -e "\033[34;1mINFO: Running in CI mode"
# determine branch to clone
GENERATOR_BRANCH="main"
if [[ "$VERSION" == 8.* ]]; then
GENERATOR_BRANCH="8.x"
fi
echo -e "\033[34;1mINFO: Generator branch: $GENERATOR_BRANCH"
docker run \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
-u "$(id -u):$(id -g)" \
--env "WORKFLOW=$WORKFLOW" \
--name make-elasticsearch-js \
--rm \
$product \
/bin/bash -c "cd /usr/src && \
git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
mkdir -p /usr/src/elastic-client-generator-js/output && \
cd /usr/src/elasticsearch-js && \
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
fi
# ------------------------------------------------------- #
# Post Command tasks & checks
# ------------------------------------------------------- #
if [[ "$CMD" == "assemble" ]]; then
if compgen -G ".buildkite/output/*" >/dev/null; then
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "bump" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "codegen" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "docsgen" ]]; then
echo "TODO"
fi
if [[ "$CMD" == "examplesgen" ]]; then
echo "TODO"
fi

View File

@@ -1,27 +1,16 @@
---
name: Backport
on:
-  pull_request_target:
+  pull_request:
    types:
      - closed
      - labeled
jobs:
  backport:
-    name: Backport
    runs-on: ubuntu-latest
-    # Only react to merged PRs for security reasons.
-    # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
-    if: >
-      github.event.pull_request.merged
-      && (
-        github.event.action == 'closed'
-        || (
-          github.event.action == 'labeled'
-          && contains(github.event.label.name, 'backport')
-        )
-      )
+    name: Backport
    steps:
-      - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4
+      - name: Backport
+        uses: tibdex/backport@7005ef85c4562bc23b0e9b4a9940d5922f439750
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,50 +1,34 @@
---
name: Node CI
-on:
-  pull_request: {}
+on: [push, pull_request]
jobs:
-  paths-filter:
-    name: Detect files changed
-    runs-on: ubuntu-latest
-    outputs:
-      src-only: "${{ steps.changes.outputs.src-only }}"
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - uses: dorny/paths-filter/@v3.0.2
-        id: changes
-        with:
-          filters: |
-            src-only:
-              - '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.buildkite,scripts}/**/*|catalog-info.yaml)'
-              - '.github/workflows/**'
  test:
    name: Test
    runs-on: ${{ matrix.os }}
-    needs: paths-filter
-    # only run if code relevant to unit tests was changed
-    if: needs.paths-filter.outputs.src-only == 'true'
    strategy:
      fail-fast: false
      matrix:
-        node-version: [18.x, 20.x, 22.x, 23.x]
+        node-version: [18.x, 20.x]
        os: [ubuntu-latest, windows-latest, macOS-latest]
    steps:
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
+      - uses: actions/checkout@v3
      - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
+      # workaround for failing tests on Node.js 14.x
+      # see https://github.com/actions/setup-node/issues/411
+      - name: Force install specific npm version
+        run: |
+          npm install --global npm@8.3.1
+          npm install --global npm@9.7.1
      - name: Install
        run: |
          npm install
@@ -57,23 +41,21 @@ jobs:
        run: |
          npm run test:unit
-      - name: ECMAScript module test
-        run: |
-          npm run test:esm
  license:
    name: License check
    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - name: Use Node.js
-        uses: actions/setup-node@v4
+    strategy:
+      matrix:
+        node-version: [20.x]
+    steps:
+      - uses: actions/checkout@v3
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v3
        with:
-          node-version: 22.x
+          node-version: ${{ matrix.node-version }}
      - name: Install
        run: |
@@ -82,49 +64,3 @@ jobs:
      - name: License checker
        run: |
          npm run license-checker
-  test-bun:
-    name: Test Bun
-    runs-on: ${{ matrix.os }}
-    needs: paths-filter
-    # only run if code relevant to unit tests was changed
-    if: needs.paths-filter.outputs.src-only == 'true'
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, windows-latest, macOS-latest]
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - name: Use Bun
-        uses: oven-sh/setup-bun@v2
-      - name: Install
-        run: |
-          bun install
-      - name: Lint
-        run: |
-          bun run lint
-      - name: Unit test
-        run: |
-          bun run test:unit-bun
-      - name: ECMAScript module test
-        run: |
-          bun run test:esm
-  auto-approve:
-    name: Auto-approve
-    needs: [test, license]
-    runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
-    if: github.actor == 'elasticmachine'
-    steps:
-      - uses: hmarr/auto-approve-action@v4

View File

@@ -1,38 +0,0 @@
name: Publish Package to npm
on:
workflow_dispatch:
inputs:
branch:
description: "Git branch to build and publish"
required: true
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
id-token: write
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
ref: ${{ github.event.inputs.branch }}
- uses: actions/setup-node@v4
with:
node-version: "22.x"
registry-url: "https://registry.npmjs.org"
- run: npm install -g npm
- run: npm install
- run: npm test
- run: npm publish --provenance --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
version=$(jq -r .version package.json)
gh release create \
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
--target "$BRANCH_NAME" \
-t "v$version" \
"v$version"
env:
BRANCH_NAME: ${{ github.event.inputs.branch }}
GH_TOKEN: ${{ github.token }}

View File

@@ -1,43 +0,0 @@
#!/usr/bin/env bash
set -exuo pipefail
merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
pr_shortcode="elastic/elasticsearch-js#$pull_request_id"
# generate patch file
cd "$GITHUB_WORKSPACE/stack"
git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff
# set committer info
git config --global user.email "elasticmachine@users.noreply.github.com"
git config --global user.name "Elastic Machine"
# apply patch file
cd "$GITHUB_WORKSPACE/serverless"
git am -C1 --reject /tmp/patch.diff || git am --quit
# generate PR body comment
comment="Patch applied from $pr_shortcode"
# enumerate rejected patches in PR comment
has_rejects='false'
for f in ./**/*.rej; do
has_rejects='true'
comment="$comment
## Rejected patch \`$f\` must be resolved:
\`\`\`diff
$(cat "$f")
\`\`\`
"
done
# delete .rej files
rm -fv ./**/*.rej
# send data to output parameters
echo "$comment" > /tmp/pr_body
echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"

View File

@@ -1,53 +0,0 @@
---
name: Apply PR changes to serverless
on:
pull_request_target:
types:
- closed
- labeled
jobs:
apply-patch:
name: Apply patch
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& (
(
github.event.action == 'closed'
&& contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
)
||
(
github.event.action == 'labeled'
&& github.event.label.name == 'apply-to-serverless'
)
)
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
repository: elastic/elasticsearch-js
ref: main
path: stack
fetch-depth: 0
- uses: actions/checkout@v4
with:
persist-credentials: false
repository: elastic/elasticsearch-serverless-js
ref: main
path: serverless
- name: Apply patch from stack to serverless
id: apply-patch
run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
- uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GH_TOKEN }}
path: serverless
title: "Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}"
commit-message: "Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}"
body-path: /tmp/pr_body
draft: "${{ steps.apply-patch.outputs.PR_DRAFT }}"
add-paths: ":!*.rej"

View File

@@ -1,21 +0,0 @@
---
name: 'Close stale issues and PRs'
on:
schedule:
- cron: '30 1 * * *'
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
with:
stale-issue-label: stale
stale-pr-label: stale
days-before-stale: 90
days-before-close: 14
exempt-issue-labels: 'good first issue'
close-issue-label: closed-stale
close-pr-label: closed-stale
stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'

5
.gitignore vendored
View File

@@ -63,8 +63,3 @@ test/bundlers/**/bundle.js
test/bundlers/parcel-test/.parcel-cache
lib
junit-output
-bun.lockb
-test-results
-processinfo
-.tap

View File

@@ -64,6 +64,7 @@ test
scripts
# ci configuration
+.ci
.travis.yml
.buildkite
certs
@@ -72,5 +73,3 @@ CODE_OF_CONDUCT.md
CONTRIBUTING.md
src
-bun.lockb
-.tap

11
Makefile Normal file
View File

@@ -0,0 +1,11 @@
.PHONY: integration-setup
integration-setup: integration-cleanup
DETACH=true .ci/run-elasticsearch.sh
.PHONY: integration-cleanup
integration-cleanup:
docker container rm --force --volumes instance || true
.PHONY: integration
integration: integration-setup
npm run test:integration

View File

@@ -21,25 +21,6 @@ of the getting started documentation.
Refer to the [Connecting section](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_connecting)
of the getting started documentation.
-## Compatibility
-The Elasticsearch client is compatible with currently maintained JS versions.
-Language clients are forward compatible; meaning that clients support
-communicating with greater or equal minor versions of Elasticsearch without
-breaking. It does not mean that the client automatically supports new features
-of newer Elasticsearch versions; it is only possible after a release of a new
-client version. For example, a 8.12 client version won't automatically support
-the new features of the 8.13 version of Elasticsearch, the 8.13 client version
-is required for that. Elasticsearch language clients are only backwards
-compatible with default distributions and without guarantees made.
-| Elasticsearch Version | Elasticsearch-JS Branch | Supported |
-| --------------------- | ------------------------ | --------- |
-| main | main | |
-| 8.x | 8.x | 8.x |
-| 7.x | 7.x | 7.17 |
## Usage
* [Creating an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_creating_an_index)

View File

@@ -6,7 +6,7 @@ metadata:
  name: elasticsearch-js
spec:
  type: library
-  owner: group:devtools-team
+  owner: group:clients-team
  lifecycle: production
---
@@ -18,7 +18,7 @@ metadata:
  description: elasticsearch-js - integration tests
spec:
  type: buildkite-pipeline
-  owner: group:devtools-team
+  owner: group:clients-team
  system: buildkite
  implementation:
    apiVersion: buildkite.elastic.dev/v1
@@ -29,7 +29,7 @@ spec:
      repository: elastic/elasticsearch-js
      pipeline_file: .buildkite/pipeline.yml
      teams:
-        devtools-team:
+        clients-team:
          access_level: MANAGE_BUILD_AND_READ
        everyone:
          access_level: READ_ONLY
@@ -37,11 +37,14 @@ spec:
      build_pull_requests: false
      build_branches: false
      cancel_intermediate_builds: true
-      cancel_intermediate_builds_branch_filter: "!main"
+      cancel_intermediate_builds_branch_filter: '!main'
      schedules:
-        main:
-          branch: "main"
-          cronline: "@daily"
-        8_14:
-          branch: "8.14"
-          cronline: "@daily"
+        main_semi_daily:
+          branch: 'main'
+          cronline: '0 */12 * * *'
+        8_12_semi_daily:
+          branch: '8.12'
+          cronline: '0 */12 * * *'
+        8_11_daily:
+          branch: '8.11'
+          cronline: '@daily'

View File

@@ -252,19 +252,19 @@ const client = new Client({
----

|`disablePrototypePoisoningProtection`
-|`boolean`, `'proto'`, `'constructor'` - The client can protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more about this security concern. If needed, you can enable prototype poisoning protection entirely (`false`) or one of the two checks (`'proto'` or `'constructor'`). For performance reasons, it is disabled by default. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
-_Default:_ `true`
+|`boolean`, `'proto'`, `'constructor'` - By the default the client will protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more. If needed you can disable prototype poisoning protection entirely or one of the two checks. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
+_Default:_ `false`

|`caFingerprint`
|`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
_Default:_ `null`

|`maxResponseSize`
-|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
+|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENTGH +
_Default:_ `null`

|`maxCompressedResponseSize`
-|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH +
+|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENTGH +
_Default:_ `null`

|===
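
As a rough, hypothetical illustration (not part of this diff), the options documented in the table above map one-to-one onto `Client` constructor settings; every value below is a placeholder:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',
  auth: { apiKey: 'base64EncodedApiKey' },      // placeholder credentials
  disablePrototypePoisoningProtection: false,   // boolean, 'proto', or 'constructor'
  caFingerprint: '20:0D:CA:FA:96:...',          // SHA256 digest of the CA certificate (placeholder)
  maxResponseSize: 10 * 1024 * 1024,            // abort uncompressed responses over ~10 MB
  maxCompressedResponseSize: 10 * 1024 * 1024   // abort compressed responses over ~10 MB
})
----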

View File

@@ -1,260 +1,6 @@
[[changelog-client]]
== Release notes
== Release notes == Release notes
[discrete]
=== 8.16.4
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.16`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
===== Report correct transport connection type in telemetry
The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
[discrete]
=== 8.16.3
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.16`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
=== 8.16.2
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.16`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
===== Drop testing artifacts from npm package
Tap, the unit testing tool used by this project, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
[discrete]
=== 8.16.1
[discrete]
==== Fixes
[discrete]
===== Fix ECMAScript imports
Fixed package configuration to correctly support native ECMAScript `import` syntax.
[discrete]
=== 8.16.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.16`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.16/release-notes-8.16.0.html[here].
[discrete]
===== Support Apache Arrow in ES|QL helper
The ES|QL helper can now return results as an Apache Arrow `Table` or `RecordBatchReader`, which enables high-performance calculations on ES|QL results, even if the response data is larger than the system's available memory. See <<esql-helper>> for more information.
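
A rough sketch of how the feature above might be consumed (not taken from this diff; it assumes an existing `client` instance, the `toArrowTable()` method name from the 8.16 helper docs, and a placeholder index/query):

[source,js]
----
// Consume ES|QL results as an Apache Arrow Table instead of JS objects.
const table = await client.helpers.esql({
  query: 'FROM sample-data | LIMIT 10'
}).toArrowTable()

// Arrow tables can be iterated row by row without materializing the full result set.
for (const row of table) {
  console.log(row.toJSON())
}
----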
[discrete]
==== Fixes
[discrete]
===== Pass prototype poisoning options to serializer correctly
The client's `disablePrototypePoisoningProtection` option was set to `true` by default, but when it was set to any other value it was ignored, making it impossible to enable prototype poisoning protection without providing a custom serializer implementation.
[discrete]
=== 8.15.3
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.15`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
===== Drop testing artifacts from npm package
Tap, the unit testing tool, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
[discrete]
=== 8.15.2
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.15`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
=== 8.15.1
[discrete]
==== Fixes
[discrete]
===== Improved support for Elasticsearch `v8.15`
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
[discrete]
=== 8.15.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.15.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
[discrete]
===== OpenTelemetry zero-code instrumentation support
For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
See {jsclient}/observability.html#_opentelemetry[the docs]
for more information.
[discrete]
=== 8.14.1
[discrete]
==== Features
[discrete]
===== Improved support for Elasticsearch `8.14`
Updated types based on fixes and changes to the Elasticsearch specification.
[discrete]
=== 8.14.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.14.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
[discrete]
===== ES|QL object API helper
A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
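
A rough sketch of the object-mapping helper described above (not from this diff; it assumes an existing `client` instance and the `toRecords()` method added in #2238):

[source,js]
----
// Each row of the ES|QL response becomes a plain object keyed by column name.
// In TypeScript, a type parameter such as toRecords<EventDoc>() can describe the row shape.
const records = await client.helpers.esql({
  query: 'FROM sample-data | LIMIT 10'
}).toRecords()

for (const record of records) {
  console.log(record)
}
----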
[discrete]
===== `onSuccess` callback added to bulk helper
The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
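
A rough sketch of the callback described above (not from this diff; it assumes an existing `client`, a `documents` iterable, and the `onSuccess` payload shape is an assumption):

[source,js]
----
await client.helpers.bulk({
  datasource: documents,                      // any iterable or stream of documents
  onDocument () {
    return { index: { _index: 'my-index' } }  // the operation to run for each document
  },
  onSuccess ({ result, document }) {
    console.log('indexed', document)          // invoked once per successful operation
  },
  onDrop (doc) {
    console.error('dropped', doc)             // still available for failed operations
  }
})
----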
[discrete]
===== Request retries are more polite
https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
[discrete]
=== 8.13.1
[discrete]
==== Fixes
[discrete]
===== Pin @elastic/transport to `~8.4.1`
Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
[discrete]
=== 8.13.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.13.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
[discrete]
==== Fixes
[discrete]
===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
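
For illustration only (not from this diff, and the values are placeholders), the configuration affected by this fix looks roughly like the following; with the fix, the string form of `node` inherits the same client-level defaults as an explicit `ConnectionOptions` object would:

[source,js]
----
const client = new Client({
  node: 'https://localhost:9200',  // plain string: previously did not inherit the defaults below
  requestTimeout: 60000,
  maxRetries: 5
})
----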
[discrete]
=== 8.12.3
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.4.1`
Switching from `^8.4.1` to `~8.4.1` ensures 8.12 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.12.2
[discrete]
==== Fixes
[discrete]
===== Upgrade transport to 8.4.1 https://github.com/elastic/elasticsearch-js/pull/2137[#2137]
Upgrades `@elastic/transport` to 8.4.1 to resolve https://github.com/elastic/elastic-transport-js/pull/83[a bug] where arrays in error diagnostics were unintentionally transformed into objects.
[discrete]
=== 8.12.1
[discrete]
==== Fixes
[discrete]
===== Fix hang in bulk helper semaphore https://github.com/elastic/elasticsearch-js/pull/2027[#2027]
The failing state could be reached when a server's response times are slower than flushInterval.
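
For context, an illustrative configuration (not from this diff; it assumes an existing `client` and a `documents` iterable) showing the bulk helper settings involved; the hang could occur when responses took longer than `flushInterval`:

[source,js]
----
await client.helpers.bulk({
  datasource: documents,
  flushBytes: 5 * 1024 * 1024,   // flush after ~5 MB of buffered operations...
  flushInterval: 30000,          // ...or after 30 seconds, whichever comes first
  concurrency: 5,                // number of in-flight bulk requests
  onDocument () {
    return { index: { _index: 'my-index' } }
  }
})
----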
[discrete]
=== 8.12.0
[discrete]
=== Features
[discrete]
===== Support for Elasticsearch `v8.12.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
[discrete]
=== 8.11.1
@@ -345,7 +91,7 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.
[discrete]
===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
-In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
+In the https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.

[discrete]
==== Fixes
@@ -626,7 +372,7 @@ client.search({ params }, { options }, (err, result) => {
client.search({ params }, { options })
  .then(console.log)
  .catch(console.log)

// async-style (sugar syntax on top of promises)
const response = await client.search({ params }, { options })
console.log(response)
@@ -786,7 +532,7 @@ If you weren't extending the internals of the client, this won't be a breaking c
*Breaking: Yes* | *Migration effort: Medium*

Currently, every path or query parameter could be expressed in both `snake_case` and `camelCase`. Internally the client will convert everything to `snake_case`.
This was done in an effort to reduce the friction of migrating from the legacy to the new client, but now it no longer makes sense.
If you are already using `snake_case` keys, this won't be a breaking change for you.

@ -2,11 +2,10 @@
== Configuration
The client is designed to be easily configured for your needs. In the following
section, you can see the possible options that you can use to configure it.
* <<basic-config>>
* <<advanced-config>>
* <<timeout-best-practices>>
* <<child>>
* <<client-testing>>

@ -1,7 +1,7 @@
[[client-connecting]]
== Connecting
This page contains the information you need to connect and use the Client with
{es}.
**On this page**
@ -19,7 +19,7 @@ This page contains the information you need to connect and use the Client with
[discrete]
=== Authentication
This document contains code snippets to show you how to connect to various {es}
providers.
@ -27,18 +27,18 @@ providers.
[[auth-ec]]
==== Elastic Cloud
If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers
an easy way to connect to it via the `cloud` option. You must pass the Cloud ID
that you can find in the cloud console, then your username and password inside
the `auth` option.
NOTE: When connecting to Elastic Cloud, the client will automatically enable
both request and response compression by default, since it yields significant
throughput improvements. Moreover, the client will also set the tls option
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
override this option by configuring them.
IMPORTANT: Do not enable sniffing when using Elastic Cloud, since the nodes are
behind a load balancer, Elastic Cloud will take care of everything for you.
Take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here]
to know more.
@ -61,18 +61,18 @@ const client = new Client({
[[connect-self-managed-new]]
=== Connecting to a self-managed cluster
By default {es} will start with security features like authentication and TLS
enabled. To connect to the {es} cluster you'll need to configure the Node.js {es}
client to use HTTPS with the generated CA certificate in order to make requests
successfully.
If you're just getting started with {es} we recommend reading the documentation
on https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html[configuring]
and
https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html[starting {es}]
to ensure your cluster is running as expected.
When you start {es} for the first time you'll see a distinct block like the one
below in the output from {es} (you may have to scroll up if it's been a while):
[source,sh]
@ -90,24 +90,24 @@ below in the output from {es} (you may have to scroll up if it's been a while):
----
Depending on the circumstances there are two options for verifying the HTTPS
connection, either verifying with the CA certificate itself or via the HTTP CA
certificate fingerprint.
[discrete]
[[auth-tls]]
==== TLS configuration
The generated root CA certificate can be found in the `certs` directory in your
{es} config location (`$ES_CONF_PATH/certs/http_ca.crt`). If you're running {es}
in Docker there is
https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html[additional documentation for retrieving the CA certificate].
Without any additional configuration you can specify `https://` node urls, and
the certificates used to sign these requests will be verified. To turn off
certificate verification, you must specify a `tls` object in the top level
config and set `rejectUnauthorized: false`. The default `tls` values are the
same that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
uses.
[source,js]
@ -152,7 +152,7 @@ const client = new Client({
})
----
The certificate fingerprint can be calculated using `openssl x509` with the
certificate file:
[source,sh]
@ -160,8 +160,8 @@ certificate file:
openssl x509 -fingerprint -sha256 -noout -in /path/to/http_ca.crt
----
If you don't have access to the generated CA file from {es} you can use the
following script to output the root CA fingerprint of the {es} instance with
`openssl s_client`:
[source,sh]
@ -186,8 +186,8 @@ SHA256 Fingerprint=A5:2D:D9:35:11:E8:C6:04:5E:21:F1:66:54:B7:7C:9E:E0:F3:4A:EA:2
WARNING: Running {es} without security enabled is not recommended.
If your cluster is configured with
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html[security explicitly disabled]
then you can connect via HTTP:
[source,js]
@ -208,14 +208,14 @@ Following you can find all the supported authentication strategies.
[[auth-apikey]]
==== ApiKey authentication
You can use the
{ref-7x}/security-api-create-api-key.html[ApiKey]
authentication by passing the `apiKey` parameter via the `auth` option. The
`apiKey` parameter can be either a base64 encoded string or an object with the
values that you can obtain from the
{ref-7x}/security-api-create-api-key.html[create api key endpoint].
NOTE: If you provide both basic authentication credentials and the ApiKey
configuration, the ApiKey takes precedence.
[source,js]
@ -268,10 +268,10 @@ const client = new Client({
[[auth-basic]]
==== Basic authentication
You can provide your credentials by passing the `username` and `password`
parameters via the `auth` option.
NOTE: If you provide both basic authentication credentials and the Api Key
configuration, the Api Key will take precedence.
[source,js]
@ -342,14 +342,14 @@ const result = await client.search({
}, { meta: true })
----
In this case, the result will be:
[source,ts]
----
{
  body: object | boolean
  statusCode: number
  headers: object
  warnings: [string],
  meta: object
}
----
@ -361,7 +361,7 @@ NOTE: The body is a boolean value when you use `HEAD` APIs.
If needed, you can abort a running request by using the `AbortController` standard.
CAUTION: If you abort a request, the request will fail with a
`RequestAbortedError`.
@ -410,23 +410,19 @@ The supported request specific options are:
[cols=2*]
|===
|`ignore`
|`[number]` - HTTP status codes which should not be considered errors for this request. +
_Default:_ `null`
|`requestTimeout`
|`number` - Max request timeout for the request in milliseconds, it overrides the client default. +
_Default:_ `30000`
|`retryOnTimeout`
|`boolean` - Retry requests that have timed out.
_Default:_ `false`
|`maxRetries`
|`number` - Max number of retries for the request, it overrides the client default. +
_Default:_ `3`
|`compression`
|`string, boolean` - Enables body compression for the request. +
_Options:_ `false`, `'gzip'` +
_Default:_ `false`
@ -450,10 +446,6 @@ _Default:_ `null`
|`any` - Custom object per request. _(you can use it to pass data to the clients events)_ +
_Default:_ `null`
|`opaqueId`
|`string` - Set the `X-Opaque-Id` HTTP header. See {ref}/api-conventions.html#x-opaque-id
_Default:_ `null`
|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
_Default:_ `null`
@ -466,17 +458,6 @@ _Default:_ `null`
|`AbortSignal` - The AbortSignal instance to allow request abortion. +
_Default:_ `null`
|`meta`
|`boolean` - Rather than returning the body, return an object containing `body`, `statusCode`, `headers` and `meta` keys +
_Default_: `false`
|`redaction`
|`object` - Options for redacting potentially sensitive data from error metadata. See <<redaction>>.
|`retryBackoff`
|`(min: number, max: number, attempt: number) => number;` - A function that calculates how long to sleep, in seconds, before the next request retry +
_Default:_ A built-in function that uses exponential backoff with jitter.
|===
[discrete]
@ -556,8 +537,8 @@ Resources used to assess these recommendations:
~Added~ ~in~ ~`v7.10.0`~
If you need to pass through an http(s) proxy for connecting to {es}, the client
out of the box offers a handy configuration for helping you with it. Under the
hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.
IMPORTANT: In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations.
@ -734,5 +715,5 @@ This pre-flight product check allows the client to establish the version of Elas
that it is communicating with. The product check requires one additional HTTP request to
be sent to the server as part of the request pipeline before the main API call is sent.
In most cases, this will succeed during the very first API call that the client sends.
Once the product check completes, no further product check HTTP requests are sent for
subsequent API calls.

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.clearCachedRealms({
realms: "default_file,ldap1",
});
console.log(response);
----

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.forcemerge({
index: ".ds-my-data-stream-2099.03.07-000001",
max_num_segments: 1,
});
console.log(response);
----

@ -1,19 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
query: {
pinned: {
ids: ["1", "4", "100"],
organic: {
match: {
description: "iphone",
},
},
},
},
});
console.log(response);
----

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
settings: {
analysis: {
analyzer: {
my_analyzer: {
tokenizer: "whitespace",
filter: ["stemmer"],
},
},
},
},
});
console.log(response);
----

@ -1,40 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.watcher.putWatch({
id: "cluster_health_watch",
trigger: {
schedule: {
interval: "10s",
},
},
input: {
http: {
request: {
host: "localhost",
port: 9200,
path: "/_cluster/health",
},
},
},
condition: {
compare: {
"ctx.payload.status": {
eq: "red",
},
},
},
actions: {
send_email: {
email: {
to: "username@example.org",
subject: "Cluster Status Warning",
body: "Cluster status is RED",
},
},
},
});
console.log(response);
----

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.migration.postFeatureUpgrade();
console.log(response);
----

@ -1,16 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index",
query: {
match: {
"http.clientip": "40.135.0.0",
},
},
fields: ["http.clientip"],
});
console.log(response);
----

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getDataStream({
name: "my-data-stream",
filter_path: "data_streams.indices.index_name",
});
console.log(response);
----

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.shardStores();
console.log(response);
----

@ -1,15 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.renderSearchTemplate({
source: '{ "query": {{#toJson}}my_query{{/toJson}} }',
params: {
my_query: {
match_all: {},
},
},
});
console.log(response);
----

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
tokenizer: "standard",
filter: ["asciifolding"],
text: "açaí à la carte",
});
console.log(response);
----

@ -1,67 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "italian_example",
settings: {
analysis: {
filter: {
italian_elision: {
type: "elision",
articles: [
"c",
"l",
"all",
"dall",
"dell",
"nell",
"sull",
"coll",
"pell",
"gl",
"agl",
"dagl",
"degl",
"negl",
"sugl",
"un",
"m",
"t",
"s",
"v",
"d",
],
articles_case: true,
},
italian_stop: {
type: "stop",
stopwords: "_italian_",
},
italian_keywords: {
type: "keyword_marker",
keywords: ["esempio"],
},
italian_stemmer: {
type: "stemmer",
language: "light_italian",
},
},
analyzer: {
rebuilt_italian: {
tokenizer: "standard",
filter: [
"italian_elision",
"lowercase",
"italian_stop",
"italian_keywords",
"italian_stemmer",
],
},
},
},
},
});
console.log(response);
----

@ -1,19 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "text_embedding",
inference_id: "my-e5-model",
inference_config: {
service: "elasticsearch",
service_settings: {
num_allocations: 1,
num_threads: 1,
model_id: ".multilingual-e5-small",
},
},
});
console.log(response);
----

@ -1,13 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.putIndexTemplate({
name: "my-data-stream-template",
index_patterns: ["my-data-stream*"],
data_stream: {},
priority: 500,
});
console.log(response);
----

@ -0,0 +1,15 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cluster.putSettings({
body: {
transient: {
'cluster.routing.use_adaptive_replica_selection': false
}
}
})
console.log(response)
----

@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.update({
index: 'test',
id: '1',
body: {
doc: {
name: 'new_name'
},
detect_noop: false
}
})
console.log(response)
----

@ -1,46 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "retrievers_example",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
range: {
year: {
gt: 2023,
},
},
},
},
},
{
standard: {
query: {
term: {
topic: "elastic",
},
},
},
},
],
rank_window_size: 10,
rank_constant: 1,
},
},
_source: false,
aggs: {
topics: {
terms: {
field: "topic",
},
},
},
});
console.log(response);
----

@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "sales",
size: 0,
filter_path: "aggregations",
query: {
term: {
type: "t-shirt",
},
},
aggs: {
avg_price: {
avg: {
field: "price",
},
},
},
});
console.log(response);
----

@ -1,18 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "rerank",
inference_id: "my-rerank-model",
inference_config: {
service: "cohere",
service_settings: {
model_id: "rerank-english-v3.0",
api_key: "{{COHERE_API_KEY}}",
},
},
});
console.log(response);
----

@ -1,17 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.updateAliases({
actions: [
{
add: {
index: "logs-*",
alias: "logs",
},
},
],
});
console.log(response);
----

@ -1,26 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "asciifold_example",
settings: {
analysis: {
analyzer: {
standard_asciifolding: {
tokenizer: "standard",
filter: ["my_ascii_folding"],
},
},
filter: {
my_ascii_folding: {
type: "asciifolding",
preserve_original: true,
},
},
},
},
});
console.log(response);
----

@ -1,36 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "idx",
settings: {
index: {
mapping: {
source: {
mode: "synthetic",
},
},
},
},
mappings: {
properties: {
kwd: {
type: "keyword",
ignore_above: 3,
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "idx",
id: 1,
document: {
kwd: ["foo", "foo", "bang", "bar", "baz"],
},
});
console.log(response1);
----

@ -1,37 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cluster.putComponentTemplate({
name: "component_template1",
template: {
mappings: {
properties: {
"@timestamp": {
type: "date",
},
},
},
},
});
console.log(response);
const response1 = await client.cluster.putComponentTemplate({
name: "runtime_component_template",
template: {
mappings: {
runtime: {
day_of_week: {
type: "keyword",
script: {
source:
"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
},
},
},
},
},
});
console.log(response1);
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transform.startTransform({
transform_id: "ecommerce_transform",
});
console.log(response);
----

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.tasks.list({
human: "true",
detailed: "true",
actions: "indices:data/write/bulk",
});
console.log(response);
----

@ -1,17 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index-000001",
aggs: {
"my-agg-name": {
terms: {
field: "my-field",
},
},
},
});
console.log(response);
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.watcher.executeWatch({
id: "my_watch",
});
console.log(response);
----

@ -1,39 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "basque_example",
settings: {
analysis: {
filter: {
basque_stop: {
type: "stop",
stopwords: "_basque_",
},
basque_keywords: {
type: "keyword_marker",
keywords: ["Adibidez"],
},
basque_stemmer: {
type: "stemmer",
language: "basque",
},
},
analyzer: {
rebuilt_basque: {
tokenizer: "standard",
filter: [
"lowercase",
"basque_stop",
"basque_keywords",
"basque_stemmer",
],
},
},
},
},
});
console.log(response);
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ccr.followStats({
index: "<index>",
});
console.log(response);
----

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
query: {
has_child: {
type: "child",
query: {
match_all: {},
},
max_children: 10,
min_children: 2,
score_mode: "min",
},
},
});
console.log(response);
----

@ -1,19 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.putMapping({
index: "my-index-000001",
properties: {
name: {
properties: {
last: {
type: "text",
},
},
},
},
});
console.log(response);
----

@ -1,28 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "sales",
mappings: {
properties: {
tags: {
type: "keyword",
},
comments: {
type: "nested",
properties: {
username: {
type: "keyword",
},
comment: {
type: "text",
},
},
},
},
},
});
console.log(response);
----

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ml.flushJob({
job_id: "low_request_rate",
calc_interim: true,
});
console.log(response);
----

@ -0,0 +1,14 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: 'twitter',
size: '0',
q: 'extra:test',
filter_path: 'hits.total'
})
console.log(response)
----

@ -1,23 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
aggs: {
JapaneseCars: {
terms: {
field: "make",
include: ["mazda", "honda"],
},
},
ActiveCarManufacturers: {
terms: {
field: "make",
exclude: ["rover", "jensen"],
},
},
},
});
console.log(response);
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.invalidateToken({
username: "myuser",
});
console.log(response);
----

@ -1,28 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
tokenizer: "keyword",
char_filter: [
{
type: "mapping",
mappings: [
"٠ => 0",
"١ => 1",
"٢ => 2",
"٣ => 3",
"٤ => 4",
"٥ => 5",
"٦ => 6",
"٧ => 7",
"٨ => 8",
"٩ => 9",
],
},
],
text: "My license plate is ٢٥٠١٥",
});
console.log(response);
----

@ -0,0 +1,43 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
body: {
aggs: {
countries: {
terms: {
field: 'artist.country',
order: [
{
'rock>playback_stats.avg': 'desc'
},
{
_count: 'desc'
}
]
},
aggs: {
rock: {
filter: {
term: {
genre: 'rock'
}
},
aggs: {
playback_stats: {
stats: {
field: 'play_count'
}
}
}
}
}
}
}
}
})
console.log(response)
----

@ -1,21 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
mappings: {
properties: {
full_name: {
type: "text",
index_prefixes: {
min_chars: 1,
max_chars: 10,
},
},
},
},
});
console.log(response);
----

@ -1,16 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index-000001",
routing: "user1,user2",
query: {
match: {
title: "document",
},
},
});
console.log(response);
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-data-stream",
});
console.log(response);
----

@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "pipelineB",
description: "outer pipeline",
processors: [
{
pipeline: {
name: "pipelineA",
},
},
{
set: {
field: "outer_pipeline_set",
value: "outer",
},
},
],
});
console.log(response);
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getTemplate({
name: "template_1",
});
console.log(response);
----

@ -1,38 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
mappings: {
properties: {
my_wildcard: {
type: "wildcard",
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "my-index-000001",
id: 1,
document: {
my_wildcard: "This string can be quite lengthy",
},
});
console.log(response1);
const response2 = await client.search({
index: "my-index-000001",
query: {
wildcard: {
my_wildcard: {
value: "*quite*lengthy",
},
},
},
});
console.log(response2);
----

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.get({
index: "my-index-000001",
id: 0,
_source: "*.id",
});
console.log(response);
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.refresh({
index: "my-index-000001,my-index-000002",
});
console.log(response);
----

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
body: {
aggs: {
genres: {
terms: {
script: {
source: "doc['genre'].value",
lang: 'painless'
}
}
}
}
}
})
console.log(response)
----

@ -1,26 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.index({
index: "example",
document: {
location: {
type: "Polygon",
orientation: "LEFT",
coordinates: [
[
[-177, 10],
[176, 15],
[172, 0],
[176, -15],
[-177, -10],
[-177, 10],
],
],
},
},
});
console.log(response);
----

@ -1,36 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.index({
index: "my-index-000001",
id: 5,
refresh: "true",
document: {
query: {
bool: {
should: [
{
match: {
message: {
query: "Japanese art",
_name: "query1",
},
},
},
{
match: {
message: {
query: "Holand culture",
_name: "query2",
},
},
},
],
},
},
},
});
console.log(response);
----

@ -1,38 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "example-index",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
term: {
text: "blue shoes sale",
},
},
},
},
{
standard: {
query: {
sparse_vector: {
field: "ml.tokens",
inference_id: "my_elser_model",
query: "What blue shoes are on sale?",
},
},
},
},
],
rank_window_size: 50,
rank_constant: 20,
},
},
});
console.log(response);
----

@ -1,29 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
settings: {
analysis: {
analyzer: {
my_custom_analyzer: {
type: "custom",
tokenizer: "standard",
char_filter: ["html_strip"],
filter: ["lowercase", "asciifolding"],
},
},
},
},
});
console.log(response);
const response1 = await client.indices.analyze({
index: "my-index-000001",
analyzer: "my_custom_analyzer",
text: "Is this déjà vu</b>?",
});
console.log(response1);
----

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
analyzer: "whitespace",
text: "The quick brown fox.",
});
console.log(response);
----

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.getServiceAccounts();
console.log(response);
----

@ -1,14 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.snapshot.restore({
repository: "my_repository",
snapshot: "my_snapshot_2099.05.06",
indices: "my-index,logs-my_app-default",
rename_pattern: "(.+)",
rename_replacement: "restored-$1",
});
console.log(response);
----

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.sql.query({
format: "txt",
query: "SELECT * FROM library ORDER BY page_count DESC LIMIT 5",
});
console.log(response);
----

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.connector.updateApiKeyId({
connector_id: "my-connector",
api_key_id: "my-api-key-id",
api_key_secret_id: "my-connector-secret-id",
});
console.log(response);
----

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.eql.search({
index: "my-data-stream",
query: '\n process where process.name == "regsvr32.exe"\n ',
size: 50,
});
console.log(response);
----

@ -1,52 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
multi_match: {
query: "How is the weather in Jamaica?",
fields: ["title", "description"],
},
},
},
},
{
standard: {
query: {
text_expansion: {
"ml.inference.title_expanded.predicted_value": {
model_id: ".elser_model_2",
model_text: "How is the weather in Jamaica?",
},
},
},
},
},
{
standard: {
query: {
text_expansion: {
"ml.inference.description_expanded.predicted_value": {
model_id: ".elser_model_2",
model_text: "How is the weather in Jamaica?",
},
},
},
},
},
],
window_size: 10,
rank_constant: 20,
},
},
});
console.log(response);
----

@ -1,27 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "PUT",
path: "/_internal/desired_nodes/&lt;history_id&gt;/&lt;version&gt;",
body: {
nodes: [
{
settings: {
"node.name": "instance-000187",
"node.external_id": "instance-000187",
"node.roles": ["data_hot", "master"],
"node.attr.data": "hot",
"node.attr.logical_availability_zone": "zone-0",
},
processors: 8,
memory: "58gb",
storage: "2tb",
},
],
},
});
console.log(response);
----

@ -4,14 +4,20 @@
[source, js]
----
const response = await client.search({
  body: {
    query: {
      multi_match: {
        query: 'Will Smith',
        type: 'cross_fields',
        fields: [
          'first_name',
          'last_name'
        ],
        operator: 'and'
      }
    }
  }
})
console.log(response)
----

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transform.resetTransform({
transform_id: "ecommerce_transform",
});
console.log(response);
----

@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.create({
index: 'twitter',
id: '1',
body: {
user: 'kimchy',
post_date: '2009-11-15T14:12:12',
message: 'trying out Elasticsearch'
}
})
console.log(response)
----

@ -1,31 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "set_os",
description: "sets the value of host.os.name from the field os",
processors: [
{
set: {
field: "host.os.name",
value: "{{{os}}}",
},
},
],
});
console.log(response);
const response1 = await client.ingest.simulate({
id: "set_os",
docs: [
{
_source: {
os: "Ubuntu",
},
},
],
});
console.log(response1);
----

@ -1,27 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "my-pipeline-id",
description: "My optional pipeline description",
processors: [
{
set: {
description: "My optional processor description",
field: "my-keyword-field",
value: "foo",
},
},
],
_meta: {
reason: "set my-keyword-field to foo",
serialization: {
class: "MyPipeline",
id: 10,
},
},
});
console.log(response);
----

Some files were not shown because too many files have changed in this diff.