Compare commits: main...backport-2 (21 commits)

| SHA1 |
| --- |
| ca3b25c929 |
| 424cc94458 |
| 7aca5cf652 |
| a8927727b1 |
| 15a450eba4 |
| 8f028a522a |
| 411f379006 |
| 242b4227ee |
| 78332da539 |
| 62b2d78b15 |
| 599d7e6e07 |
| 6116909a9b |
| f609271107 |
| b4eb8e5441 |
| ccf9fcbd93 |
| 0c6f323745 |
| 04a9eb462d |
| 05eaa9fc61 |
| 30c6793383 |
| 6df75b6a92 |
| 383206ef19 |
@@ -1,14 +1,16 @@
ARG NODE_VERSION=${NODE_VERSION:-20}
ARG NODE_VERSION=${NODE_VERSION:-18}
FROM node:$NODE_VERSION

# Install required tools
RUN apt-get clean -y && \
apt-get -qy update && \
apt-get -y install zip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
apt-get -qy update && \
apt-get -y install zip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

WORKDIR /usr/src/app

COPY package.json .
RUN npm install
RUN npm install --production=false

COPY . .

@@ -1,4 +1,4 @@
ARG NODE_JS_VERSION=${NODE_JS_VERSION:-20}
ARG NODE_JS_VERSION=${NODE_JS_VERSION:-18}
FROM node:${NODE_JS_VERSION}

ARG BUILDER_UID=1000
@@ -24,4 +24,7 @@ USER ${BUILDER_UID}:${BUILDER_GID}

# install dependencies
COPY package.json .
RUN npm install
RUN npm install --production=false

# copy project files
COPY . .

@@ -1,6 +1,20 @@
/*
* Copyright Elasticsearch B.V. and contributors
* SPDX-License-Identifier: Apache-2.0
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/* global $ argv */
@@ -109,7 +123,7 @@ async function codegen (args) {
await $`rm -rf ${join(import.meta.url, '..', 'src', 'api')}`
await $`mkdir ${join(import.meta.url, '..', 'src', 'api')}`
await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.md')} ${join(import.meta.url, '..', 'docs', 'reference', 'api-reference.md')}`
await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
await $`npm run build`

// run docs example generation

@@ -1,31 +1,31 @@
---
agents:
provider: "gcp"
image: family/core-ubuntu-2204
memory: "8G"
cpu: "2"

steps:
- label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }})"
- label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}"
agents:
provider: "gcp"
env:
NODE_VERSION: "{{ matrix.nodejs }}"
TEST_SUITE: "platinum"
STACK_VERSION: 9.0.0
GITHUB_TOKEN_PATH: "secret/ci/elastic-elasticsearch-js/github-token"
TEST_ES_STACK: "1"
TEST_SUITE: "{{ matrix.suite }}"
STACK_VERSION: 8.15.0
matrix:
setup:
suite:
- "free"
- "platinum"
nodejs:
- "18"
- "20"
- "22"
- "23"
command: ./.buildkite/run-tests.sh
artifact_paths: "./junit-output/junit-*.xml"
- wait: ~
continue_on_failure: true
- label: ":junit: Test results"
agents:
provider: "gcp"
image: family/core-ubuntu-2204
plugins:
- junit-annotate#v2.7.0:
- junit-annotate#v2.4.1:
artifacts: "junit-output/junit-*.xml"
job-uuid-file-pattern: "junit-(.*).xml"
fail-build-on-error: true

@@ -6,33 +6,26 @@ script_path=$(dirname "$(realpath -s "$0")")
set -euo pipefail
repo=$(pwd)

export NODE_VERSION=${NODE_VERSION:-20}
export NODE_VERSION=${NODE_VERSION:-18}

echo "--- :javascript: Building Docker image"
docker build \
--file "$script_path/Dockerfile" \
--tag elastic/elasticsearch-js \
--build-arg NODE_VERSION="$NODE_VERSION" \
.
--file "$script_path/Dockerfile" \
--tag elastic/elasticsearch-js \
--build-arg NODE_VERSION="$NODE_VERSION" \
.

GITHUB_TOKEN=$(vault read -field=token "$GITHUB_TOKEN_PATH")
export GITHUB_TOKEN

echo "--- :javascript: Running tests"
echo "--- :javascript: Running $TEST_SUITE tests"
mkdir -p "$repo/junit-output"
docker run \
--network="${network_name}" \
--env TEST_ES_STACK \
--env STACK_VERSION \
--env GITHUB_TOKEN \
--env "TEST_ES_SERVER=${elasticsearch_url}" \
--env "ELASTIC_PASSWORD=${elastic_password}" \
--env "ELASTIC_USER=elastic" \
--env "BUILDKITE=true" \
--volume "/usr/src/app/node_modules" \
--volume "$repo:/usr/src/app" \
--volume "$repo/junit-output:/junit-output" \
--name elasticsearch-js \
--rm \
elastic/elasticsearch-js \
bash -c "npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"
--network="${network_name}" \
--env "TEST_ES_SERVER=${elasticsearch_url}" \
--env "ELASTIC_PASSWORD=${elastic_password}" \
--env "TEST_SUITE=${TEST_SUITE}" \
--env "ELASTIC_USER=elastic" \
--env "BUILDKITE=true" \
--volume "$repo/junit-output:/junit-output" \
--name elasticsearch-js \
--rm \
elastic/elasticsearch-js \
bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"

@@ -5,7 +5,3 @@ elasticsearch
.git
lib
junit-output
.tap
rest-api-spec
yaml-rest-tests
generated-tests

.github/CODEOWNERS (3 changes)
@@ -1,3 +0,0 @@
package.json @joshmock
renovate.json @joshmock
catalog-info.yaml @joshmock

.github/ISSUE_TEMPLATE/bug.md (50 changes, new file)
@@ -0,0 +1,50 @@
---
name: 🐛 Bug report
about: Create a report to help us improve
labels: ["Category: Bug"]
---

It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**

## 🐛 Bug Report

A clear and concise description of what the bug is.

## To Reproduce

Steps to reproduce the behavior:

Paste your code here:

```js

```

<!--
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed
to spin up a three nodes Elasticsearch cluster with security enabled.
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
https://github.com/delvedor/es-reproduce-issue
--->

## Expected behavior

A clear and concise description of what you expected to happen.

Paste the results here:

```js

```

## Your Environment

- *node version*: 6,8,10
- `@elastic/elasticsearch` *version*: >=7.0.0
- *os*: Mac, Windows, Linux
- *any other relevant information*

.github/ISSUE_TEMPLATE/bug.yaml (66 changes)
@@ -1,66 +0,0 @@
---
name: 🐛 Bug report
description: Create a report to help us improve
labels: ["Category: Bug"]
body:
- type: markdown
attributes:
value: |
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**

- type: textarea
id: bug-report
attributes:
label: 🐛 Bug report
description: A clear and concise description of what the bug is.
validations:
required: true

- type: textarea
id: reproduction
attributes:
label: To reproduce
description: Steps to reproduce the behavior
validations:
required: true

- type: textarea
id: expected
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true

- type: input
id: node-js-version
attributes:
label: Node.js version
placeholder: 20.x, 22.x, etc.
validations:
required: true

- type: input
id: client-version
attributes:
label: "@elastic/elasticsearch version"
placeholder: 7.17.0, 8.14.1, etc.
validations:
required: true

- type: input
id: os
attributes:
label: Operating system
placeholder: Ubuntu 22.04, macOS, etc.
validations:
required: true

- type: input
id: env-info
attributes:
label: Any other relevant environment information

.github/ISSUE_TEMPLATE/feature.md (23 changes, new file)
@@ -0,0 +1,23 @@
---
name: 🚀 Feature Proposal
about: Submit a proposal for a new feature
labels: ["Category: Feature"]
---

It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**

## 🚀 Feature Proposal

A clear and concise description of what the feature is.

## Motivation

Please outline the motivation for the proposal.

## Example

Please provide an example for how this feature would be used.

.github/ISSUE_TEMPLATE/feature.yaml (33 changes)
@@ -1,33 +0,0 @@
---
name: 🚀 Feature Proposal
description: Submit a proposal for a new feature
labels: ["Category: Feature"]
body:
- type: markdown
attributes:
value: |
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**

- type: textarea
id: feature-proposal
attributes:
label: 🚀 Feature Proposal
description: A clear and concise description of what the feature is.
validations:
required: true

- type: textarea
id: motivation
attributes:
label: Motivation
description: Please outline the motivation for the proposal.

- type: textarea
id: example
attributes:
label: Example
description: Please provide an example for how this feature would be used.

.github/ISSUE_TEMPLATE/question.md (11 changes, new file)
@@ -0,0 +1,11 @@
---
name: 💬 Questions / Help
about: If you have questions, please check our Gitter or Help repo
labels: ["Category: Question"]
---

## 💬 Questions and Help

### Please note that this issue tracker is not a help forum and this issue may be closed.

It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

.github/ISSUE_TEMPLATE/question.yaml (21 changes)
@@ -1,21 +0,0 @@
---
name: 💬 Questions / Help
description: If you have questions, please check our community forum or support
labels: ["Category: Question"]
body:
- type: markdown
attributes:
value: |
### Please note that this issue tracker is not a help forum and this issue may be closed.

Please check our [community forum](https://discuss.elastic.co/) or [contact Elastic support](https://www.elastic.co/support) if your issue is not specifically related to the documented functionality of this client library.

It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

- type: textarea
id: question
attributes:
label: Question
description: Your question or comment
validations:
required: true

.github/ISSUE_TEMPLATE/security.md (6 changes, new file)
@@ -0,0 +1,6 @@
---
name: 👮 Security Issue
about: Responsible Disclosure
---

If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).

.github/ISSUE_TEMPLATE/security.yaml (8 changes)
@@ -1,8 +0,0 @@
---
name: 👮 Security Issue
description: Responsible disclosure
body:
- type: markdown
attributes:
value: |
If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).

.github/make.sh (196 changes)
@@ -37,7 +37,7 @@ product="elastic/elasticsearch-js"
output_folder=".buildkite/output"
codegen_folder=".buildkite/output"
OUTPUT_DIR="$repo/${output_folder}"
NODE_JS_VERSION=22
NODE_JS_VERSION=18
WORKFLOW=${WORKFLOW-staging}
mkdir -p "$OUTPUT_DIR"

@@ -46,84 +46,84 @@ echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"

case $CMD in
clean)
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
rm -rf "$output_folder"
echo -e "\033[32;1mdone.\033[0m"
exit 0
;;
assemble)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: assemble artifact $VERSION\033[0m"
TASK=release
TASK_ARGS=("$VERSION" "$output_folder")
;;
codegen)
if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
# fall back to branch name or `main` if no VERSION is set
branch_name=$(git rev-parse --abbrev-ref HEAD)
if [[ "$branch_name" =~ ^[0-9]+\.([0-9]+|x) ]]; then
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
VERSION="$branch_name"
else
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using \`main\`\033[0m"
VERSION="main"
fi
fi
if [ "$VERSION" = 'main' ]; then
echo -e "\033[36;1mTARGET: codegen API $VERSION\033[0m"
else
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
fi
clean)
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
rm -rf "$output_folder"
echo -e "\033[32;1mdone.\033[0m"
exit 0
;;
assemble)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: assemble artifact $VERSION\033[0m"
TASK=release
TASK_ARGS=("$VERSION" "$output_folder")
;;
codegen)
if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
# fall back to branch name or `main` if no VERSION is set
branch_name=$(git rev-parse --abbrev-ref HEAD)
if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
VERSION="$branch_name"
else
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using \`main\`\033[0m"
VERSION="main"
fi
fi
if [ "$VERSION" = 'main' ]; then
echo -e "\033[36;1mTARGET: codegen API $VERSION\033[0m"
else
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
fi

TASK=codegen
TASK_ARGS=("$VERSION")
;;
docsgen)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
examplesgen)
echo -e "\033[36;1mTARGET: generate examples\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
bump)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
TASK=bump
TASK_ARGS=("$VERSION")
;;
bumpmatrix)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
TASK=bumpmatrix
TASK_ARGS=("$VERSION")
;;
*)
echo -e "\n'$CMD' is not supported right now\n"
echo -e "\nUsage:"
echo -e "\t $0 release \$VERSION\n"
echo -e "\t $0 bump \$VERSION"
echo -e "\t $0 codegen \$VERSION"
exit 1
;;
TASK=codegen
TASK_ARGS=("$VERSION")
;;
docsgen)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
examplesgen)
echo -e "\033[36;1mTARGET: generate examples\033[0m"
TASK=codegen
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
bump)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
TASK=bump
TASK_ARGS=("$VERSION")
;;
bumpmatrix)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
TASK=bumpmatrix
TASK_ARGS=("$VERSION")
;;
*)
echo -e "\n'$CMD' is not supported right now\n"
echo -e "\nUsage:"
echo -e "\t $0 release \$VERSION\n"
echo -e "\t $0 bump \$VERSION"
echo -e "\t $0 codegen \$VERSION"
exit 1
esac

# ------------------------------------------------------- #
# Build Container
# ------------------------------------------------------- #
@@ -150,7 +150,7 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
-u "$(id -u):$(id -g)" \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
--volume "$(realpath "$repo/../elastic-client-generator-js"):/usr/src/elastic-client-generator-js" \
--volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
--env "WORKFLOW=$WORKFLOW" \
--name make-elasticsearch-js \
--rm \
@@ -159,14 +159,6 @@
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
else
echo -e "\033[34;1mINFO: Running in CI mode"

# determine branch to clone
GENERATOR_BRANCH="main"
if [[ "$VERSION" == 8.* ]]; then
GENERATOR_BRANCH="8.x"
fi
echo -e "\033[34;1mINFO: Generator branch: $GENERATOR_BRANCH"

docker run \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
@@ -176,7 +168,7 @@ else
--rm \
$product \
/bin/bash -c "cd /usr/src && \
git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
mkdir -p /usr/src/elastic-client-generator-js/output && \
cd /usr/src/elasticsearch-js && \
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
@@ -187,36 +179,36 @@ fi
# ------------------------------------------------------- #

if [[ "$CMD" == "assemble" ]]; then
if compgen -G ".buildkite/output/*" >/dev/null; then
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
exit 1
fi
if compgen -G ".buildkite/output/*" > /dev/null; then
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
exit 1
fi
fi

if [[ "$CMD" == "bump" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
exit 1
fi
fi

if [[ "$CMD" == "codegen" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
exit 1
fi
fi

if [[ "$CMD" == "docsgen" ]]; then
echo "TODO"
echo "TODO"
fi

if [[ "$CMD" == "examplesgen" ]]; then
echo "TODO"
echo "TODO"
fi

.github/stale.yml (26 changes, new file)
@@ -0,0 +1,26 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 15

# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7

# Issues with these labels will never be considered stale
exemptLabels:
- "discussion"
- "feature request"
- "bug"
- "todo"
- "good first issue"

# Label to use when marking an issue as stale
staleLabel: stale

# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: |
We understand that this might be important for you, but this issue has been automatically marked as stale because it has not had recent activity either from our end or yours.
It will be closed if no further activity occurs, please write a comment if you would like to keep this going.

Note: in the past months we have built a new client, that has just landed in master. If you want to open an issue or a pr for the legacy client, you should do that in https://github.com/elastic/elasticsearch-js-legacy

# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

.github/workflows/auto-merge.yml (18 changes, new file)
@@ -0,0 +1,18 @@
name: Automerge

on:
pull_request_review:
types:
- submitted

jobs:
automerge:
runs-on: ubuntu-latest
if: github.event.review.state == 'approved'
steps:
- uses: reitermarkus/automerge@v2
with:
token: ${{ secrets.GH_TOKEN }}
merge-method: squash
pull-request-author-associations: OWNER
review-author-associations: OWNER,CONTRIBUTOR

.github/workflows/docs-build.yml (19 changes)
@@ -1,19 +0,0 @@
name: docs-build

on:
push:
branches:
- main
pull_request_target: ~
merge_group: ~

jobs:
docs-preview:
uses: elastic/docs-builder/.github/workflows/preview-build.yml@main
with:
path-pattern: docs/**
permissions:
deployments: write
id-token: write
contents: read
pull-requests: write

.github/workflows/docs-cleanup.yml (14 changes)
@@ -1,14 +0,0 @@
name: docs-cleanup

on:
pull_request_target:
types:
- closed

jobs:
docs-preview:
uses: elastic/docs-builder/.github/workflows/preview-cleanup.yml@main
permissions:
contents: none
id-token: write
deployments: write

.github/workflows/nodejs.yml (69 changes)
@@ -11,10 +11,8 @@ jobs:
outputs:
src-only: "${{ steps.changes.outputs.src-only }}"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: dorny/paths-filter/@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
- uses: actions/checkout@v4
- uses: dorny/paths-filter/@v2.11.1
id: changes
with:
filters: |
@@ -32,16 +30,14 @@ jobs:
strategy:
fail-fast: false
matrix:
node-version: [20.x, 22.x, 23.x]
node-version: [18.x, 20.x, 22.x]
os: [ubuntu-latest, windows-latest, macOS-latest]

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: actions/checkout@v4

- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}

@@ -57,21 +53,15 @@ jobs:
run: |
npm run test:unit

- name: ECMAScript module test
run: |
npm run test:esm

license:
name: License check
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: actions/checkout@v4

- name: Use Node.js
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version: 22.x

@@ -83,41 +73,12 @@ jobs:
run: |
npm run license-checker

- name: SPDX header check
run: npm run license-header

test-bun:
name: Test Bun
runs-on: ${{ matrix.os }}
needs: paths-filter
# only run if code relevant to unit tests was changed
if: needs.paths-filter.outputs.src-only == 'true'

strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]

auto-approve:
name: Auto-approve
needs: [test, license]
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: github.actor == 'elasticmachine'
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Use Bun
uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2

- name: Install
run: |
bun install

- name: Lint
run: |
bun run lint

- name: Unit test
run: |
bun run test:unit-bun

- name: ECMAScript module test
run: |
bun run test:esm
- uses: hmarr/auto-approve-action@v4

.github/workflows/npm-publish-unstable.yml (82 changes)
@@ -1,82 +0,0 @@
---
name: Publish unstable builds to npm
on:
push:
branches:
- main

# kill in-progress action if another one is triggered
concurrency:
group: publish-unstable
cancel-in-progress: true

jobs:
# don't publish if source code has not changed
paths-filter:
name: Detect files changed
runs-on: ubuntu-latest
outputs:
src: "${{ steps.changes.outputs.src }}"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: dorny/paths-filter/@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
id: changes
with:
filters: |
src:
- 'src/**'
- 'package.json'
- 'tsconfig.json'
- 'index.d.ts'
- 'index.js'

test:
name: Run tests and publish unstable
if: ${{ needs.paths-filter.outputs.src == 'true' }}
runs-on: ubuntu-latest
steps:
# pause for 30 minutes to avoid publishing more than 2x per hour
- name: Debounce 30 minutes
uses: zachary95/github-actions-debounce@ab7363483e2837992b8aa6be891763da00ac14f9 # v0.1.0
with:
wait: 1800
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
ref: main
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:
node-version: "22.x"
registry-url: "https://registry.npmjs.org"
- name: Install dependencies
run: |
npm install -g npm
npm install
- name: Run tests
run: npm test

# if tests pass, publish unstable
publish:
name: Publish unstable
needs: test
runs-on: ubuntu-latest
steps:
- name: npm publish
run: |
# set unstable version value
unstable_tag=$(echo "unstable.$(date --utc +%Y%m%d%H%M%S)")
latest=$(npm view @elastic/elasticsearch --json | jq -r '.["dist-tags"].latest')
next=$(yes | npx semver -i minor "$latest")
unstable_version=$(echo "$next-$unstable_tag")

# overwrite package.json with unstable version value
mv package.json package.json.bak
jq --arg v "$unstable_version" ".version = $v" package.json.bak > package.json
rm package.json.bak

# publish to npm
npm publish --provenance --access public --tag "unstable"
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

.github/workflows/npm-publish.yml (56 changes)
@@ -9,63 +9,29 @@ jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
contents: read
id-token: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false
ref: ${{ github.event.inputs.branch }}
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
- uses: actions/setup-node@v3
with:
node-version: "22.x"
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- run: npm install -g npm
- run: npm install
- run: npm test
- name: npm publish
run: |
version=$(jq -r .version package.json)
tag_meta=$(echo "$version" | cut -s -d '-' -f2)
# if no meta info on the version (e.g. a '-alpha.1' prefix), publish as a stable release
if [[ -z "$tag_meta" ]]; then
# get latest version on npm
latest=$(npm view @elastic/elasticsearch --json | jq -r '.["dist-tags"].latest')

# if $version is higher than the most recently published version, publish as-is
if [[ $(yes | npx semver "$version" "$latest" | tail -n1) == "$version" ]]; then
npm publish --provenance --access public
else
# otherwise, publish with "previous" tag
npm publish --provenance --access public --tag "previous"
fi
else
# publish as a non-stable release using the meta name (e.g. 'alpha') as the tag
tag=$(echo "$tag_meta" | cut -d '.' -f1)
npm publish --provenance --access public --tag "$tag"
fi
- run: npm publish --provenance --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Publish version on GitHub
run: |
- run: |
version=$(jq -r .version package.json)
tag_meta=$(echo "$version" | cut -s -d '-' -f2)
if [[ -z "$tag_meta" ]]; then
gh release create \
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)"
--target "$BRANCH_NAME" \
--title "v$version" \
"v$version"
else
tag_main=$(echo "$version" | cut -d '-' -f1)
gh release create \
-n "This is a $tag_main pre-release. Changes may not be stable." \
--latest=false \
--prerelease \
--target "$BRANCH_NAME" \
--title "v$version" \
"v$version"
fi
gh release create \
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
--target "$BRANCH_NAME" \
-t "v$version" \
"v$version"
env:
BRANCH_NAME: ${{ github.event.inputs.branch }}
GH_TOKEN: ${{ github.token }}

.github/workflows/serverless-patch.sh (43 changes, new executable file)
@@ -0,0 +1,43 @@
#!/usr/bin/env bash

set -exuo pipefail

merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
pr_shortcode="elastic/elasticsearch-js#$pull_request_id"

# generate patch file
cd "$GITHUB_WORKSPACE/stack"
git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff

# set committer info
git config --global user.email "elasticmachine@users.noreply.github.com"
git config --global user.name "Elastic Machine"

# apply patch file
cd "$GITHUB_WORKSPACE/serverless"
git am -C1 --reject /tmp/patch.diff || git am --quit

# generate PR body comment
comment="Patch applied from $pr_shortcode"

# enumerate rejected patches in PR comment
has_rejects='false'
for f in ./**/*.rej; do
has_rejects='true'
comment="$comment

## Rejected patch \`$f\` must be resolved:

\`\`\`diff
$(cat "$f")
\`\`\`
"
done

# delete .rej files
rm -fv ./**/*.rej

# send data to output parameters
echo "$comment" > /tmp/pr_body
echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"

.github/workflows/serverless-patch.yml (51 changes, new file)
@@ -0,0 +1,51 @@
---
name: Apply PR changes to serverless
on:
pull_request_target:
types:
- closed
- labeled

jobs:
apply-patch:
name: Apply patch
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& (
(
github.event.action == 'closed'
&& contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
)
||
(
github.event.action == 'labeled'
&& github.event.label.name == 'apply-to-serverless'
)
)
steps:
- uses: actions/checkout@v4
with:
repository: elastic/elasticsearch-js
ref: main
path: stack
fetch-depth: 0
- uses: actions/checkout@v4
with:
repository: elastic/elasticsearch-serverless-js
ref: main
path: serverless
- name: Apply patch from stack to serverless
id: apply-patch
run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
- uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GH_TOKEN }}
path: serverless
title: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
commit-message: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
body-path: /tmp/pr_body
draft: '${{ steps.apply-patch.outputs.PR_DRAFT }}'
add-paths: ':!*.rej'

.github/workflows/stale.yml (12 changes)
@@ -1,21 +1,21 @@
---
name: "Close stale issues and PRs"
name: 'Close stale issues and PRs'
on:
schedule:
- cron: "30 1 * * *"
- cron: '30 1 * * *'

jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9
- uses: actions/stale@v8
with:
stale-issue-label: stale
stale-pr-label: stale
days-before-stale: 90
days-before-close: 14
exempt-issue-labels: "good first issue,tracking"
exempt-issue-labels: 'good first issue'
close-issue-label: closed-stale
close-pr-label: closed-stale
stale-issue-message: "This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
stale-pr-message: "This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'

.gitignore (8 changes)
@@ -64,11 +64,3 @@ test/bundlers/parcel-test/.parcel-cache

lib
junit-output
bun.lockb
test-results
processinfo
.tap
rest-api-spec
yaml-rest-tests
generated-tests
schema

@@ -72,8 +72,3 @@ CODE_OF_CONDUCT.md
CONTRIBUTING.md

src
bun.lockb
.tap
rest-api-spec
yaml-rest-tests
generated-tests

README.md (90 changes)
@@ -2,7 +2,7 @@

# Elasticsearch Node.js client

[](http://standardjs.com/) [](https://buildkite.com/elastic/elasticsearch-javascript-client-integration-tests/builds?branch=main) [](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml) [](https://codecov.io/gh/elastic/elasticsearch-js) [](https://www.npmjs.com/package/@elastic/elasticsearch)
[](http://standardjs.com/) [](https://buildkite.com/elastic/elasticsearch-javascript-client-integration-tests/builds?branch=main) [](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml) [](https://codecov.io/gh/elastic/elasticsearch-js) [](https://www.npmjs.com/package/@elastic/elasticsearch)

**[Download the latest version of Elasticsearch](https://www.elastic.co/downloads/elasticsearch)**
or
@@ -34,26 +34,25 @@ the new features of the 8.13 version of Elasticsearch, the 8.13 client version
is required for that. Elasticsearch language clients are only backwards
compatible with default distributions and without guarantees made.

| Elasticsearch Version | Elasticsearch-JS Branch |
| --------------------- | ----------------------- |
| main | main |
| 9.x | 9.x |
| 8.x | 8.x |
| 7.x | 7.x |
| Elasticsearch Version | Elasticsearch-JS Branch | Supported |
| --------------------- | ------------------------ | --------- |
| main | main | |
| 8.x | 8.x | 8.x |
| 7.x | 7.x | 7.17 |

## Usage

- [Creating an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_creating_an_index)
- [Indexing a document](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_indexing_documents)
- [Getting documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_getting_documents)
- [Searching documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_searching_documents)
- [Updating documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_updating_documents)
- [Deleting documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_deleting_documents)
- [Deleting an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_deleting_an_index)
* [Creating an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_creating_an_index)
* [Indexing a document](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_indexing_documents)
* [Getting documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_getting_documents)
* [Searching documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_searching_documents)
* [Updating documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_updating_documents)
* [Deleting documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_deleting_documents)
* [Deleting an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_deleting_an_index)

### Node.js support

NOTE: The minimum supported version of Node.js is `v20`.
NOTE: The minimum supported version of Node.js is `v18`.

The client versioning follows the Elastic Stack versioning, this means that
major, minor, and patch releases are done following a precise schedule that
@@ -66,43 +65,58 @@ to support that version for at least another minor release. If you are using the
with a version of Node.js that will be unsupported soon, you will see a warning
in your logs (the client will start logging the warning with two minors in advance).

Unless you are **always** using a supported version of Node.js,
Unless you are **always** using a supported version of Node.js,
we recommend defining the client dependency in your
`package.json` with the `~` instead of `^`. In this way, you will lock the
dependency on the minor release and not the major. (for example, `~7.10.0` instead
of `^7.10.0`).

| Node.js Version | Node.js EOL date | End of support |
| --------------- | ---------------- | ------------------- |
| `8.x` | `December 2019` | `7.11` (early 2021) |
| `10.x` | `April 2021` | `7.12` (mid 2021) |
| `12.x` | `April 2022` | `8.2` (early 2022) |
| `14.x` | `April 2023` | `8.8` (early 2023) |
| `16.x` | `September 2023` | `8.11` (late 2023) |
| `18.x` | `April 2025` | `9.1` (mid 2025) |
| Node.js Version | Node.js EOL date | End of support |
| --------------- |------------------| ---------------------- |
| `8.x` | `December 2019` | `7.11` (early 2021) |
| `10.x` | `April 2021` | `7.12` (mid 2021) |
| `12.x` | `April 2022` | `8.2` (early 2022) |
| `14.x` | `April 2023` | `8.8` (early 2023) |
| `16.x` | `September 2023` | `8.11` (late 2023) |

### Compatibility

Language clients are forward compatible; meaning that clients support communicating with greater or equal minor versions of Elasticsearch.
Elasticsearch language clients are only backwards compatible with default distributions and without guarantees made.

| Elasticsearch Version | Client Version |
| --------------------- |----------------|
| `8.x` | `8.x` |
| `7.x` | `7.x` |
| `6.x` | `6.x` |
| `5.x` | `5.x` |

To install a specific major of the client, run the following command:
```
npm install @elastic/elasticsearch@<major>
```

#### Browser

> [!WARNING]
> There is no official support for the browser environment. It exposes your Elasticsearch instance to everyone, which could lead to security issues.
> We recommend that you write a lightweight proxy that uses this client instead, you can see a proxy example [here](./docs/examples/proxy).
We recommend that you write a lightweight proxy that uses this client instead, you can see a proxy example [here](./docs/examples/proxy).

## Documentation

- [Introduction](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/introduction.html)
- [Usage](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#client-usage)
- [Client configuration](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-configuration.html)
- [API reference](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/api-reference.html)
- [Authentication](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#authentication)
- [Observability](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html)
- [Creating a child client](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/child.html)
- [Client helpers](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html)
- [Typescript support](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/typescript.html)
- [Testing](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-testing.html)
- [Examples](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/examples.html)
* [Introduction](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/introduction.html)
* [Usage](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#client-usage)
* [Client configuration](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-configuration.html)
* [API reference](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/api-reference.html)
* [Authentication](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#authentication)
* [Observability](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html)
* [Creating a child client](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/child.html)
* [Client helpers](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html)
* [Typescript support](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/typescript.html)
* [Testing](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-testing.html)
* [Examples](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/examples.html)

## Install multiple versions

If you are using multiple versions of Elasticsearch, you need to use multiple versions of the client. In the past, install multiple versions of the same package was not possible, but with `npm v6.9`, you can do that via aliasing.

The command you must run to install different version of the client is:
@@ -147,7 +161,7 @@ client7.info().then(console.log, console.log)
```

Finally, if you want to install the client for the next version of Elasticsearch
_(the one that lives in Elasticsearch’s main branch)_, you can use the following
*(the one that lives in Elasticsearch’s main branch)*, you can use the following
command:

```sh

@@ -28,16 +28,20 @@ spec:
spec:
repository: elastic/elasticsearch-js
pipeline_file: .buildkite/pipeline.yml
env:
ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true"
SLACK_NOTIFICATIONS_CHANNEL: "#devtools-notify-javascript"
teams:
devtools-team:
access_level: MANAGE_BUILD_AND_READ
everyone:
access_level: READ_ONLY
provider_settings:
build_pull_requests: true
build_pull_requests: false
build_branches: false
separate_pull_request_statuses: true
cancel_intermediate_builds: true
cancel_intermediate_builds_branch_filter: "!main"
schedules:
main:
branch: "main"
cronline: "@daily"
8_14:
branch: "8.14"
cronline: "@daily"

@ -1,27 +1,25 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/advanced-config.html
|
||||
---
|
||||
[[advanced-config]]
|
||||
=== Advanced configuration
|
||||
|
||||
# Advanced configuration [advanced-config]
|
||||
|
||||
If you need to customize the client behavior heavily, you are in the right place! The client enables you to customize the following internals:
|
||||
If you need to customize the client behavior heavily, you are in the right
|
||||
place! The client enables you to customize the following internals:
|
||||
|
||||
* `ConnectionPool` class
|
||||
* `Connection` class
|
||||
* `Serializer` class
|
||||
|
||||
::::{note}
|
||||
For information about the `Transport` class, refer to [Transport](/reference/transport.md).
|
||||
::::
|
||||
NOTE: For information about the `Transport` class, refer to <<transport>>.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== `ConnectionPool`
|
||||
|
||||
## `ConnectionPool` [_connectionpool]
|
||||
This class is responsible for keeping in memory all the {es} Connection that you
|
||||
are using. There is a single Connection for every node. The connection pool
|
||||
handles the resurrection strategies and the updates of the pool.
|
||||
|
||||
This class is responsible for keeping in memory all the {{es}} Connection that you are using. There is a single Connection for every node. The connection pool handles the resurrection strategies and the updates of the pool.
|
||||
|
||||
```js
|
||||
[source,js]
|
||||
----
|
||||
const { Client, ConnectionPool } = require('@elastic/elasticsearch')
|
||||
|
||||
class MyConnectionPool extends ConnectionPool {
|
||||
@ -36,14 +34,19 @@ const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
```
|
||||
----
|
||||
|
||||
|
||||
## `Connection` [_connection]
|
||||
[discrete]
|
||||
==== `Connection`
|
||||
|
||||
This class represents a single node, it holds every information we have on the node, such as roles, id, URL, custom headers and so on. The actual HTTP request is performed here, this means that if you want to swap the default HTTP client (Node.js core), you should override the `request` method of this class.
|
||||
This class represents a single node, it holds every information we have on the
|
||||
node, such as roles, id, URL, custom headers and so on. The actual HTTP request
|
||||
is performed here, this means that if you want to swap the default HTTP client
|
||||
(Node.js core), you should override the `request` method of this class.
|
||||
|
||||
```js
|
||||
[source,js]
|
||||
----
|
||||
const { Client, BaseConnection } = require('@elastic/elasticsearch')
|
||||
|
||||
class MyConnection extends BaseConnection {
|
||||
@ -57,19 +60,22 @@ const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
```
|
||||
----
|
||||
|
||||
|
||||
## `Serializer` [_serializer]
|
||||
[discrete]
|
||||
==== `Serializer`
|
||||
|
||||
This class is responsible for the serialization of every request, it offers the following methods:
|
||||
This class is responsible for the serialization of every request; it offers the
|
||||
following methods:
|
||||
|
||||
* `serialize(object: any): string;` serializes request objects.
|
||||
* `deserialize(json: string): any;` deserializes response strings.
|
||||
* `ndserialize(array: any[]): string;` serializes bulk request objects.
|
||||
* `qserialize(object: any): string;` serializes request query parameters.
|
||||
|
||||
```js
|
||||
[source,js]
|
||||
----
|
||||
const { Client, Serializer } = require('@elastic/elasticsearch')
|
||||
|
||||
class MySerializer extends Serializer {
|
||||
@ -83,10 +89,11 @@ const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
```
|
||||
----
|
||||
|
||||
|
||||
## Redaction of potentially sensitive data [redaction]
|
||||
[discrete]
|
||||
[[redaction]]
|
||||
==== Redaction of potentially sensitive data
|
||||
|
||||
When the client raises an `Error` that originated at the HTTP layer, like a `ConnectionError` or `TimeoutError`, a `meta` object is often attached to the error object that includes metadata useful for debugging, like request and response information. Because this can include potentially sensitive data, like authentication secrets in an `Authorization` header, the client takes measures to redact common sources of sensitive data when this metadata is attached and serialized.
|
||||
|
||||
@ -94,7 +101,8 @@ If your configuration requires extra headers or other configurations that may in
|
||||
|
||||
By default, the `redaction` option is set to `{ type: 'replace' }`, which recursively searches for sensitive key names, case insensitive, and replaces their values with the string `[redacted]`.
|
||||
|
||||
```js
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -107,11 +115,12 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers.authorization) // prints "[redacted]"
|
||||
}
|
||||
```
|
||||
----
|
||||
|
||||
If you would like to redact additional properties, you can include additional key names to search and replace:
|
||||
|
||||
```js
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -129,11 +138,12 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers['X-My-Secret-Password']) // prints "[redacted]"
|
||||
}
|
||||
```
|
||||
----
|
||||
|
||||
Alternatively, if you know you’re not going to use the metadata at all, setting the redaction type to `remove` will remove all optional sources of potentially sensitive data entirely, or replace them with `null` for required properties.
|
||||
Alternatively, if you know you're not going to use the metadata at all, setting the redaction type to `remove` will remove all optional sources of potentially sensitive data entirely, or replace them with `null` for required properties.
|
||||
|
||||
```js
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -147,16 +157,14 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers) // undefined
|
||||
}
|
||||
```
|
||||
----
|
||||
|
||||
Finally, if you prefer to turn off redaction altogether, perhaps while debugging on a local developer environment, you can set the redaction type to `off`. This will revert the client to pre-8.11.0 behavior, where basic redaction is only performed during common serialization methods like `console.log` and `JSON.stringify`.
|
||||
|
||||
::::{warning}
|
||||
Setting `redaction.type` to `off` is not recommended in production environments.
|
||||
::::
|
||||
WARNING: Setting `redaction.type` to `off` is not recommended in production environments.
|
||||
|
||||
|
||||
```js
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -170,10 +178,18 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers.authorization) // the actual header value will be logged
|
||||
}
|
||||
```
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Migrate to v8
|
||||
|
||||
## Migrate to v8 [_migrate_to_v8]
|
||||
|
||||
The Node.js client can be configured to emit an HTTP header `Accept: application/vnd.elasticsearch+json; compatible-with=7` which signals to Elasticsearch that the client is requesting the `7.x` version of request and response bodies. This allows upgrading from 7.x to 8.x of Elasticsearch without upgrading everything at once: Elasticsearch should be upgraded first, after the compatibility header is configured, and clients should be upgraded second. To enable this setting, set the environment variable `ELASTIC_CLIENT_APIVERSIONING` to `true`.
|
||||
|
||||
The Node.js client can be configured to emit an HTTP header
|
||||
`Accept: application/vnd.elasticsearch+json; compatible-with=7`
|
||||
which signals to Elasticsearch that the client is requesting
|
||||
`7.x` version of request and response bodies. This allows for
|
||||
upgrading from 7.x to 8.x version of Elasticsearch without upgrading
|
||||
everything at once. Elasticsearch should be upgraded first after
|
||||
the compatibility header is configured and clients should be upgraded
|
||||
second.
|
||||
To enable this setting, set the environment variable
|
||||
`ELASTIC_CLIENT_APIVERSIONING` to `true`.
|
||||
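For example, a minimal sketch (the environment variable can equally be set in the shell; setting it in code here is only for illustration):

[source,js]
----
// Enable 7.x compatibility mode before instantiating the client
process.env.ELASTIC_CLIENT_APIVERSIONING = 'true'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})
----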
270
docs/basic-config.asciidoc
Normal file
@ -0,0 +1,270 @@
|
||||
[[basic-config]]
|
||||
=== Basic configuration
|
||||
|
||||
This page shows you the possible basic configuration options that the client
|
||||
offers.
|
||||
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
maxRetries: 5,
|
||||
requestTimeout: 60000,
|
||||
sniffOnStart: true
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[cols=2*]
|
||||
|===
|
||||
|`node` or `nodes`
|
||||
a|The Elasticsearch endpoint to use. +
|
||||
It can be a single string or an array of strings:
|
||||
[source,js]
|
||||
----
|
||||
node: 'http://localhost:9200'
|
||||
----
|
||||
Or it can be an object (or an array of objects) that represents the node:
|
||||
[source,js]
|
||||
----
|
||||
node: {
|
||||
url: new URL('http://localhost:9200'),
|
||||
tls: 'tls options',
|
||||
agent: 'http agent options',
|
||||
id: 'custom node id',
|
||||
headers: { 'custom': 'headers' }
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|`auth`
|
||||
a|Your authentication data. You can use both basic authentication and
|
||||
{ref}/security-api-create-api-key.html[ApiKey]. +
|
||||
See <<authentication,Authentication>> for more details. +
|
||||
_Default:_ `null`
|
||||
|
||||
Basic authentication:
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
----
|
||||
{ref}/security-api-create-api-key.html[ApiKey] authentication:
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
apiKey: 'base64EncodedKey'
|
||||
}
|
||||
----
|
||||
Bearer authentication, useful for https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html[service account tokens]. Be aware that it does not handle automatic token refresh:
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
bearer: 'token'
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
|`maxRetries`
|
||||
|`number` - Max number of retries for each request. +
|
||||
_Default:_ `3`
|
||||
|
||||
|`requestTimeout`
|
||||
|`number` - Max request timeout in milliseconds for each request. +
|
||||
_Default:_ `30000`
|
||||
|
||||
|`pingTimeout`
|
||||
|`number` - Max ping request timeout in milliseconds for each request. +
|
||||
_Default:_ `3000`
|
||||
|
||||
|`sniffInterval`
|
||||
|`number, boolean` - Perform a sniff operation every `n` milliseconds. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`sniffOnStart`
|
||||
|`boolean` - Perform a sniff once the client is started. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`sniffEndpoint`
|
||||
|`string` - Endpoint to ping during a sniff. +
|
||||
_Default:_ `'_nodes/_all/http'`
|
||||
|
||||
|`sniffOnConnectionFault`
|
||||
|`boolean` - Perform a sniff on connection fault. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`resurrectStrategy`
|
||||
|`string` - Configure the node resurrection strategy. +
|
||||
_Options:_ `'ping'`, `'optimistic'`, `'none'` +
|
||||
_Default:_ `'ping'`
|
||||
|
||||
|`suggestCompression`
|
||||
|`boolean` - Adds `accept-encoding` header to every request. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`compression`
|
||||
|`string, boolean` - Enables gzip request body compression. +
|
||||
_Options:_ `'gzip'`, `false` +
|
||||
_Default:_ `false`
|
||||
|
||||
|`tls`
|
||||
|`http.SecureContextOptions` - tls https://nodejs.org/api/tls.html[configuration]. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`proxy`
|
||||
a|`string, URL` - If you are using an http(s) proxy, you can put its url here.
|
||||
The client will automatically handle the connection to it. +
|
||||
_Default:_ `null`
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://localhost:8080'
|
||||
})
|
||||
|
||||
// Proxy with basic authentication
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://user:pwd@localhost:8080'
|
||||
})
|
||||
----
|
||||
|
||||
|`agent`
|
||||
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options],
|
||||
or a function that returns an actual http agent instance. If you want to disable http agent use entirely
|
||||
(and disable the `keep-alive` feature), set the agent to `false`. +
|
||||
_Default:_ `null`
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
agent: { agent: 'options' }
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// the function takes as parameter the option
|
||||
// object passed to the Connection constructor
|
||||
agent: (opts) => new CustomAgent()
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// Disable agent and keep-alive
|
||||
agent: false
|
||||
})
|
||||
----
|
||||
|
||||
|`nodeFilter`
|
||||
a|`function` - Filter function that determines which nodes should not be used for a request. +
|
||||
_Default:_
|
||||
[source,js]
|
||||
----
|
||||
function defaultNodeFilter (node) {
|
||||
// avoid master only nodes
|
||||
if (node.roles.master === true &&
|
||||
node.roles.data === false &&
|
||||
node.roles.ingest === false) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
----
|
||||
|
||||
|`nodeSelector`
|
||||
a|`function` - custom selection strategy. +
|
||||
_Options:_ `'round-robin'`, `'random'`, custom function +
|
||||
_Default:_ `'round-robin'` +
|
||||
_Custom function example:_
|
||||
[source,js]
|
||||
----
|
||||
function nodeSelector (connections) {
|
||||
const index = calculateIndex()
|
||||
return connections[index]
|
||||
}
|
||||
----
|
||||
|
||||
|`generateRequestId`
|
||||
a|`function` - Function to generate the request id for every request; it takes
|
||||
two parameters, the request parameters and options. +
|
||||
By default it generates an incremental integer for every request. +
|
||||
_Custom function example:_
|
||||
[source,js]
|
||||
----
|
||||
function generateRequestId (params, options) {
|
||||
// your id generation logic
|
||||
// must be synchronous
|
||||
return 'id'
|
||||
}
|
||||
----
|
||||
|
||||
|`name`
|
||||
|`string, symbol` - The name to identify the client instance in the events. +
|
||||
_Default:_ `elasticsearch-js`
|
||||
|
||||
|`opaqueIdPrefix`
|
||||
|`string` - A string that will be used to prefix any `X-Opaque-Id` header. +
|
||||
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html#_x-opaque-id_support[`X-Opaque-Id` support] for more details. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`headers`
|
||||
|`object` - A set of custom headers to send in every request. +
|
||||
_Default:_ `{}`
|
||||
|
||||
|`context`
|
||||
|`object` - A custom object that you can use for observability in your events.
|
||||
It will be merged with the API level context option. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`enableMetaHeader`
|
||||
|`boolean` - If true, adds a header named `'x-elastic-client-meta'`, containing some minimal telemetry data,
|
||||
such as the client and platform version. +
|
||||
_Default:_ `true`
|
||||
|
||||
|`cloud`
|
||||
a|`object` - Custom configuration for connecting to
|
||||
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
for more details. +
|
||||
_Default:_ `null` +
|
||||
_Cloud configuration example:_
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|`disablePrototypePoisoningProtection`
|
||||
|`boolean`, `'proto'`, `'constructor'` - By the default the client will protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more. If needed you can disable prototype poisoning protection entirely or one of the two checks. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`caFingerprint`
|
||||
|`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxResponseSize`
|
||||
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it is higher, the request is aborted. It cannot be higher than `buffer.constants.MAX_STRING_LENGTH`. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxCompressedResponseSize`
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it is higher, the request is aborted. It cannot be higher than `buffer.constants.MAX_LENGTH`. +
|
||||
_Default:_ `null`
|
||||
|
||||
|===
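As a quick illustration, here is a hedged sketch that combines several of the options documented above; the values are placeholders, not recommendations:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  maxRetries: 5,                      // retry each failed request up to 5 times
  requestTimeout: 60000,              // 60 second request timeout
  compression: 'gzip',                // gzip request bodies
  suggestCompression: true,           // ask for compressed responses
  maxResponseSize: 100 * 1024 * 1024  // abort uncompressed responses larger than ~100 MB
})
----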
|
||||
804
docs/changelog.asciidoc
Normal file
@ -0,0 +1,804 @@
|
||||
[[changelog-client]]
|
||||
== Release notes
|
||||
|
||||
[discrete]
|
||||
=== 8.15.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.15.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== OpenTelemetry zero-code instrumentation support
|
||||
|
||||
For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
|
||||
See {jsclient}/observability.html#_opentelemetry[the docs]
|
||||
for more information.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.1
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `8.14`
|
||||
|
||||
Updated types based on fixes and changes to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.14.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== ES|QL object API helper
|
||||
|
||||
A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
|
||||
A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
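A hedged sketch of how the helper can be used (the `helpers.esql(...).toRecords()` shape is taken from the helpers documentation and is an assumption here, as is the existing `client` instance):

[source,js]
----
// assumes an existing `client` instance
const result = await client.helpers.esql({
  query: 'FROM my-index | LIMIT 10'
}).toRecords()

// result holds the rows of the ES|QL response converted to plain objects
console.log(result)
----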
|
||||
|
||||
[discrete]
|
||||
===== `onSuccess` callback added to bulk helper
|
||||
|
||||
The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
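For example (a hedged sketch; the exact shape of the callback argument is not specified in this entry, and the `documents` iterable and `client` instance are assumed):

[source,js]
----
// assumes an existing `client` instance and an iterable of source documents
const result = await client.helpers.bulk({
  datasource: documents,
  onDocument (doc) {
    return { index: { _index: 'my-index' } }
  },
  // called for each operation that completes successfully;
  // the argument shape shown here is only illustrative
  onSuccess (info) {
    console.log('operation succeeded', info)
  },
  onDrop (doc) {
    console.log('operation dropped', doc)
  }
})
----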
|
||||
|
||||
[discrete]
|
||||
===== Request retries are more polite
|
||||
|
||||
https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== 8.13.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Pin @elastic/transport to `~8.4.1`
|
||||
|
||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
|
||||
|
||||
[discrete]
|
||||
=== 8.13.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.13.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
|
||||
|
||||
When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
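For example, a client-level default such as `requestTimeout` now also applies to nodes passed as plain URL strings (a minimal sketch):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // nodes given as strings now inherit the client-level defaults below
  nodes: ['http://localhost:9200', 'http://localhost:9201'],
  requestTimeout: 60000
})
----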
|
||||
|
||||
[discrete]
|
||||
=== 8.12.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.4.1`
|
||||
|
||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.12 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Upgrade transport to 8.4.1 https://github.com/elastic/elasticsearch-js/pull/2137[#2137]
|
||||
|
||||
Upgrades `@elastic/transport` to 8.4.1 to resolve https://github.com/elastic/elastic-transport-js/pull/83[a bug] where arrays in error diagnostics were unintentionally transformed into objects.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix hang in bulk helper semaphore https://github.com/elastic/elasticsearch-js/pull/2027[#2027]
|
||||
|
||||
The failing state could be reached when a server's response times are slower than the `flushInterval`.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.12.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.11.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.4.0`
|
||||
|
||||
Switching from `^8.4.0` to `~8.4.0` ensures 8.11 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.11.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.11.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.11/release-notes-8.11.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Enhanced support for redacting potentially sensitive data https://github.com/elastic/elasticsearch-js/pull/2095[#2095]
|
||||
|
||||
`@elastic/transport` https://github.com/elastic/elastic-transport-js/releases/tag/v8.4.0[version 8.4.0] introduces enhanced measures for ensuring that request metadata attached to some `Error` objects is redacted. This functionality is primarily to address custom logging solutions that don't use common serialization methods like `JSON.stringify`, `console.log`, or `util.inspect`, which were already accounted for.
|
||||
|
||||
See <<redaction>> for more information.
|
||||
|
||||
[discrete]
|
||||
=== 8.10.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.4`
|
||||
|
||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.10.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.10.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.10/release-notes-8.10.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.9.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.4`
|
||||
|
||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.9 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.9.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Upgrade Transport https://github.com/elastic/elasticsearch-js/pull/1968[#1968]
|
||||
|
||||
Upgrades `@elastic/transport` to the latest patch release to fix https://github.com/elastic/elastic-transport-js/pull/69[a bug] that could cause the process to exit when handling malformed `HEAD` requests.
|
||||
|
||||
[discrete]
|
||||
=== 8.9.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.9.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
|
||||
|
||||
In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
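A hedged sketch of the new behavior (returning a tuple of the operation and the replacement document follows the linked helper docs; the `documents` iterable and `client` instance are assumed):

[source,js]
----
// assumes an existing `client` instance and an iterable of source documents
const result = await client.helpers.bulk({
  datasource: documents,
  onDocument (doc) {
    // returning [operation, document] replaces the document that is sent
    return [
      { index: { _index: 'my-index' } },
      { ...doc, ingestedAt: new Date().toISOString() }
    ]
  }
})
----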
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Updated `user-agent` header https://github.com/elastic/elasticsearch-js/pull/1954[#1954]
|
||||
|
||||
The `user-agent` header the client used to connect to Elasticsearch was using a non-standard format that has been improved.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.2`
|
||||
|
||||
Switching from `^8.3.2` to `~8.3.2` ensures 8.8 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.1
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.8.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.1.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix index drift bug in bulk helper https://github.com/elastic/elasticsearch-js/pull/1759[#1759]
|
||||
|
||||
Fixes a bug in the bulk helper that would cause `onDrop` to send back the wrong JSON document or error on a nonexistent document when an error occurred on a bulk HTTP request that contained a `delete` action.
|
||||
|
||||
[discrete]
|
||||
===== Fix a memory leak caused by an outdated version of Undici https://github.com/elastic/elasticsearch-js/pull/1902[#1902]
|
||||
|
||||
Undici 5.5.1, used by https://github.com/elastic/elastic-transport-js[elastic-transport-js], could create a memory leak when a high volume of requests created too many HTTP `abort` listeners. Upgrading Undici to 5.22.1 removed the memory leak.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.8.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.0.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix type declarations for legacy types with a body key https://github.com/elastic/elasticsearch-js/pull/1784[#1784]
|
||||
|
||||
Prior releases contained a bug where type declarations for legacy types that include a `body` key were not actually importing the type that includes the `body` key.
|
||||
|
||||
[discrete]
|
||||
=== 8.7.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.7.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.7.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.6.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.6.0
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to 8.3.1+ https://github.com/elastic/elasticsearch-js/pull/1802[#1802]
|
||||
|
||||
The `@elastic/transport` dependency has been bumped to `~8.3.1` to ensure
|
||||
fixes to the `maxResponseSize` option are available in the client.
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.6.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.6/release-notes-8.6.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.5.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.5.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.5/release-notes-8.5.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.4.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.4.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.4/release-notes-8.4.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.2.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.2.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.1.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Fix ndjson APIs https://github.com/elastic/elasticsearch-js/pull/1688[#1688]
|
||||
|
||||
The previous release contained a bug that broke the ndjson APIs.
|
||||
We have released `v8.2.0-patch.1` to address this.
|
||||
This release contains the same fix, and we strongly recommend upgrading to this version.
|
||||
|
||||
[discrete]
|
||||
===== Fix node shutdown apis https://github.com/elastic/elasticsearch-js/pull/1697[#1697]
|
||||
|
||||
The shutdown APIs weren't complete; this fix completes them.
|
||||
|
||||
[discrete]
|
||||
==== Types: move query keys to body https://github.com/elastic/elasticsearch-js/pull/1693[#1693]
|
||||
|
||||
The type definitions were wrongly representing the types of fields present in both the query and the body.
|
||||
|
||||
[discrete]
|
||||
=== 8.2.0
|
||||
|
||||
[discrete]
|
||||
==== Breaking changes
|
||||
|
||||
[discrete]
|
||||
===== Drop Node.js v12 https://github.com/elastic/elasticsearch-js/pull/1670[#1670]
|
||||
|
||||
According to our https://github.com/elastic/elasticsearch-js#nodejs-support[Node.js support matrix].
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.2`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== More lenient parameter checks https://github.com/elastic/elasticsearch-js/pull/1662[#1662]
|
||||
|
||||
When creating a new client, an `undefined` `caFingerprint` no longer triggers an error for an http connection.
|
||||
|
||||
[discrete]
|
||||
===== Update TypeScript docs and export estypes https://github.com/elastic/elasticsearch-js/pull/1675[#1675]
|
||||
|
||||
You can import the full TypeScript request & response definitions as follows:
|
||||
[source,ts]
|
||||
----
|
||||
import { estypes } from '@elastic/elasticsearch'
|
||||
----
|
||||
|
||||
If you need the legacy definitions with the body, you can do the following:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
import { estypesWithBody } from '@elastic/elasticsearch'
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Updated hpagent to the latest version https://github.com/elastic/elastic-transport-js/pull/49[transport/#49]
|
||||
|
||||
You can find the related changes https://github.com/delvedor/hpagent/releases/tag/v1.0.0[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.1.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.1/release-notes-8.1.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Export SniffingTransport https://github.com/elastic/elasticsearch-js/pull/1653[#1653]
|
||||
|
||||
The client now exports the `SniffingTransport` class.
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix onFlushTimeout timer not being cleared when upstream errors https://github.com/elastic/elasticsearch-js/pull/1616[#1616]
|
||||
|
||||
Fixes a memory leak caused by an error in the upstream dataset of the bulk helper.
|
||||
|
||||
[discrete]
|
||||
===== Cleanup abort listener https://github.com/elastic/elastic-transport-js/pull/42[transport/#42]
|
||||
|
||||
The legacy http client was not cleaning up the abort listener, which could cause a memory leak.
|
||||
|
||||
[discrete]
|
||||
===== Improve undici performances https://github.com/elastic/elastic-transport-js/pull/41[transport/#41]
|
||||
|
||||
Improves stream body collection and the keep-alive timeout.
|
||||
|
||||
[discrete]
|
||||
=== 8.0.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.0/release-notes-8.0.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Drop old typescript definitions
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Medium*
|
||||
|
||||
The current TypeScript definitions will be removed from the client, and the new definitions, which contain request and response definitions as well, will be shipped by default.
|
||||
|
||||
[discrete]
|
||||
===== Drop callback-style API
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Large*
|
||||
|
||||
Maintaining both API styles is not a problem per se, but it makes error handling more convoluted due to async stack traces.
|
||||
Moving to a full-promise API will solve this issue.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// callback-style api
|
||||
client.search({ params }, { options }, (err, result) => {
|
||||
console.log(err || result)
|
||||
})
|
||||
|
||||
// promise-style api
|
||||
client.search({ params }, { options })
|
||||
.then(console.log)
|
||||
.catch(console.log)
|
||||
|
||||
// async-style (sugar syntax on top of promises)
|
||||
const response = await client.search({ params }, { options })
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
If you are already using the promise-style API, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Remove the current abort API and use the new AbortController standard
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The old abort API makes sense for callbacks, but it's annoying to use with promises:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// callback-style api
|
||||
const request = client.search({ params }, { options }, (err, result) => {
|
||||
console.log(err) // RequestAbortedError
|
||||
})
|
||||
|
||||
request.abort()
|
||||
|
||||
// promise-style api
|
||||
const promise = client.search({ params }, { options })
|
||||
|
||||
promise
|
||||
.then(console.log)
|
||||
.catch(console.log) // RequestAbortedError
|
||||
|
||||
promise.abort()
|
||||
----
|
||||
|
||||
Node v12 has added the standard https://nodejs.org/api/globals.html#globals_class_abortcontroller[`AbortController`] API, which is designed to work well with both callbacks and promises.
|
||||
[source,js]
|
||||
----
|
||||
const ac = new AbortController()
|
||||
client.search({ params }, { signal: ac.signal })
|
||||
.then(console.log)
|
||||
.catch(console.log) // RequestAbortedError
|
||||
|
||||
ac.abort()
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove the body key from the request
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Thanks to the new types we are developing, we now know exactly where a parameter should go.
|
||||
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
|
||||
|
||||
This could be a rather big breaking change, so a dual solution could be used during the 8.x lifecycle (accepting body keys without them being wrapped in the body, as well as the current solution).
|
||||
|
||||
To convert code from 7.x, you need to remove the `body` parameter from all endpoint requests.
|
||||
For instance, this is an example for the `search` endpoint:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// to
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Migrate to new separate transport
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
The separated transport has been rewritten in TypeScript and has already dropped the callback style API.
|
||||
Given that it is now separated, most of the Elasticsearch-specific concepts have been removed, and the client will likely need to extend parts of it to reintroduce them.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== The returned value of API calls is the body and not the HTTP related keys
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
|
||||
The client will expose a new request-specific option to still get the full response details.
|
||||
|
||||
The new behaviour returns the `body` value directly as the response.
|
||||
If you want the 7.x response format, you need to add `meta: true` to the request options.
|
||||
This will return all the HTTP meta information, including the `body`.
|
||||
|
||||
For instance, this is an example for the `search` endpoint:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }
|
||||
|
||||
// to
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
})
|
||||
console.log(response) // SearchResponse
|
||||
|
||||
// with a bit of TypeScript and JavaScript magic...
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}, {
|
||||
meta: true
|
||||
})
|
||||
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Use a weighted connection pool
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
Move from the current cluster connection pool to a weight-based implementation.
|
||||
This new implementation offers better performance and runs less code in the background; the old connection pool can still be used.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Migrate to the "undici" http client
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
The default HTTP client will no longer be the Node.js core HTTP client, but https://github.com/nodejs/undici[undici] instead.
|
||||
Undici is a brand new HTTP client written from scratch; it offers vastly improved performance and better support for promises.
|
||||
Furthermore, it offers comprehensive and predictable error handling. The old HTTP client can still be used.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
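If you prefer to keep using the Node.js core HTTP client, a hedged sketch (assuming the `HttpConnection` export described in the client documentation) looks like this:

[source,js]
----
const { Client, HttpConnection } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  // opt back in to the Node.js core http client instead of undici
  Connection: HttpConnection
})
----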
|
||||
|
||||
[discrete]
|
||||
===== Drop support for old camelCased keys
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Medium*
|
||||
|
||||
Currently, every path or query parameter could be expressed in both `snake_case` and `camelCase`. Internally the client will convert everything to `snake_case`.
|
||||
This was done in an effort to reduce the friction of migrating from the legacy to the new client, but now it no longer makes sense.
|
||||
If you are already using `snake_case` keys, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Rename `ssl` option to `tls`
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
People usually refer to this as `tls`; furthermore, we use the tls API internally, and Node.js refers to it as tls everywhere.
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
ssl: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
|
||||
// after
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
tls: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove prototype poisoning protection
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Prototype poisoning protection is very useful, but it can cause performance issues with big payloads.
|
||||
In v8 it will be removed, and the documentation will show how to add it back with a custom serializer.
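A hedged sketch of how protection could be added back, assuming the `Serializer` client option shown in the advanced configuration section and the `secure-json-parse` package with its `protoAction`/`constructorAction` options:

[source,js]
----
const { Client, Serializer } = require('@elastic/elasticsearch')
const sjson = require('secure-json-parse')

class SecureSerializer extends Serializer {
  deserialize (json) {
    // reject payloads that contain __proto__ or constructor.prototype keys
    return sjson.parse(json, undefined, {
      protoAction: 'error',
      constructorAction: 'error'
    })
  }
}

const client = new Client({
  node: 'http://localhost:9200',
  Serializer: SecureSerializer
})
----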
|
||||
|
||||
[discrete]
|
||||
===== Remove client extensions API
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Large*
|
||||
|
||||
Nowadays the client supports the entire Elasticsearch API, and the `transport.request` method can be used if necessary. The client extensions API has no reason to exist.
|
||||
[source,js]
|
||||
----
|
||||
client.extend('utility.index', ({ makeRequest }) => {
|
||||
return function _index (params, options) {
|
||||
// your code
|
||||
}
|
||||
})
|
||||
|
||||
client.utility.index(...)
|
||||
----
|
||||
|
||||
If you weren't using client extensions, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Move to TypeScript
|
||||
|
||||
*Breaking: No* | *Migration effort: None*
|
||||
|
||||
The new separated transport is already written in TypeScript, and it makes sense that the client v8 will be fully written in TypeScript as well.
|
||||
|
||||
[discrete]
|
||||
===== Move from emitter-like interface to a diagnostic method
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Currently, the client offers a subset of the methods of the `EventEmitter` class; v8 will ship with a `diagnostic` property, which will be a proper event emitter.
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
client.on('request', console.log)
|
||||
|
||||
// to
|
||||
client.diagnostic.on('request', console.log)
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove username & password properties from Cloud configuration
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The Cloud configuration does not support ApiKey and Bearer auth, while the `auth` option does.
|
||||
There is no need to keep the legacy basic auth support in the cloud configuration.
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>',
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
|
||||
// after
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
If you are already passing the basic auth options in the `auth` configuration, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Calling `client.close` will reject new requests
|
||||
|
||||
Once you call `client.close` every new request after that will be rejected with a `NoLivingConnectionsError`. In-flight requests will be executed normally unless an in-flight request requires a retry, in which case it will be rejected.
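A minimal sketch of this behavior:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

await client.close()

try {
  await client.info()
} catch (err) {
  console.log(err.name) // NoLivingConnectionsError
}
----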
|
||||
|
||||
[discrete]
|
||||
===== Parameters rename
|
||||
|
||||
- `ilm.delete_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `ilm.get_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `ilm.put_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `snapshot.cleanup_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.create_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.delete_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.get_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.verify_repository`: `repository` parameter has been renamed to `name`
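For instance, a hedged before/after sketch based on the renames listed above (assuming an existing `client` instance):

[source,js]
----
// 7.x
await client.ilm.deleteLifecycle({ policy: 'my-policy' })
await client.snapshot.getRepository({ repository: 'my-repo' })

// 8.x
await client.ilm.deleteLifecycle({ name: 'my-policy' })
await client.snapshot.getRepository({ name: 'my-repo' })
----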
|
||||
|
||||
[discrete]
|
||||
===== Removal of snake_cased methods
|
||||
|
||||
The v7 client provided snake_cased methods, such as `client.delete_by_query`. This is no longer supported; now only camelCased methods are present.
|
||||
So `client.delete_by_query` can now be accessed as `client.deleteByQuery`.
|
||||
|
||||
36
docs/child.asciidoc
Normal file
@ -0,0 +1,36 @@
|
||||
[[child]]
|
||||
=== Creating a child client
|
||||
|
||||
There are some use cases where you may need multiple instances of the client.
|
||||
You can easily do that by calling `new Client()` as many times as you need, but
|
||||
you will lose all the benefits of using a single client, such as the long-lived
|
||||
connections and the connection pool handling. To avoid this problem, the
|
||||
client offers a `child` API, which returns a new client instance that shares the
|
||||
connection pool with the parent client.
|
||||
|
||||
NOTE: The event emitter is shared between the parent and the child(ren). If you
|
||||
extend the parent client, the child client will have the same extensions, while
|
||||
if the child client adds an extension, the parent client will not be extended.
|
||||
|
||||
You can pass to the `child` every client option you would pass to a normal
|
||||
client, except the connection pool-specific options (`ssl`, `agent`, `pingTimeout`,
|
||||
`Connection`, and `resurrectStrategy`).
|
||||
|
||||
CAUTION: If you call `close` in any of the parent/child clients, every client
|
||||
will be closed.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const child = client.child({
|
||||
headers: { 'x-foo': 'bar' },
|
||||
requestTimeout: 1000
|
||||
})
|
||||
|
||||
client.info().then(console.log, console.log)
|
||||
child.info().then(console.log, console.log)
|
||||
----
|
||||
11
docs/configuration.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
[[client-configuration]]
|
||||
== Configuration
|
||||
|
||||
|
||||
The client is designed to be easily configured for your needs. In the following
|
||||
section, you can see the possible options that you can use to configure it.
|
||||
|
||||
* <<basic-config>>
|
||||
* <<advanced-config>>
|
||||
* <<child>>
|
||||
* <<client-testing>>
|
||||
719
docs/connecting.asciidoc
Normal file
@ -0,0 +1,719 @@
|
||||
[[client-connecting]]
|
||||
== Connecting
|
||||
|
||||
This page contains the information you need to connect and use the Client with
|
||||
{es}.
|
||||
|
||||
**On this page**
|
||||
|
||||
* <<authentication, Authentication options>>
|
||||
* <<client-usage, Using the client>>
|
||||
* <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
|
||||
* <<client-connect-proxy, Connecting through a proxy>>
|
||||
* <<client-error-handling, Handling errors>>
|
||||
* <<keep-alive, Keep-alive connections>>
|
||||
* <<close-connections, Closing a client's connections>>
|
||||
* <<product-check, Automatic product check>>
|
||||
|
||||
[[authentication]]
|
||||
[discrete]
|
||||
=== Authentication
|
||||
|
||||
This document contains code snippets to show you how to connect to various {es}
|
||||
providers.
|
||||
|
||||
|
||||
[discrete]
|
||||
[[auth-ec]]
|
||||
==== Elastic Cloud
|
||||
|
||||
If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers
|
||||
an easy way to connect to it via the `cloud` option. You must pass the Cloud ID
|
||||
that you can find in the cloud console, then your username and password inside
|
||||
the `auth` option.
|
||||
|
||||
NOTE: When connecting to Elastic Cloud, the client will automatically enable
|
||||
both request and response compression by default, since it yields significant
|
||||
throughput improvements. Moreover, the client will also set the tls option
|
||||
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
|
||||
override these options in your own configuration.
|
||||
|
||||
IMPORTANT: Do not enable sniffing when using Elastic Cloud. Since the nodes are
|
||||
behind a load balancer, Elastic Cloud will take care of everything for you.
|
||||
Take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here]
|
||||
to know more.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[connect-self-managed-new]]
|
||||
=== Connecting to a self-managed cluster
|
||||
|
||||
By default {es} will start with security features like authentication and TLS
|
||||
enabled. To connect to the {es} cluster you'll need to configure the Node.js {es}
|
||||
client to use HTTPS with the generated CA certificate in order to make requests
|
||||
successfully.
|
||||
|
||||
If you're just getting started with {es} we recommend reading the documentation
|
||||
on https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html[configuring]
|
||||
and
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html[starting {es}]
|
||||
to ensure your cluster is running as expected.
|
||||
|
||||
When you start {es} for the first time you'll see a distinct block like the one
|
||||
below in the output from {es} (you may have to scroll up if it's been a while):
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
|
||||
-> Elasticsearch security features have been automatically configured!
|
||||
-> Authentication is enabled and cluster connections are encrypted.
|
||||
|
||||
-> Password for the elastic user (reset with `bin/elasticsearch-reset-password -u elastic`):
|
||||
lhQpLELkjkrawaBoaz0Q
|
||||
|
||||
-> HTTP CA certificate SHA-256 fingerprint:
|
||||
a52dd93511e8c6045e21f16654b77c9ee0f34aea26d9f40320b531c474676228
|
||||
...
|
||||
|
||||
----
|
||||
|
||||
Depending on the circumstances there are two options for verifying the HTTPS
|
||||
connection, either verifying with the CA certificate itself or via the HTTP CA
|
||||
certificate fingerprint.
|
||||
|
||||
[discrete]
|
||||
[[auth-tls]]
|
||||
==== TLS configuration
|
||||
|
||||
The generated root CA certificate can be found in the `certs` directory in your
|
||||
{es} config location (`$ES_CONF_PATH/certs/http_ca.crt`). If you're running {es}
|
||||
in Docker there is
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html[additional documentation for retrieving the CA certificate].
|
||||
|
||||
Without any additional configuration you can specify `https://` node urls, and
|
||||
the certificates used to sign these requests will be verified. To turn off
|
||||
certificate verification, you must specify a `tls` object in the top-level
|
||||
config and set `rejectUnauthorized: false`. The default `tls` values are the
|
||||
same that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
|
||||
uses.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
},
|
||||
tls: {
|
||||
ca: fs.readFileSync('./http_ca.crt'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-ca-fingerprint]]
|
||||
==== CA fingerprint
|
||||
|
||||
You can configure the client to only trust certificates that are signed by a specific CA certificate
|
||||
(CA certificate pinning) by providing a `caFingerprint` option.
|
||||
This will verify that the fingerprint of the CA certificate that has signed
|
||||
the certificate of the server matches the supplied value.
|
||||
You must configure a SHA256 digest.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://example.com'
|
||||
auth: { ... },
|
||||
// the fingerprint (SHA256) of the CA certificate that is used to sign
|
||||
// the certificate that the Elasticsearch node presents for TLS.
|
||||
caFingerprint: '20:0D:CA:FA:76:...',
|
||||
tls: {
|
||||
// might be required if it's a self-signed certificate
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
The certificate fingerprint can be calculated using `openssl x509` with the
|
||||
certificate file:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
openssl x509 -fingerprint -sha256 -noout -in /path/to/http_ca.crt
|
||||
----
|
||||
|
||||
If you don't have access to the generated CA file from {es} you can use the
|
||||
following script to output the root CA fingerprint of the {es} instance with
|
||||
`openssl s_client`:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
# Replace the values of 'localhost' and '9200' to the
|
||||
# corresponding host and port values for the cluster.
|
||||
openssl s_client -connect localhost:9200 -servername localhost -showcerts </dev/null 2>/dev/null \
|
||||
| openssl x509 -fingerprint -sha256 -noout -in /dev/stdin
|
||||
----
|
||||
|
||||
The output of `openssl x509` will look something like this:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
SHA256 Fingerprint=A5:2D:D9:35:11:E8:C6:04:5E:21:F1:66:54:B7:7C:9E:E0:F3:4A:EA:26:D9:F4:03:20:B5:31:C4:74:67:62:28
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[connect-no-security]]
|
||||
=== Connecting without security enabled
|
||||
|
||||
WARNING: Running {es} without security enabled is not recommended.
|
||||
|
||||
If your cluster is configured with
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html[security explicitly disabled]
|
||||
then you can connect via HTTP:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://example.com'
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-strategies]]
|
||||
=== Authentication strategies
|
||||
|
||||
Below you can find all the supported authentication strategies.
|
||||
|
||||
[discrete]
|
||||
[[auth-apikey]]
|
||||
==== ApiKey authentication
|
||||
|
||||
You can use the
|
||||
{ref-7x}/security-api-create-api-key.html[ApiKey]
|
||||
authentication by passing the `apiKey` parameter via the `auth` option. The
|
||||
`apiKey` parameter can be either a base64 encoded string or an object with the
|
||||
values that you can obtain from the
|
||||
{ref-7x}/security-api-create-api-key.html[create api key endpoint].
|
||||
|
||||
NOTE: If you provide both basic authentication credentials and the ApiKey
|
||||
configuration, the ApiKey takes precedence.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
apiKey: 'base64EncodedKey'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
apiKey: {
|
||||
id: 'foo',
|
||||
api_key: 'bar'
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-bearer]]
|
||||
==== Bearer authentication
|
||||
|
||||
You can provide your credentials by passing the `bearer` token
|
||||
parameter via the `auth` option.
|
||||
Useful for https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html[service account tokens].
|
||||
Be aware that it does not handle automatic token refresh.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
bearer: 'token'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
[[auth-basic]]
==== Basic authentication

You can provide your credentials by passing the `username` and `password`
parameters via the `auth` option.

NOTE: If you provide both basic authentication credentials and the ApiKey
configuration, the ApiKey will take precedence.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----

Otherwise, you can provide your credentials in the node(s) URL.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://username:password@localhost:9200'
})
----

[discrete]
[[client-usage]]
=== Usage

Using the client is straightforward: it supports all the public APIs of {es},
and every method exposes the same signature.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

const result = await client.search({
  index: 'my-index',
  query: {
    match: { hello: 'world' }
  }
})
----

The returned value of every API call is the response body from {es}.
If you need to access additional metadata, such as the status code or headers,
you must specify `meta: true` in the request options:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

const result = await client.search({
  index: 'my-index',
  query: {
    match: { hello: 'world' }
  }
}, { meta: true })
----

In this case, the result will be:
[source,ts]
----
{
  body: object | boolean
  statusCode: number
  headers: object
  warnings: [string],
  meta: object
}
----

NOTE: The body is a boolean value when you use `HEAD` APIs.

[discrete]
==== Aborting a request

If needed, you can abort a running request by using the `AbortController` standard.

CAUTION: If you abort a request, the request will fail with a
`RequestAbortedError`.

[source,js]
----
const AbortController = require('node-abort-controller')
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

const abortController = new AbortController()
setImmediate(() => abortController.abort())

const result = await client.search({
  index: 'my-index',
  query: {
    match: { hello: 'world' }
  }
}, { signal: abortController.signal })
----

[discrete]
==== Request specific options

If needed, you can pass request specific options in a second object:

[source,js]
----
const result = await client.search({
  index: 'my-index',
  body: {
    query: {
      match: { hello: 'world' }
    }
  }
}, {
  ignore: [404],
  maxRetries: 3
})
----

The supported request specific options are:
[cols=2*]
|===
|`ignore`
|`[number]` - HTTP status codes which should not be considered errors for this request. +
_Default:_ `null`

|`requestTimeout`
|`number` - Max request timeout for the request in milliseconds; it overrides the client default. +
_Default:_ `30000`

|`maxRetries`
|`number` - Max number of retries for the request; it overrides the client default. +
_Default:_ `3`

|`compression`
|`string, boolean` - Enables body compression for the request. +
_Options:_ `false`, `'gzip'` +
_Default:_ `false`

|`asStream`
|`boolean` - Instead of getting the parsed body back, you get the raw Node.js stream of data. +
_Default:_ `false`

|`headers`
|`object` - Custom headers for the request. +
_Default:_ `null`

|`querystring`
|`object` - Custom querystring for the request. +
_Default:_ `null`

|`id`
|`any` - Custom request id. _(overrides the top level request id generator)_ +
_Default:_ `null`

|`context`
|`any` - Custom object per request. _(you can use it to pass data to the client's events)_ +
_Default:_ `null`

|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it's higher, it will abort the request. It cannot be higher than `buffer.constants.MAX_STRING_LENGTH`. +
_Default:_ `null`

|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it's higher, it will abort the request. It cannot be higher than `buffer.constants.MAX_LENGTH`. +
_Default:_ `null`

|`signal`
|`AbortSignal` - The AbortSignal instance to allow request abortion. +
_Default:_ `null`

|===

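As a minimal sketch of how a few of these options combine, the following example requests the raw body as a stream and attaches a custom header. The header name `x-request-tag` is only a placeholder for this illustration, and the manual stream consumption assumes the default behavior described above for `asStream`:

[source,js]
----
const result = await client.search({
  index: 'my-index',
  query: { match: { hello: 'world' } }
}, {
  // get the raw Node.js stream instead of the parsed body
  asStream: true,
  // hypothetical custom header; replace it with whatever your infrastructure expects
  headers: { 'x-request-tag': 'reporting' }
})

// consume the stream and parse the payload manually
let payload = ''
result.setEncoding('utf8')
for await (const chunk of result) {
  payload += chunk
}
console.log(JSON.parse(payload))
----
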
[discrete]
[[client-faas-env]]
=== Using the Client in a Function-as-a-Service Environment

This section illustrates the best practices for leveraging the {es} client in a Function-as-a-Service (FaaS) environment.
The most influential optimization is to initialize the client outside of the function, in the global scope.
This practice not only improves performance but also enables background functionality such as https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[sniffing].
The following examples provide a skeleton for the best practices.

[discrete]
==== GCP Cloud Functions

[source,js]
----
'use strict'

const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // client initialisation
})

exports.testFunction = async function (req, res) {
  // use the client
}
----

[discrete]
==== AWS Lambda

[source,js]
----
'use strict'

const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // client initialisation
})

exports.handler = async function (event, context) {
  // use the client
}
----

[discrete]
==== Azure Functions

[source,js]
----
'use strict'

const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // client initialisation
})

module.exports = async function (context, req) {
  // use the client
}
----

Resources used to assess these recommendations:

- https://cloud.google.com/functions/docs/bestpractices/tips#use_global_variables_to_reuse_objects_in_future_invocations[GCP Cloud Functions: Tips & Tricks]
- https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html[Best practices for working with AWS Lambda functions]
- https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python?tabs=azurecli-linux%2Capplication-level#global-variables[Azure Functions Python developer guide]
- https://docs.aws.amazon.com/lambda/latest/operatorguide/global-scope.html[AWS Lambda: Comparing the effect of global scope]

[discrete]
[[client-connect-proxy]]
=== Connecting through a proxy

_Added in `v7.10.0`_

If you need to pass through an http(s) proxy for connecting to {es}, the client
offers a handy configuration option out of the box. Under the
hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.

IMPORTANT: In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations.
To use a proxy, you will need to use the `HttpConnection` class from `@elastic/transport` instead.

[source,js]
----
import { HttpConnection } from '@elastic/transport'

const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://localhost:8080',
  Connection: HttpConnection,
})
----

Basic authentication is supported as well:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://user:pwd@localhost:8080',
  Connection: HttpConnection,
})
----

If you are connecting through a non-http(s) proxy, such as a `socks5` or `pac`,
you can use the `agent` option to configure it.

[source,js]
----
const SocksProxyAgent = require('socks-proxy-agent')
const client = new Client({
  node: 'http://localhost:9200',
  agent () {
    return new SocksProxyAgent('socks://127.0.0.1:1080')
  },
  Connection: HttpConnection,
})
----

[discrete]
[[client-error-handling]]
=== Error handling

The client exposes a variety of error objects that you can use to enhance your
error handling. You can find all the error objects inside the `errors` key in
the client.

[source,js]
----
const { errors } = require('@elastic/elasticsearch')
console.log(errors)
----

You can find the errors exported by the client in the table below.

[cols=3*]
|===
|*Error*
|*Description*
|*Properties*

|`ElasticsearchClientError`
|Every error inherits from this class; it is the basic error generated by the client.
a|* `name` - `string`
* `message` - `string`

|`TimeoutError`
|Generated when a request exceeds the `requestTimeout` option.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`ConnectionError`
|Generated when an error occurs during the request; it can be a connection error or a malformed stream of data.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`RequestAbortedError`
|Generated if the user calls the `request.abort()` method.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`NoLivingConnectionsError`
|Given the configuration, the ConnectionPool was not able to find a usable Connection for this request.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`SerializationError`
|Generated if the serialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `object`, the object to serialize

|`DeserializationError`
|Generated if the deserialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `string`, the string to deserialize

|`ConfigurationError`
|Generated if there is a malformed configuration or parameter.
a|* `name` - `string`
* `message` - `string`

|`ResponseError`
|Generated in case of a `4xx` or `5xx` response.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
* `body` - `object`, the response body
* `statusCode` - `number`, the response status code
* `headers` - `object`, the response headers

|===

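As a minimal sketch of how these classes can be used in practice, the following example distinguishes an HTTP-level error returned by {es} from a transport-level failure; the index name and query are only illustrative:

[source,js]
----
const { Client, errors } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

try {
  await client.search({
    index: 'my-index',
    query: { match: { hello: 'world' } }
  })
} catch (err) {
  if (err instanceof errors.ResponseError) {
    // Elasticsearch replied with a 4xx or 5xx status code
    console.error(err.statusCode, err.body)
  } else if (err instanceof errors.ConnectionError) {
    // the request never reached the cluster
    console.error('Connection failed:', err.message)
  } else {
    throw err
  }
}
----
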
[[keep-alive]]
[discrete]
=== Keep-alive connections

By default, the client uses persistent, keep-alive connections to reduce the overhead of creating a new HTTP connection for each Elasticsearch request.
If you are using the default `UndiciConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 10 minutes.
If you are using the legacy `HttpConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 1 minute.

If you need to disable keep-alive connections, you can override the HTTP agent with your preferred https://nodejs.org/api/http.html#http_new_agent_options[HTTP agent options]:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // the function takes as parameter the option
  // object passed to the Connection constructor
  agent: (opts) => new CustomAgent()
})
----

Or you can disable the HTTP agent entirely:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // Disable agent and keep-alive
  agent: false
})
----

[discrete]
[[close-connections]]
=== Closing a client's connections

If you would like to close all open connections being managed by an instance of the client, use the `close()` function:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200'
});
client.close();
----

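In a long-running service this is typically done during graceful shutdown. A minimal sketch, assuming the client instance is in scope and that your process receives `SIGTERM`:

[source,js]
----
process.on('SIGTERM', async () => {
  // release the pooled sockets before the process exits
  await client.close()
  process.exit(0)
})
----
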
[discrete]
[[product-check]]
=== Automatic product check

Since v7.14.0, the client performs a required product check before the first call.
This pre-flight product check allows the client to establish the version of Elasticsearch
that it is communicating with. The product check requires one additional HTTP request to
be sent to the server as part of the request pipeline before the main API call is sent.
In most cases, this will succeed during the very first API call that the client sends.
Once the product check completes, no further product check HTTP requests are sent for
subsequent API calls.

@ -1,11 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getDataStream({
|
||||
name: "my-data-stream",
|
||||
filter_path: "data_streams.indices.index_name",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,46 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
range: {
|
||||
year: {
|
||||
gt: 2023,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "elastic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
aggs: {
|
||||
topics: {
|
||||
terms: {
|
||||
field: "topic",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "rerank",
|
||||
inference_id: "my-rerank-model",
|
||||
inference_config: {
|
||||
service: "cohere",
|
||||
service_settings: {
|
||||
model_id: "rerank-english-v3.0",
|
||||
api_key: "{{COHERE_API_KEY}}",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,7 +4,7 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000002",
|
||||
index: "my-index-000003",
|
||||
mappings: {
|
||||
properties: {
|
||||
metrics: {
|
||||
@ -29,7 +29,7 @@ const response = await client.indices.create({
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.indices.getMapping({
|
||||
index: "my-index-000002",
|
||||
index: "my-index-000003",
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
@ -1,8 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.reroute();
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,42 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-rank-vectors-bit",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_vector: {
|
||||
type: "rank_vectors",
|
||||
element_type: "bit",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.bulk({
|
||||
index: "my-rank-vectors-bit",
|
||||
refresh: "true",
|
||||
operations: [
|
||||
{
|
||||
index: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: [127, -127, 0, 1, 42],
|
||||
},
|
||||
{
|
||||
index: {
|
||||
_id: "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: "8100012a7f",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
@ -1,49 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example_nested",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
nested: {
|
||||
path: "nested_field",
|
||||
inner_hits: {
|
||||
name: "nested_vector",
|
||||
_source: false,
|
||||
fields: ["nested_field.paragraph_id"],
|
||||
},
|
||||
query: {
|
||||
knn: {
|
||||
field: "nested_field.nested_vector",
|
||||
query_vector: [1, 0, 0.5],
|
||||
k: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "ai",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: ["topic"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,13 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.asyncQuery({
|
||||
format: "json",
|
||||
query:
|
||||
"\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
|
||||
include_ccs_metadata: true,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,7 +4,12 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "cooking_blog",
|
||||
index: "idx",
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "cooking_blog",
|
||||
query: {
|
||||
match: {
|
||||
title: {
|
||||
query: "fluffy pancakes breakfast",
|
||||
minimum_should_match: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,57 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "elastic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
query_string: {
|
||||
query:
|
||||
"(information retrieval) OR (artificial intelligence)",
|
||||
default_field: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
knn: {
|
||||
field: "vector",
|
||||
query_vector: [0.23, 0.67, 0.89],
|
||||
k: 3,
|
||||
num_candidates: 5,
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
size: 1,
|
||||
explain: true,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,18 +5,20 @@
|
||||
----
|
||||
const response = await client.searchApplication.renderQuery({
|
||||
name: "my-app",
|
||||
params: {
|
||||
query_string: "my first query",
|
||||
text_fields: [
|
||||
{
|
||||
name: "title",
|
||||
boost: 5,
|
||||
},
|
||||
{
|
||||
name: "description",
|
||||
boost: 1,
|
||||
},
|
||||
],
|
||||
body: {
|
||||
params: {
|
||||
query_string: "my first query",
|
||||
text_fields: [
|
||||
{
|
||||
name: "title",
|
||||
boost: 5,
|
||||
},
|
||||
{
|
||||
name: "description",
|
||||
boost: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
@ -1,21 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "watsonx-embeddings",
|
||||
inference_config: {
|
||||
service: "watsonxai",
|
||||
service_settings: {
|
||||
api_key: "<api_key>",
|
||||
url: "<url>",
|
||||
model_id: "ibm/slate-30m-english-rtrvr",
|
||||
project_id: "<project_id>",
|
||||
api_version: "2024-03-14",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,16 +5,10 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
properties: {
|
||||
f: {
|
||||
type: "scaled_float",
|
||||
docs/doc_examples/0e83f140237d75469a428ff403564bb5.asciidoc (new file, 15 lines)
@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.putSettings({
|
||||
persistent: {
|
||||
"cluster.routing.allocation.disk.watermark.low": "100gb",
|
||||
"cluster.routing.allocation.disk.watermark.high": "50gb",
|
||||
"cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
|
||||
"cluster.info.update.interval": "1m",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,16 +5,10 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
properties: {
|
||||
text: {
|
||||
type: "text",
|
||||
@ -1,19 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.chatCompletionUnified({
|
||||
inference_id: "openai-completion",
|
||||
chat_completion_request: {
|
||||
model: "gpt-4o",
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: "What is Elastic?",
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,11 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.addBlock({
|
||||
index: ".ml-anomalies-custom-example",
|
||||
block: "read_only",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.update({
|
||||
index: ".elastic-connectors",
|
||||
id: "connector_id",
|
||||
doc: {
|
||||
features: {
|
||||
native_connector_api_keys: {
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,25 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.createFrom({
|
||||
source: "my-index",
|
||||
dest: "my-new-index",
|
||||
create_from: {
|
||||
settings_override: {
|
||||
index: {
|
||||
number_of_shards: 5,
|
||||
},
|
||||
},
|
||||
mappings_override: {
|
||||
properties: {
|
||||
field2: {
|
||||
type: "boolean",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,7 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.sparseEmbedding({
|
||||
const response = await client.inference.inference({
|
||||
task_type: "sparse_embedding",
|
||||
inference_id: "my-elser-model",
|
||||
input:
|
||||
"The sky above the port was the color of television tuned to a dead channel.",
|
||||
|
||||
@ -1,15 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.update({
|
||||
inference_id: "my-inference-endpoint",
|
||||
inference_config: {
|
||||
service_settings: {
|
||||
api_key: "<API_KEY>",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,16 +4,15 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-bit-vectors",
|
||||
index: "test-index",
|
||||
query: {
|
||||
script_score: {
|
||||
nested: {
|
||||
path: "inference_field.inference.chunks",
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "dotProduct(params.query_vector, 'my_dense_vector')",
|
||||
params: {
|
||||
query_vector: [8, 5, -15, 1, -7],
|
||||
sparse_vector: {
|
||||
field: "inference_field.inference.chunks.embeddings",
|
||||
inference_id: "my-inference-id",
|
||||
query: "mountain lake",
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -5,16 +5,10 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
properties: {
|
||||
date: {
|
||||
type: "date_nanos",
|
||||
@ -1,19 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.connector.updateConfiguration({
|
||||
connector_id: "my-connector-id",
|
||||
values: {
|
||||
host: "127.0.0.1",
|
||||
port: 5432,
|
||||
username: "myuser",
|
||||
password: "mypassword",
|
||||
database: "chinook",
|
||||
schema: "public",
|
||||
tables: "album,artist",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,10 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.cancelMigrateReindex({
|
||||
index: "my-data-stream",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,28 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-bit-vectors",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "dotProduct(params.query_vector, 'my_dense_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
0.23, 1.45, 3.67, 4.89, -0.56, 2.34, 3.21, 1.78, -2.45, 0.98, -0.12,
|
||||
3.45, 4.56, 2.78, 1.23, 0.67, 3.89, 4.12, -2.34, 1.56, 0.78, 3.21,
|
||||
4.12, 2.45, -1.67, 0.34, -3.45, 4.56, -2.78, 1.23, -0.67, 3.89,
|
||||
-4.34, 2.12, -1.56, 0.78, -3.21, 4.45, 2.12, 1.67,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: true,
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
|
||||
script: {
|
||||
lang: "mustache",
|
||||
source:
|
||||
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
|
||||
'\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
|
||||
params: {
|
||||
query: "",
|
||||
_es_filters: {},
|
||||
|
||||
@ -1,15 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.delete({
|
||||
index: "books",
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.indices.delete({
|
||||
index: "my-explicit-mappings-books",
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
@ -1,26 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.createApiKey({
|
||||
name: "my-connector-api-key",
|
||||
role_descriptors: {
|
||||
"my-connector-connector-role": {
|
||||
cluster: ["monitor", "manage_connector"],
|
||||
indices: [
|
||||
{
|
||||
names: [
|
||||
"my-index_name",
|
||||
".search-acl-filter-my-index_name",
|
||||
".elastic-connectors*",
|
||||
],
|
||||
privileges: ["all"],
|
||||
allow_restricted_indices: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,12 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.delegatePki({
|
||||
x509_certificate_chain: [
|
||||
"MIIDeDCCAmCgAwIBAgIUBzj/nGGKxP2iXawsSquHmQjCJmMwDQYJKoZIhvcNAQELBQAwUzErMCkGA1UEAxMiRWxhc3RpY3NlYXJjaCBUZXN0IEludGVybWVkaWF0ZSBDQTEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMB4XDTIzMDcxODE5MjkwNloXDTQzMDcxMzE5MjkwNlowSjEiMCAGA1UEAxMZRWxhc3RpY3NlYXJjaCBUZXN0IENsaWVudDEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAllHL4pQkkfwAm/oLkxYYO+r950DEy1bjH+4viCHzNADLCTWO+lOZJVlNx7QEzJE3QGMdif9CCBBxQFMapA7oUFCLq84fPSQQu5AnvvbltVD9nwVtCs+9ZGDjMKsz98RhSLMFIkxdxi6HkQ3Lfa4ZSI4lvba4oo+T/GveazBDS+NgmKyq00EOXt3tWi1G9vEVItommzXWfv0agJWzVnLMldwkPqsw0W7zrpyT7FZS4iLbQADGceOW8fiauOGMkscu9zAnDR/SbWl/chYioQOdw6ndFLn1YIFPd37xL0WsdsldTpn0vH3YfzgLMffT/3P6YlwBegWzsx6FnM/93Ecb4wIDAQABo00wSzAJBgNVHRMEAjAAMB0GA1UdDgQWBBQKNRwjW+Ad/FN1Rpoqme/5+jrFWzAfBgNVHSMEGDAWgBRcya0c0x/PaI7MbmJVIylWgLqXNjANBgkqhkiG9w0BAQsFAAOCAQEACZ3PF7Uqu47lplXHP6YlzYL2jL0D28hpj5lGtdha4Muw1m/BjDb0Pu8l0NQ1z3AP6AVcvjNDkQq6Y5jeSz0bwQlealQpYfo7EMXjOidrft1GbqOMFmTBLpLA9SvwYGobSTXWTkJzonqVaTcf80HpMgM2uEhodwTcvz6v1WEfeT/HMjmdIsq4ImrOL9RNrcZG6nWfw0HR3JNOgrbfyEztEI471jHznZ336OEcyX7gQuvHE8tOv5+oD1d7s3Xg1yuFp+Ynh+FfOi3hPCuaHA+7F6fLmzMDLVUBAllugst1C3U+L/paD7tqIa4ka+KNPCbSfwazmJrt4XNiivPR4hwH5g==",
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -7,9 +7,9 @@ const response = await client.indices.create({
|
||||
index: "my-index-000002",
|
||||
mappings: {
|
||||
properties: {
|
||||
inference_field: {
|
||||
type: "semantic_text",
|
||||
inference_id: "my-openai-endpoint",
|
||||
datetime: {
|
||||
type: "date",
|
||||
format: "uuuu/MM/dd HH:mm:ss||uuuu/MM/dd||epoch_millis",
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1,10 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "books",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,22 +4,24 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.simulate.ingest({
|
||||
docs: [
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "123",
|
||||
_source: {
|
||||
foo: "bar",
|
||||
body: {
|
||||
docs: [
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "123",
|
||||
_source: {
|
||||
foo: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "456",
|
||||
_source: {
|
||||
foo: "rab",
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "456",
|
||||
_source: {
|
||||
foo: "rab",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -4,11 +4,8 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_connector/_secret",
|
||||
body: {
|
||||
value: "encoded_api_key",
|
||||
},
|
||||
method: "DELETE",
|
||||
path: "/_ingest/geoip/database/my-database-id",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -9,6 +9,7 @@ const response = await client.indices.create({
|
||||
properties: {
|
||||
inference_field: {
|
||||
type: "semantic_text",
|
||||
inference_id: "my-elser-endpoint",
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1,19 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.index({
|
||||
index: "mv",
|
||||
refresh: "true",
|
||||
document: {
|
||||
a: [2, null, 1],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.esql.query({
|
||||
query: "FROM mv | LIMIT 1",
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
@ -5,7 +5,7 @@
|
||||
----
|
||||
const response = await client.cat.mlTrainedModels({
|
||||
h: "c,o,l,ct,v",
|
||||
v: "true",
|
||||
v: "ture",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,16 +5,10 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
properties: {
|
||||
flattened: {
|
||||
type: "flattened",
|
||||
@ -4,11 +4,9 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putSettings({
|
||||
index: ".reindexed-v9-ml-anomalies-custom-example",
|
||||
index: ".watches",
|
||||
settings: {
|
||||
index: {
|
||||
number_of_replicas: 0,
|
||||
},
|
||||
"index.routing.allocation.include.role": "watcher",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -5,16 +5,10 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
properties: {
|
||||
text: {
|
||||
type: "text",
|
||||
@ -1,19 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.queryRole({
|
||||
query: {
|
||||
bool: {
|
||||
must_not: {
|
||||
term: {
|
||||
"metadata._reserved": true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
sort: ["name"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,33 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.createApiKey({
|
||||
name: "john-api-key",
|
||||
expiration: "1d",
|
||||
role_descriptors: {
|
||||
"sharepoint-online-role": {
|
||||
index: [
|
||||
{
|
||||
names: ["sharepoint-search-application"],
|
||||
privileges: ["read"],
|
||||
query: {
|
||||
template: {
|
||||
params: {
|
||||
access_control: ["john@example.co", "Engineering Members"],
|
||||
},
|
||||
source:
|
||||
'\n {\n "bool": {\n "should": [\n {\n "bool": {\n "must_not": {\n "exists": {\n "field": "_allow_access_control"\n }\n }\n }\n },\n {\n "terms": {\n "_allow_access_control.enum": {{#toJson}}access_control{{/toJson}}\n }\n }\n ]\n }\n }\n ',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
restriction: {
|
||||
workflows: ["search_application_query"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,16 +5,10 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
properties: {
|
||||
my_range: {
|
||||
type: "ip_range",
|
||||
@ -14,7 +14,6 @@ const response = await client.indices.putSettings({
|
||||
"index.search.slowlog.threshold.fetch.info": "800ms",
|
||||
"index.search.slowlog.threshold.fetch.debug": "500ms",
|
||||
"index.search.slowlog.threshold.fetch.trace": "200ms",
|
||||
"index.search.slowlog.include.user": true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -1,67 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-rank-vectors-bit",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_vector: {
|
||||
type: "rank_vectors",
|
||||
element_type: "bit",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.bulk({
|
||||
index: "my-rank-vectors-bit",
|
||||
refresh: "true",
|
||||
operations: [
|
||||
{
|
||||
index: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: [127, -127, 0, 1, 42],
|
||||
},
|
||||
{
|
||||
index: {
|
||||
_id: "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: "8100012a7f",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.search({
|
||||
index: "my-rank-vectors-bit",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[
|
||||
0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
|
||||
0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
|
||||
0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
|
||||
0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
docs/doc_examples/2577acb462b95bd4394523cf2f8a661f.asciidoc (new file, 28 lines)
@ -0,0 +1,28 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.query({
|
||||
format: "txt",
|
||||
query:
|
||||
"\n FROM library\n | SORT page_count DESC\n | KEEP name, author\n | LOOKUP era ON author\n | LIMIT 5\n ",
|
||||
tables: {
|
||||
era: {
|
||||
author: {
|
||||
keyword: [
|
||||
"Frank Herbert",
|
||||
"Peter F. Hamilton",
|
||||
"Vernor Vinge",
|
||||
"Alastair Reynolds",
|
||||
"James S.A. Corey",
|
||||
],
|
||||
},
|
||||
era: {
|
||||
keyword: ["The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,11 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.addBlock({
|
||||
index: ".ml-anomalies-custom-example",
|
||||
block: "write",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,16 +5,10 @@
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
_source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
properties: {
|
||||
bool: {
|
||||
type: "boolean",
|
||||
@ -1,15 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.migrateReindex({
|
||||
reindex: {
|
||||
source: {
|
||||
index: "my-data-stream",
|
||||
},
|
||||
mode: "upgrade",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,9 +4,11 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.oidcLogout({
|
||||
token:
|
||||
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
|
||||
refresh_token: "vLBPvmAB6KvwvJZr27cS",
|
||||
body: {
|
||||
token:
|
||||
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
|
||||
refresh_token: "vLBPvmAB6KvwvJZr27cS",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -1,26 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-rank-vectors-float",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[0.5, 10, 6],
|
||||
[-0.5, 10, 10],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,35 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "attachment",
|
||||
description: "Extract attachment information including original binary",
|
||||
processors: [
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
pipeline: "attachment",
|
||||
document: {
|
||||
data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.get({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
@ -1,24 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "test-index",
|
||||
query: {
|
||||
match: {
|
||||
my_field: "Which country is Paris in?",
|
||||
},
|
||||
},
|
||||
highlight: {
|
||||
fields: {
|
||||
my_field: {
|
||||
type: "semantic",
|
||||
number_of_fragments: 2,
|
||||
order: "score",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -6,6 +6,7 @@
|
||||
const response = await client.esql.asyncQueryGet({
|
||||
id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
|
||||
wait_for_completion_timeout: "30s",
|
||||
body: null,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -5,8 +5,11 @@
|
||||
----
|
||||
const response = await client.cluster.putSettings({
|
||||
persistent: {
|
||||
"cluster.routing.allocation.disk.watermark.low": "90%",
|
||||
"cluster.routing.allocation.disk.watermark.high": "95%",
|
||||
"cluster.indices.close.enable": false,
|
||||
"indices.recovery.max_bytes_per_sec": "50mb",
|
||||
},
|
||||
transient: {
|
||||
"*": null,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -1,77 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000001",
|
||||
mappings: {
|
||||
properties: {
|
||||
attributes: {
|
||||
type: "passthrough",
|
||||
priority: 10,
|
||||
properties: {
|
||||
id: {
|
||||
type: "keyword",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: 1,
|
||||
document: {
|
||||
attributes: {
|
||||
id: "foo",
|
||||
zone: 10,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.search({
|
||||
index: "my-index-000001",
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
id: "foo",
|
||||
},
|
||||
},
|
||||
{
|
||||
match: {
|
||||
zone: 10,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
|
||||
const response3 = await client.search({
|
||||
index: "my-index-000001",
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
"attributes.id": "foo",
|
||||
},
|
||||
},
|
||||
{
|
||||
match: {
|
||||
"attributes.zone": 10,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response3);
|
||||
----
|
||||
@ -1,28 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-explicit-mappings-books",
|
||||
mappings: {
|
||||
dynamic: false,
|
||||
properties: {
|
||||
name: {
|
||||
type: "text",
|
||||
},
|
||||
author: {
|
||||
type: "text",
|
||||
},
|
||||
release_date: {
|
||||
type: "date",
|
||||
format: "yyyy-MM-dd",
|
||||
},
|
||||
page_count: {
|
||||
type: "integer",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,6 +4,7 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.reroute({
|
||||
metric: "none",
|
||||
commands: [
|
||||
{
|
||||
move: {
|
||||
@ -1,25 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.index({
|
||||
index: "idx_keep",
|
||||
id: 1,
|
||||
document: {
|
||||
path: {
|
||||
to: [
|
||||
{
|
||||
foo: [3, 2, 1],
|
||||
},
|
||||
{
|
||||
foo: [30, 20, 10],
|
||||
},
|
||||
],
|
||||
bar: "baz",
|
||||
},
|
||||
ids: [200, 100, 300, 100],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,43 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-000001",
|
||||
retriever: {
|
||||
rule: {
|
||||
match_criteria: {
|
||||
query_string: "puggles",
|
||||
user_country: "us",
|
||||
},
|
||||
ruleset_ids: ["my-ruleset"],
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
query_string: {
|
||||
query: "pugs",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
query_string: {
|
||||
query: "puggles",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,10 +4,9 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putSettings({
|
||||
index: "*",
|
||||
index: "my-index-000001",
|
||||
settings: {
|
||||
"index.indexing.slowlog.include.user": true,
|
||||
"index.indexing.slowlog.threshold.index.warn": "30s",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -4,11 +4,9 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putSettings({
|
||||
index: ".reindexed-v9-ml-anomalies-custom-example",
|
||||
index: "my-index-000001",
|
||||
settings: {
|
||||
index: {
|
||||
number_of_replicas: "<original_number_of_replicas>",
|
||||
},
|
||||
"index.merge.policy.max_merge_at_once_explicit": null,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
Some files were not shown because too many files have changed in this diff.