Compare commits

99 Commits (SHA1 only):

fb1a42cadc, e3863d7b77, 424cc94458, 7aca5cf652, a8927727b1, 15a450eba4, 8f028a522a, 411f379006,
242b4227ee, 78332da539, 62b2d78b15, 599d7e6e07, 6116909a9b, f609271107, b4eb8e5441, ccf9fcbd93,
0c6f323745, 04a9eb462d, 05eaa9fc61, 30c6793383, 6df75b6a92, 383206ef19, 069103612a, db911746a0,
7b255bed98, 74be52ebb1, 0e5beddd65, bd89ab5dd7, c202a6bbc5, 8e162dd8b8, 2b0eebc8fa, f97ba5b02a,
72a1114186, 542585a5dc, e1de2bd53d, 4be14a1f6c, a71ebb5f68, 05f7078534, b250049ee7, fe2d8c1915,
b9ea8f8906, 896216860f, 45e3c0657a, b65e468b95, 768ba3d8ae, 2da30cd4cd, a13992ec7d, 95fd81a883,
6f2aaa5c7c, b857d8ee71, 4aa00e03e1, e2974b0747, 3bd7ba95f8, f96aa32345, 628254df2d, 1ef318aded,
ec9a4dc960, f3d9dfb48e, 7f7942e207, d584836399, c7cbe941db, 113b32258d, 6e63530801, 38c17fd7f3,
63eb92b42a, 7475dba8b9, 3ad00b4a9f, 2721008867, c106146d30, 78dab89db8, af2dbc01d3, 3ac5a1cc65,
fba3e41862, 6a821583c0, 86d89a47a0, 1d84468762, 8afdec052a, b77bdf2a79, d61d54a811, d430aecdbd,
29a0e53978, 05e3139f80, 8b9ca79d5b, 24e1f4fb26, fa33037b86, c2fb0a294f, abd15eb111, 352f73e7c2,
1d8da99d5b, 8df91fce7c, 1607a0d3f7, 57ee5cf6c2, 6eabf37097, 5413eb5f35, 4aaf49b6ea, d3f22f1e14,
51323e769d, 1fb789862d, c2c417a9fd
@@ -10,5 +10,7 @@ RUN apt-get clean -y && \
WORKDIR /usr/src/app
COPY . .
COPY package.json .
RUN npm install --production=false
COPY . .
@@ -2,7 +2,7 @@
#
# Shared cleanup routines between different steps
#
# Please source .ci/functions/imports.sh as a whole not just this file
# Please source .buildkite/functions/imports.sh as a whole not just this file
#
# Version 1.0.0
# - Initial version after refactor
@@ -2,7 +2,7 @@
#
# Exposes a routine scripts can call to wait for a container if that container set up a health command
#
# Please source .ci/functions/imports.sh as a whole not just this file
# Please source .buildkite/functions/imports.sh as a whole not just this file
#
# Version 1.0.1
# - Initial version after refactor
@@ -74,14 +74,15 @@ async function release (args) {

async function bump (args) {
  assert(args.length === 1, 'Bump task expects one parameter')
  const [version] = args
  let [version] = args
  const packageJson = JSON.parse(await readFile(
    join(import.meta.url, '..', 'package.json'),
    'utf8'
  ))

  if (version.split('.').length === 2) version = `${version}.0`
  const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
  assert(semver.valid(cleanVersion))
  assert(semver.valid(cleanVersion), `${cleanVersion} is not seen as a valid semver version. raw version: ${version}`)
  packageJson.version = cleanVersion
  packageJson.versionCanary = `${cleanVersion}-canary.0`

@@ -94,7 +95,7 @@ async function bump (args) {
  const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'), 'utf8')
  await writeFile(
    join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}-SNAPSHOT`),
    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}`),
    'utf8'
  )
}

@@ -124,6 +125,13 @@ async function codegen (args) {
  await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
  await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
  await $`npm run build`

  // run docs example generation
  if (version === 'main') {
    await $`node ./scripts/generate-docs-examples.js`
  } else {
    await $`node ./scripts/generate-docs-examples.js ${version.split('.').slice(0, 2).join('.')}`
  }
}

function onError (err) {
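For reference, a minimal sketch of the version normalization the bump task above performs, assuming the same `semver` dependency (the sample inputs are hypothetical):

```js
// Sketch of the bump task's version normalization; not the script itself.
import semver from 'semver'

function normalize (version) {
  // "8.15" has only two segments, so a ".0" patch segment is appended
  if (version.split('.').length === 2) version = `${version}.0`
  // "8.15.0-SNAPSHOT" keeps only the part before the prerelease suffix
  return semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
}

console.log(normalize('8.15'))            // "8.15.0"
console.log(normalize('8.15.0-SNAPSHOT')) // "8.15.0"
```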
@@ -6,7 +6,7 @@ steps:
    env:
      NODE_VERSION: "{{ matrix.nodejs }}"
      TEST_SUITE: "{{ matrix.suite }}"
      STACK_VERSION: 8.10.3-SNAPSHOT
      STACK_VERSION: 8.15.0
    matrix:
      setup:
        suite:
@@ -15,6 +15,7 @@ steps:
        nodejs:
          - "18"
          - "20"
          - "22"
    command: ./.buildkite/run-tests.sh
    artifact_paths: "./junit-output/junit-*.xml"
  - wait: ~
@@ -26,6 +27,6 @@ steps:
    plugins:
      - junit-annotate#v2.4.1:
          artifacts: "junit-output/junit-*.xml"
          job-uuid-file-pattern: 'junit-(.*).xml'
          job-uuid-file-pattern: "junit-(.*).xml"
          fail-build-on-error: true
          failure-format: file
@@ -9,7 +9,6 @@
    "\\.md$",
    "\\.asciidoc$",
    "^docs\\/",
    "^\\.ci\\/",
    "^scripts\\/",
    "^catalog-info\\.yaml$",
    "^test\\/unit\\/",
@@ -3,3 +3,5 @@ npm-debug.log
test/benchmarks
elasticsearch
.git
lib
junit-output
.github/ISSUE_TEMPLATE/bug.md (vendored, 3 changes)

@@ -1,13 +1,14 @@
---
name: 🐛 Bug report
about: Create a report to help us improve
labels: ["Category: Bug"]
---

It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `not reproducible` label.**
locked, and assigned the `Category: Not an issue` label.**

## 🐛 Bug Report
.github/ISSUE_TEMPLATE/feature.md (vendored, 3 changes)

@@ -1,13 +1,14 @@
---
name: 🚀 Feature Proposal
about: Submit a proposal for a new feature
labels: ["Category: Feature"]
---

It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.

**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `invalid` label.**
locked, and assigned the `Category: Not an issue` label.**

## 🚀 Feature Proposal
.github/ISSUE_TEMPLATE/question.md (vendored, 1 change)

@@ -1,6 +1,7 @@
---
name: 💬 Questions / Help
about: If you have questions, please check our Gitter or Help repo
labels: ["Category: Question"]
---

## 💬 Questions and Help
56
.github/ISSUE_TEMPLATE/regression.md
vendored
56
.github/ISSUE_TEMPLATE/regression.md
vendored
@ -1,56 +0,0 @@
|
||||
---
|
||||
name: 💥 Regression Report
|
||||
about: Report unexpected behavior that worked in previous versions
|
||||
---
|
||||
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `invalid` label.**
|
||||
|
||||
## 💥 Regression Report
|
||||
|
||||
A clear and concise description of what the regression is.
|
||||
|
||||
## Last working version
|
||||
|
||||
Worked up to version:
|
||||
|
||||
Stopped working in version:
|
||||
|
||||
## To Reproduce
|
||||
|
||||
Steps to reproduce the behavior:
|
||||
|
||||
Paste your code here:
|
||||
|
||||
```js
|
||||
|
||||
```
|
||||
|
||||
<!--
|
||||
In some cases, it might be challenging to reproduce the bug in a few lines of code.
|
||||
You can fork the following repository, which contains all the configuration needed
|
||||
to spin up a three nodes Elasticsearch cluster with security enabled.
|
||||
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
|
||||
https://github.com/delvedor/es-reproduce-issue
|
||||
--->
|
||||
|
||||
## Expected behavior
|
||||
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
Paste the results here:
|
||||
|
||||
```js
|
||||
|
||||
```
|
||||
|
||||
## Your Environment
|
||||
|
||||
- *node version*: 6,8,10
|
||||
- `@elastic/elasticsearch` *version*: >=7.0.0
|
||||
- *typescript version*: 4.x (if applicable)
|
||||
- *os*: Mac, Windows, Linux
|
||||
- *any other relevant information*
|
||||
92
.github/ISSUE_TEMPLATE/regression.yaml
vendored
Normal file
92
.github/ISSUE_TEMPLATE/regression.yaml
vendored
Normal file
@ -0,0 +1,92 @@
|
||||
---
|
||||
name: 💥 Regression Report
|
||||
description: Report unexpected behavior that worked in previous versions
|
||||
labels: ["Category: Bug"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `Category: Not an issue` label.**
|
||||
|
||||
- type: textarea
|
||||
id: report
|
||||
attributes:
|
||||
label: Regression report
|
||||
description: A clear and concise description of what the regression is.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: last-working-version
|
||||
attributes:
|
||||
label: Last working version
|
||||
description: Version of `@elastic/elasticsearch` where this last worked.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: to-reproduce
|
||||
attributes:
|
||||
label: To reproduce
|
||||
description: |
|
||||
Paste your code here that shows how to reproduce the behavior.
|
||||
|
||||
In some cases, it might be challenging to reproduce the bug in a few lines of code.
|
||||
You can fork the following repository, which contains all the configuration needed to spin up a three nodes Elasticsearch cluster with security enabled.
|
||||
[This repository](https://github.com/delvedor/es-reproduce-issue) also contains a preconfigured client instance that you can use to reproduce the issue.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: expected-behavior
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: node-version
|
||||
attributes:
|
||||
label: Node.js version
|
||||
description: What version of Node.js you are using (`node --version`).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: typescript-version
|
||||
attributes:
|
||||
label: TypeScript version
|
||||
description: TypeScript version you are using, if applicable.
|
||||
|
||||
- type: input
|
||||
id: elasticsearch-client-version
|
||||
attributes:
|
||||
label: Elasticsearch client version
|
||||
description: What version of `@elastic/elasticsearch` and `@elastic/transport` you are using (`npm ls -a | grep '@elastic'`).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: elasticsearch-version
|
||||
attributes:
|
||||
label: Elasticsearch server version
|
||||
description: What version of Elasticsearch you are using.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: operating-system
|
||||
attributes:
|
||||
label: Operating system
|
||||
description: What operating system you are running.
|
||||
placeholder: e.g. Linux, MacOS, Windows
|
||||
|
||||
- type: textarea
|
||||
id: env-info
|
||||
attributes:
|
||||
label: Any other relevant environment information.
|
||||
14
.ci/make.sh → .github/make.sh
vendored
14
.ci/make.sh → .github/make.sh
vendored
@ -3,7 +3,7 @@
|
||||
#
|
||||
# Build entry script for elasticsearch-js
|
||||
#
|
||||
# Must be called: ./.ci/make.sh <target> <params>
|
||||
# Must be called: ./.github/make.sh <target> <params>
|
||||
#
|
||||
# Version: 1.1.0
|
||||
#
|
||||
@ -34,8 +34,8 @@ STACK_VERSION=$VERSION
|
||||
set -euo pipefail
|
||||
|
||||
product="elastic/elasticsearch-js"
|
||||
output_folder=".ci/output"
|
||||
codegen_folder=".ci/output"
|
||||
output_folder=".buildkite/output"
|
||||
codegen_folder=".buildkite/output"
|
||||
OUTPUT_DIR="$repo/${output_folder}"
|
||||
NODE_JS_VERSION=18
|
||||
WORKFLOW=${WORKFLOW-staging}
|
||||
@ -131,7 +131,7 @@ esac
|
||||
echo -e "\033[34;1mINFO: building $product container\033[0m"
|
||||
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--file .buildkite/Dockerfile-make \
|
||||
--tag "$product" \
|
||||
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
|
||||
--build-arg "BUILDER_UID=$(id -u)" \
|
||||
@ -156,7 +156,7 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
|
||||
--rm \
|
||||
$product \
|
||||
/bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
|
||||
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
else
|
||||
echo -e "\033[34;1mINFO: Running in CI mode"
|
||||
docker run \
|
||||
@ -171,7 +171,7 @@ else
|
||||
git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
|
||||
mkdir -p /usr/src/elastic-client-generator-js/output && \
|
||||
cd /usr/src/elasticsearch-js && \
|
||||
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
@ -179,7 +179,7 @@ fi
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
if [[ "$CMD" == "assemble" ]]; then
|
||||
if compgen -G ".ci/output/*" > /dev/null; then
|
||||
if compgen -G ".buildkite/output/*" > /dev/null; then
|
||||
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
|
||||
18
.github/workflows/auto-merge.yml
vendored
Normal file
18
.github/workflows/auto-merge.yml
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
name: Automerge
|
||||
|
||||
on:
|
||||
pull_request_review:
|
||||
types:
|
||||
- submitted
|
||||
|
||||
jobs:
|
||||
automerge:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.review.state == 'approved'
|
||||
steps:
|
||||
- uses: reitermarkus/automerge@v2
|
||||
with:
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
merge-method: squash
|
||||
pull-request-author-associations: OWNER
|
||||
review-author-associations: OWNER,CONTRIBUTOR
|
||||
37
.github/workflows/nodejs.yml
vendored
37
.github/workflows/nodejs.yml
vendored
@ -9,7 +9,7 @@ jobs:
|
||||
name: Detect files changed
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
src-only: '${{ steps.changes.outputs.src-only }}'
|
||||
src-only: "${{ steps.changes.outputs.src-only }}"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dorny/paths-filter/@v2.11.1
|
||||
@ -17,7 +17,7 @@ jobs:
|
||||
with:
|
||||
filters: |
|
||||
src-only:
|
||||
- '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.ci,.buildkite,scripts}/**/*|catalog-info.yaml)'
|
||||
- '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.buildkite,scripts}/**/*|catalog-info.yaml)'
|
||||
- '.github/workflows/**'
|
||||
|
||||
test:
|
||||
@ -30,24 +30,17 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [18.x, 20.x]
|
||||
node-version: [18.x, 20.x, 22.x]
|
||||
os: [ubuntu-latest, windows-latest, macOS-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
# workaround for failing tests on Node.js 14.x
|
||||
# see https://github.com/actions/setup-node/issues/411
|
||||
- name: Force install specific npm version
|
||||
run: |
|
||||
npm install --global npm@8.3.1
|
||||
npm install --global npm@9.7.1
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
npm install
|
||||
@ -64,17 +57,13 @@ jobs:
|
||||
name: License check
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [20.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
node-version: 22.x
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
@ -83,3 +72,13 @@ jobs:
|
||||
- name: License checker
|
||||
run: |
|
||||
npm run license-checker
|
||||
|
||||
auto-approve:
|
||||
name: Auto-approve
|
||||
needs: [test, license]
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
if: github.actor == 'elasticmachine'
|
||||
steps:
|
||||
- uses: hmarr/auto-approve-action@v4
|
||||
|
||||
37
.github/workflows/npm-publish.yml
vendored
Normal file
37
.github/workflows/npm-publish.yml
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
name: Publish Package to npm
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: "Git branch to build and publish"
|
||||
required: true
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch }}
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "20.x"
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- run: npm install -g npm
|
||||
- run: npm install
|
||||
- run: npm test
|
||||
- run: npm publish --provenance --access public
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
- run: |
|
||||
version=$(jq -r .version package.json)
|
||||
gh release create \
|
||||
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
|
||||
--target "$BRANCH_NAME" \
|
||||
-t "v$version" \
|
||||
"v$version"
|
||||
env:
|
||||
BRANCH_NAME: ${{ github.event.inputs.branch }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
43
.github/workflows/serverless-patch.sh
vendored
Executable file
43
.github/workflows/serverless-patch.sh
vendored
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -exuo pipefail
|
||||
|
||||
merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
|
||||
pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
|
||||
pr_shortcode="elastic/elasticsearch-js#$pull_request_id"
|
||||
|
||||
# generate patch file
|
||||
cd "$GITHUB_WORKSPACE/stack"
|
||||
git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff
|
||||
|
||||
# set committer info
|
||||
git config --global user.email "elasticmachine@users.noreply.github.com"
|
||||
git config --global user.name "Elastic Machine"
|
||||
|
||||
# apply patch file
|
||||
cd "$GITHUB_WORKSPACE/serverless"
|
||||
git am -C1 --reject /tmp/patch.diff || git am --quit
|
||||
|
||||
# generate PR body comment
|
||||
comment="Patch applied from $pr_shortcode"
|
||||
|
||||
# enumerate rejected patches in PR comment
|
||||
has_rejects='false'
|
||||
for f in ./**/*.rej; do
|
||||
has_rejects='true'
|
||||
comment="$comment
|
||||
|
||||
## Rejected patch \`$f\` must be resolved:
|
||||
|
||||
\`\`\`diff
|
||||
$(cat "$f")
|
||||
\`\`\`
|
||||
"
|
||||
done
|
||||
|
||||
# delete .rej files
|
||||
rm -fv ./**/*.rej
|
||||
|
||||
# send data to output parameters
|
||||
echo "$comment" > /tmp/pr_body
|
||||
echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"
|
||||
51
.github/workflows/serverless-patch.yml
vendored
Normal file
51
.github/workflows/serverless-patch.yml
vendored
Normal file
@ -0,0 +1,51 @@
|
||||
---
|
||||
name: Apply PR changes to serverless
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- closed
|
||||
- labeled
|
||||
|
||||
jobs:
|
||||
apply-patch:
|
||||
name: Apply patch
|
||||
runs-on: ubuntu-latest
|
||||
# Only react to merged PRs for security reasons.
|
||||
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
|
||||
if: >
|
||||
github.event.pull_request.merged
|
||||
&& (
|
||||
(
|
||||
github.event.action == 'closed'
|
||||
&& contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
|
||||
)
|
||||
||
|
||||
(
|
||||
github.event.action == 'labeled'
|
||||
&& github.event.label.name == 'apply-to-serverless'
|
||||
)
|
||||
)
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: elastic/elasticsearch-js
|
||||
ref: main
|
||||
path: stack
|
||||
fetch-depth: 0
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: elastic/elasticsearch-serverless-js
|
||||
ref: main
|
||||
path: serverless
|
||||
- name: Apply patch from stack to serverless
|
||||
id: apply-patch
|
||||
run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
|
||||
- uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: serverless
|
||||
title: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
|
||||
commit-message: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
|
||||
body-path: /tmp/pr_body
|
||||
draft: '${{ steps.apply-patch.outputs.PR_DRAFT }}'
|
||||
add-paths: ':!*.rej'
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -63,3 +63,4 @@ test/bundlers/**/bundle.js
|
||||
test/bundlers/parcel-test/.parcel-cache
|
||||
|
||||
lib
|
||||
junit-output
|
||||
|
||||
@ -64,7 +64,6 @@ test
|
||||
scripts
|
||||
|
||||
# ci configuration
|
||||
.ci
|
||||
.travis.yml
|
||||
.buildkite
|
||||
certs
|
||||
|
||||
11
Makefile
11
Makefile
@ -1,11 +0,0 @@
|
||||
.PHONY: integration-setup
|
||||
integration-setup: integration-cleanup
|
||||
DETACH=true .ci/run-elasticsearch.sh
|
||||
|
||||
.PHONY: integration-cleanup
|
||||
integration-cleanup:
|
||||
docker container rm --force --volumes instance || true
|
||||
|
||||
.PHONY: integration
|
||||
integration: integration-setup
|
||||
npm run test:integration
|
||||
README.md (19 changes)

@@ -21,6 +21,25 @@ of the getting started documentation.
Refer to the [Connecting section](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_connecting)
of the getting started documentation.

## Compatibility

The Elasticsearch client is compatible with currently maintained JS versions.

Language clients are forward compatible; meaning that clients support
communicating with greater or equal minor versions of Elasticsearch without
breaking. It does not mean that the client automatically supports new features
of newer Elasticsearch versions; it is only possible after a release of a new
client version. For example, a 8.12 client version won't automatically support
the new features of the 8.13 version of Elasticsearch, the 8.13 client version
is required for that. Elasticsearch language clients are only backwards
compatible with default distributions and without guarantees made.

| Elasticsearch Version | Elasticsearch-JS Branch | Supported |
| --------------------- | ----------------------- | --------- |
| main                  | main                    |           |
| 8.x                   | 8.x                     | 8.x       |
| 7.x                   | 7.x                     | 7.17      |

## Usage

* [Creating an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_creating_an_index)
@ -6,7 +6,7 @@ metadata:
|
||||
name: elasticsearch-js
|
||||
spec:
|
||||
type: library
|
||||
owner: group:clients-team
|
||||
owner: group:devtools-team
|
||||
lifecycle: production
|
||||
|
||||
---
|
||||
@ -18,7 +18,7 @@ metadata:
|
||||
description: elasticsearch-js - integration tests
|
||||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:clients-team
|
||||
owner: group:devtools-team
|
||||
system: buildkite
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
@ -29,7 +29,7 @@ spec:
|
||||
repository: elastic/elasticsearch-js
|
||||
pipeline_file: .buildkite/pipeline.yml
|
||||
teams:
|
||||
clients-team:
|
||||
devtools-team:
|
||||
access_level: MANAGE_BUILD_AND_READ
|
||||
everyone:
|
||||
access_level: READ_ONLY
|
||||
@ -37,14 +37,11 @@ spec:
|
||||
build_pull_requests: false
|
||||
build_branches: false
|
||||
cancel_intermediate_builds: true
|
||||
cancel_intermediate_builds_branch_filter: '!main'
|
||||
cancel_intermediate_builds_branch_filter: "!main"
|
||||
schedules:
|
||||
main_semi_daily:
|
||||
branch: 'main'
|
||||
cronline: '0 */12 * * *'
|
||||
8_9_semi_daily:
|
||||
branch: '8.9'
|
||||
cronline: '0 */12 * * *'
|
||||
8_8_daily:
|
||||
branch: '8.8'
|
||||
cronline: '@daily'
|
||||
main:
|
||||
branch: "main"
|
||||
cronline: "@daily"
|
||||
8_14:
|
||||
branch: "8.14"
|
||||
cronline: "@daily"
|
||||
|
||||
@@ -91,6 +91,95 @@ const client = new Client({
})
----

[discrete]
[[redaction]]
==== Redaction of potentially sensitive data

When the client raises an `Error` that originated at the HTTP layer, like a `ConnectionError` or `TimeoutError`, a `meta` object is often attached to the error object that includes metadata useful for debugging, like request and response information. Because this can include potentially sensitive data, like authentication secrets in an `Authorization` header, the client takes measures to redact common sources of sensitive data when this metadata is attached and serialized.

If your configuration requires extra headers or other configurations that may include sensitive data, you may want to adjust these settings to account for that.

By default, the `redaction` option is set to `{ type: 'replace' }`, which recursively searches for sensitive key names, case insensitive, and replaces their values with the string `[redacted]`.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers.authorization) // prints "[redacted]"
}
----

If you would like to redact additional properties, you can include additional key names to search and replace:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  headers: { 'X-My-Secret-Password': 'shhh it's a secret!' },
  redaction: {
    type: "replace",
    additionalKeys: ["x-my-secret-password"]
  }
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers['X-My-Secret-Password']) // prints "[redacted]"
}
----

Alternatively, if you know you're not going to use the metadata at all, setting the redaction type to `remove` will remove all optional sources of potentially sensitive data entirely, or replacing them with `null` for required properties.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  redaction: { type: "remove" }
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers) // undefined
}
----

Finally, if you prefer to turn off redaction altogether, perhaps while debugging on a local developer environment, you can set the redaction type to `off`. This will revert the client to pre-8.11.0 behavior, where basic redaction is only performed during common serialization methods like `console.log` and `JSON.stringify`.

WARNING: Setting `redaction.type` to `off` is not recommended in production environments.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  redaction: { type: "off" }
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers.authorization) // the actual header value will be logged
}
----

[discrete]
==== Migrate to v8
@@ -260,11 +260,11 @@ _Default:_ `false`
_Default:_ `null`

|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENTGH +
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
_Default:_ `null`

|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENTGH +
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH +
_Default:_ `null`

|===
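As a small illustration of the two limits documented above (the byte values here are arbitrary examples, not recommendations):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

// Example values only; requests are aborted when a response body exceeds the limit.
const client = new Client({
  node: 'http://localhost:9200',
  maxResponseSize: 100 * 1024 * 1024,          // uncompressed body limit, in bytes
  maxCompressedResponseSize: 20 * 1024 * 1024  // compressed body limit, in bytes
})
----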
@ -1,11 +1,200 @@
|
||||
[[changelog-client]]
|
||||
== Release notes
|
||||
|
||||
[discrete]
|
||||
=== 8.15.1
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.15`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.15.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== OpenTelemetry zero-code instrumentation support
|
||||
|
||||
For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
|
||||
See {jsclient}/observability.html#_opentelemetry[the docs]
|
||||
for more information.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.1
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `8.14`
|
||||
|
||||
Updated types based on fixes and changes to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.14.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== ES|QL object API helper
|
||||
|
||||
A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
|
||||
A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
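A minimal sketch of calling the helper (the index and query are hypothetical, and the exact return shape is described in the helper docs):

[source,js]
----
// Hypothetical ES|QL query; the helper converts the tabular response into row objects.
const result = await client.helpers.esql({ query: 'FROM my-index | LIMIT 10' }).toRecords()
console.log(result.records) // array of objects keyed by column name
----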
[discrete]
|
||||
===== `onSuccess` callback added to bulk helper
|
||||
|
||||
The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
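A minimal sketch of wiring that callback up (the datasource and index name are hypothetical):

[source,js]
----
// Hypothetical dataset/index; onSuccess fires once per successfully indexed document.
await client.helpers.bulk({
  datasource: [{ user: 'a' }, { user: 'b' }],
  onDocument () {
    return { index: { _index: 'my-index' } }
  },
  onSuccess ({ result, document }) {
    console.log('indexed', document, result)
  }
})
----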
[discrete]
|
||||
===== Request retries are more polite
|
||||
|
||||
https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== 8.13.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Pin @elastic/transport to `~8.4.1`
|
||||
|
||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
|
||||
|
||||
[discrete]
|
||||
=== 8.13.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.13.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
|
||||
|
||||
When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
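For illustration, a sketch of the configuration shape involved (the timeout value is arbitrary):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

// Prior to this fix, a node given as a plain string did not pick up
// client-level defaults such as requestTimeout; an explicit ConnectionOptions object did.
const client = new Client({
  node: 'http://localhost:9200', // string form — now inherits requestTimeout as well
  requestTimeout: 60000
})
----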
[discrete]
|
||||
=== 8.12.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.4.1`
|
||||
|
||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.12 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Upgrade transport to 8.4.1 https://github.com/elastic/elasticsearch-js/pull/2137[#2137]
|
||||
|
||||
Upgrades `@elastic/transport` to 8.4.1 to resolve https://github.com/elastic/elastic-transport-js/pull/83[a bug] where arrays in error diagnostics were unintentionally transformed into objects.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix hang in bulk helper semaphore https://github.com/elastic/elasticsearch-js/pull/2027[#2027]
|
||||
|
||||
The failing state could be reached when a server's response times are slower than flushInterval.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.0
|
||||
|
||||
[discrete]
|
||||
=== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.12.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.11.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.4.0`
|
||||
|
||||
Switching from `^8.4.0` to `~8.4.0` ensures 8.11 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.11.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.11.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.11/release-notes-8.11.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Enhanced support for redacting potentially sensitive data https://github.com/elastic/elasticsearch-js/pull/2095[#2095]
|
||||
|
||||
`@elastic/transport` https://github.com/elastic/elastic-transport-js/releases/tag/v8.4.0[version 8.4.0] introduces enhanced measures for ensuring that request metadata attached to some `Error` objects is redacted. This functionality is primarily to address custom logging solutions that don't use common serialization methods like `JSON.stringify`, `console.log`, or `util.inspect`, which were already accounted for.
|
||||
|
||||
See <<redaction>> for more information.
|
||||
|
||||
[discrete]
|
||||
=== 8.10.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.4`
|
||||
|
||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.10.0
|
||||
|
||||
[discrete]
|
||||
=== Features
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.10.0`
|
||||
@ -13,6 +202,17 @@
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.10/release-notes-8.10.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.9.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.4`
|
||||
|
||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.9 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.9.1
|
||||
|
||||
@ -39,7 +239,7 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.
|
||||
[discrete]
|
||||
===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
|
||||
|
||||
In the https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
|
||||
In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
@ -49,6 +249,17 @@ In the https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/curre
|
||||
|
||||
The `user-agent` header the client used to connect to Elasticsearch was using a non-standard format that has been improved.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.2`
|
||||
|
||||
Switching from `^8.3.2` to `~8.3.2` ensures 8.8 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.1
|
||||
|
||||
@ -94,6 +305,17 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.0.
|
||||
|
||||
Prior releases contained a bug where type declarations for legacy types that include a `body` key were not actually importing the type that includes the `body` key.
|
||||
|
||||
[discrete]
|
||||
=== 8.7.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.7.0
|
||||
|
||||
@ -103,6 +325,17 @@ Prior releases contained a bug where type declarations for legacy types that inc
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.6.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.6.0
|
||||
|
||||
@ -287,7 +520,7 @@ client.search({ params }, { options }, (err, result) => {
|
||||
client.search({ params }, { options })
|
||||
.then(console.log)
|
||||
.catch(console.log)
|
||||
|
||||
|
||||
// async-style (sugar syntax on top of promises)
|
||||
const response = await client.search({ params }, { options })
|
||||
console.log(response)
|
||||
@ -342,6 +575,9 @@ The client API leaks HTTP-related notions in many places, and removing them woul
|
||||
|
||||
This could be a rather big breaking change, so a double solution could be used during the 8.x lifecycle. (accepting body keys without them being wrapped in the body as well as the current solution).
|
||||
|
||||
To convert code from 7.x, you need to remove the `body` parameter in all the endpoints request.
|
||||
For instance, this is an example for the `search` endpoint:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
@ -380,6 +616,12 @@ If you weren't extending the internals of the client, this won't be a breaking c
|
||||
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
|
||||
The client will expose a new request-specific option to still get the full response details.
|
||||
|
||||
The new behaviour returns the `body` value directly as response.
|
||||
If you want to have the 7.x response format, you need to add `meta : true` in the request.
|
||||
This will return all the HTTP meta information, including the `body`.
|
||||
|
||||
For instance, this is an example for the `search` endpoint:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
@ -438,7 +680,7 @@ If you weren't extending the internals of the client, this won't be a breaking c
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Medium*
|
||||
|
||||
Currently, every path or query parameter could be expressed in both `snake_case` and `camelCase`. Internally the client will convert everything to `snake_case`.
|
||||
Currently, every path or query parameter could be expressed in both `snake_case` and `camelCase`. Internally the client will convert everything to `snake_case`.
|
||||
This was done in an effort to reduce the friction of migrating from the legacy to the new client, but now it no longer makes sense.
|
||||
If you are already using `snake_case` keys, this won't be a breaking change for you.
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ This page contains the information you need to connect and use the Client with
* <<client-connect-proxy, Connecting through a proxy>>
* <<client-error-handling, Handling errors>>
* <<keep-alive, Keep-alive connections>>
* <<close-connections, Closing a client's connections>>
* <<product-check, Automatic product check>>

[[authentication]]

@@ -691,6 +692,20 @@ const client = new Client({
})
----

[discrete]
[[close-connections]]
=== Closing a client's connections

If you would like to close all open connections being managed by an instance of the client, use the `close()` function:

[source,js]
----
const client = new Client({
  node: 'http://localhost:9200'
});
client.close();
----

[discrete]
[[product-check]]
=== Automatic product check
10
docs/doc_examples/00272f75a6afea91f8554ef7cda0c1f2.asciidoc
Normal file
10
docs/doc_examples/00272f75a6afea91f8554ef7cda0c1f2.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.clearCachedRealms({
|
||||
realms: "default_file,ldap1",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
11
docs/doc_examples/004743b9c9f61588926ccf734696b713.asciidoc
Normal file
11
docs/doc_examples/004743b9c9f61588926ccf734696b713.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.forcemerge({
|
||||
index: ".ds-my-data-stream-2099.03.07-000001",
|
||||
max_num_segments: 1,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,20 +4,16 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: 'bank',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
query: {
|
||||
pinned: {
|
||||
ids: ["1", "4", "100"],
|
||||
organic: {
|
||||
match: {
|
||||
description: "iphone",
|
||||
},
|
||||
},
|
||||
},
|
||||
sort: [
|
||||
{
|
||||
account_number: 'asc'
|
||||
}
|
||||
],
|
||||
from: 10,
|
||||
size: 10
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
20
docs/doc_examples/006e0e16c9f1da58c0bfe57377f7fc38.asciidoc
Normal file
20
docs/doc_examples/006e0e16c9f1da58c0bfe57377f7fc38.asciidoc
Normal file
@ -0,0 +1,20 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000001",
|
||||
settings: {
|
||||
analysis: {
|
||||
analyzer: {
|
||||
my_analyzer: {
|
||||
tokenizer: "whitespace",
|
||||
filter: ["stemmer"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
40
docs/doc_examples/007179b5e241da650562a5f0a5007823.asciidoc
Normal file
40
docs/doc_examples/007179b5e241da650562a5f0a5007823.asciidoc
Normal file
@ -0,0 +1,40 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.watcher.putWatch({
|
||||
id: "cluster_health_watch",
|
||||
trigger: {
|
||||
schedule: {
|
||||
interval: "10s",
|
||||
},
|
||||
},
|
||||
input: {
|
||||
http: {
|
||||
request: {
|
||||
host: "localhost",
|
||||
port: 9200,
|
||||
path: "/_cluster/health",
|
||||
},
|
||||
},
|
||||
},
|
||||
condition: {
|
||||
compare: {
|
||||
"ctx.payload.status": {
|
||||
eq: "red",
|
||||
},
|
||||
},
|
||||
},
|
||||
actions: {
|
||||
send_email: {
|
||||
email: {
|
||||
to: "username@example.org",
|
||||
subject: "Cluster Status Warning",
|
||||
body: "Cluster status is RED",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -0,0 +1,8 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.migration.postFeatureUpgrade();
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,18 +4,13 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: 'bank',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
index: "my-index",
|
||||
query: {
|
||||
match: {
|
||||
"http.clientip": "40.135.0.0",
|
||||
},
|
||||
sort: [
|
||||
{
|
||||
account_number: 'asc'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
},
|
||||
fields: ["http.clientip"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -0,0 +1,8 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.shardStores();
|
||||
console.log(response);
|
||||
----
|
||||
15
docs/doc_examples/00c05aa931fc985985e3e21c93cf43ff.asciidoc
Normal file
15
docs/doc_examples/00c05aa931fc985985e3e21c93cf43ff.asciidoc
Normal file
@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.renderSearchTemplate({
|
||||
source: '{ "query": {{#toJson}}my_query{{/toJson}} }',
|
||||
params: {
|
||||
my_query: {
|
||||
match_all: {},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
12
docs/doc_examples/00d65f7b9daa1c6b18eedd8ace206bae.asciidoc
Normal file
12
docs/doc_examples/00d65f7b9daa1c6b18eedd8ace206bae.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.analyze({
|
||||
tokenizer: "standard",
|
||||
filter: ["asciifolding"],
|
||||
text: "açaí à la carte",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
67
docs/doc_examples/00e0c964c79fcc1876ab957da2ffce82.asciidoc
Normal file
67
docs/doc_examples/00e0c964c79fcc1876ab957da2ffce82.asciidoc
Normal file
@ -0,0 +1,67 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "italian_example",
|
||||
settings: {
|
||||
analysis: {
|
||||
filter: {
|
||||
italian_elision: {
|
||||
type: "elision",
|
||||
articles: [
|
||||
"c",
|
||||
"l",
|
||||
"all",
|
||||
"dall",
|
||||
"dell",
|
||||
"nell",
|
||||
"sull",
|
||||
"coll",
|
||||
"pell",
|
||||
"gl",
|
||||
"agl",
|
||||
"dagl",
|
||||
"degl",
|
||||
"negl",
|
||||
"sugl",
|
||||
"un",
|
||||
"m",
|
||||
"t",
|
||||
"s",
|
||||
"v",
|
||||
"d",
|
||||
],
|
||||
articles_case: true,
|
||||
},
|
||||
italian_stop: {
|
||||
type: "stop",
|
||||
stopwords: "_italian_",
|
||||
},
|
||||
italian_keywords: {
|
||||
type: "keyword_marker",
|
||||
keywords: ["esempio"],
|
||||
},
|
||||
italian_stemmer: {
|
||||
type: "stemmer",
|
||||
language: "light_italian",
|
||||
},
|
||||
},
|
||||
analyzer: {
|
||||
rebuilt_italian: {
|
||||
tokenizer: "standard",
|
||||
filter: [
|
||||
"italian_elision",
|
||||
"lowercase",
|
||||
"italian_stop",
|
||||
"italian_keywords",
|
||||
"italian_stemmer",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
19
docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc
Normal file
19
docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc
Normal file
@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "my-e5-model",
|
||||
inference_config: {
|
||||
service: "elasticsearch",
|
||||
service_settings: {
|
||||
num_allocations: 1,
|
||||
num_threads: 1,
|
||||
model_id: ".multilingual-e5-small",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
13
docs/doc_examples/010d5e901a2690fa7b2396edbe6cd463.asciidoc
Normal file
13
docs/doc_examples/010d5e901a2690fa7b2396edbe6cd463.asciidoc
Normal file
@ -0,0 +1,13 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putIndexTemplate({
|
||||
name: "my-data-stream-template",
|
||||
index_patterns: ["my-data-stream*"],
|
||||
data_stream: {},
|
||||
priority: 500,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
24
docs/doc_examples/0163af36c8472ac0c5160c8b716f5b26.asciidoc
Normal file
24
docs/doc_examples/0163af36c8472ac0c5160c8b716f5b26.asciidoc
Normal file
@ -0,0 +1,24 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "sales",
|
||||
size: 0,
|
||||
filter_path: "aggregations",
|
||||
query: {
|
||||
term: {
|
||||
type: "t-shirt",
|
||||
},
|
||||
},
|
||||
aggs: {
|
||||
avg_price: {
|
||||
avg: {
|
||||
field: "price",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,17 +4,14 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.updateAliases({
|
||||
body: {
|
||||
actions: [
|
||||
{
|
||||
add: {
|
||||
index: 'test1',
|
||||
alias: 'alias1'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
actions: [
|
||||
{
|
||||
add: {
|
||||
index: "logs-*",
|
||||
alias: "logs",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
26 docs/doc_examples/019e329ed5a930aef825266822e7377a.asciidoc Normal file
@ -0,0 +1,26 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "asciifold_example",
  settings: {
    analysis: {
      analyzer: {
        standard_asciifolding: {
          tokenizer: "standard",
          filter: ["my_ascii_folding"],
        },
      },
      filter: {
        my_ascii_folding: {
          type: "asciifolding",
          preserve_original: true,
        },
      },
    },
  },
});
console.log(response);
----
37 docs/doc_examples/01b23f09d2b7f140faf649eadbbf3ac3.asciidoc Normal file
@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.cluster.putComponentTemplate({
  name: "component_template1",
  template: {
    mappings: {
      properties: {
        "@timestamp": {
          type: "date",
        },
      },
    },
  },
});
console.log(response);

const response1 = await client.cluster.putComponentTemplate({
  name: "runtime_component_template",
  template: {
    mappings: {
      runtime: {
        day_of_week: {
          type: "keyword",
          script: {
            source:
              "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
          },
        },
      },
    },
  },
});
console.log(response1);
----
10 docs/doc_examples/01bc0f2ed30eb3dd23511d01ce0ac6e1.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.transform.startTransform({
  transform_id: "ecommerce_transform",
});
console.log(response);
----
12 docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc Normal file
@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.tasks.list({
  human: "true",
  detailed: "true",
  actions: "indices:data/write/bulk",
});
console.log(response);
----
@ -4,18 +4,14 @@

[source, js]
----
const response = await client.search({
  body: {
    query: {
  index: "my-index-000001",
  aggs: {
    "my-agg-name": {
      terms: {
        user: [
          'kimchy',
          'elasticsearch'
        ],
        boost: 1
      }
    }
  }
})
console.log(response)
        field: "my-field",
      },
    },
  },
});
console.log(response);
----

10 docs/doc_examples/01dc7bdc223bd651574ed2d3954a5b1c.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.watcher.executeWatch({
  id: "my_watch",
});
console.log(response);
----
39 docs/doc_examples/01f50acf7998b24969f451e922d145eb.asciidoc Normal file
@ -0,0 +1,39 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "basque_example",
  settings: {
    analysis: {
      filter: {
        basque_stop: {
          type: "stop",
          stopwords: "_basque_",
        },
        basque_keywords: {
          type: "keyword_marker",
          keywords: ["Adibidez"],
        },
        basque_stemmer: {
          type: "stemmer",
          language: "basque",
        },
      },
      analyzer: {
        rebuilt_basque: {
          tokenizer: "standard",
          filter: [
            "lowercase",
            "basque_stop",
            "basque_keywords",
            "basque_stemmer",
          ],
        },
      },
    },
  },
});
console.log(response);
----
10 docs/doc_examples/020c95db88ef356093f03be84893ddf9.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ccr.followStats({
  index: "<index>",
});
console.log(response);
----
20 docs/doc_examples/020de6b6cb960a76297452725a38889f.asciidoc Normal file
@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  query: {
    has_child: {
      type: "child",
      query: {
        match_all: {},
      },
      max_children: 10,
      min_children: 2,
      score_mode: "min",
    },
  },
});
console.log(response);
----
19 docs/doc_examples/0246f73cc2ed3dfec577119e8cd15404.asciidoc Normal file
@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.putMapping({
  index: "my-index-000001",
  properties: {
    name: {
      properties: {
        last: {
          type: "text",
        },
      },
    },
  },
});
console.log(response);
----
28 docs/doc_examples/025155da86802ebf4c3aeee5aab692f9.asciidoc Normal file
@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "sales",
  mappings: {
    properties: {
      tags: {
        type: "keyword",
      },
      comments: {
        type: "nested",
        properties: {
          username: {
            type: "keyword",
          },
          comment: {
            type: "text",
          },
        },
      },
    },
  },
});
console.log(response);
----
11 docs/doc_examples/02520ac7816b2c4cf8fb413fd16122f2.asciidoc Normal file
@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ml.flushJob({
  job_id: "low_request_rate",
  calc_interim: true,
});
console.log(response);
----
23 docs/doc_examples/0264e994a7e68561e2ca6be0f0d90ee9.asciidoc Normal file
@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  aggs: {
    JapaneseCars: {
      terms: {
        field: "make",
        include: ["mazda", "honda"],
      },
    },
    ActiveCarManufacturers: {
      terms: {
        field: "make",
        exclude: ["rover", "jensen"],
      },
    },
  },
});
console.log(response);
----
10 docs/doc_examples/0280247e0cf2e561c548f22c9fb31163.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.security.invalidateToken({
  username: "myuser",
});
console.log(response);
----
28 docs/doc_examples/02853293a5b7cd9cc7a886eb413bbeb6.asciidoc Normal file
@ -0,0 +1,28 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.analyze({
  tokenizer: "keyword",
  char_filter: [
    {
      type: "mapping",
      mappings: [
        "٠ => 0",
        "١ => 1",
        "٢ => 2",
        "٣ => 3",
        "٤ => 4",
        "٥ => 5",
        "٦ => 6",
        "٧ => 7",
        "٨ => 8",
        "٩ => 9",
      ],
    },
  ],
  text: "My license plate is ٢٥٠١٥",
});
console.log(response);
----
@ -1,43 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  body: {
    aggs: {
      countries: {
        terms: {
          field: 'artist.country',
          order: [
            {
              'rock>playback_stats.avg': 'desc'
            },
            {
              _count: 'desc'
            }
          ]
        },
        aggs: {
          rock: {
            filter: {
              term: {
                genre: 'rock'
              }
            },
            aggs: {
              playback_stats: {
                stats: {
                  field: 'play_count'
                }
              }
            }
          }
        }
      }
    }
  }
})
console.log(response)
----

21 docs/doc_examples/029de2f5383a42e1ac4ca1565bd2a130.asciidoc Normal file
@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000001",
  mappings: {
    properties: {
      full_name: {
        type: "text",
        index_prefixes: {
          min_chars: 1,
          max_chars: 10,
        },
      },
    },
  },
});
console.log(response);
----
16 docs/doc_examples/02b00f21e9d23d82276ace0dd154d779.asciidoc Normal file
@ -0,0 +1,16 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-index-000001",
  routing: "user1,user2",
  query: {
    match: {
      title: "document",
    },
  },
});
console.log(response);
----
@ -4,9 +4,7 @@

[source, js]
----
const response = await client.search({
  index: '*',
  q: 'user:kimchy'
})
console.log(response)
  index: "my-data-stream",
});
console.log(response);
----

24 docs/doc_examples/02c48d461536709c3fc8a0e8147c3787.asciidoc Normal file
@ -0,0 +1,24 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "pipelineB",
  description: "outer pipeline",
  processors: [
    {
      pipeline: {
        name: "pipelineA",
      },
    },
    {
      set: {
        field: "outer_pipeline_set",
        value: "outer",
      },
    },
  ],
});
console.log(response);
----
10 docs/doc_examples/02f65c6bab8f40bf3ce18160623d1870.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getTemplate({
  name: "template_1",
});
console.log(response);
----
38 docs/doc_examples/02fad6b80bb29c2a7e6840db2fc67b18.asciidoc Normal file
@ -0,0 +1,38 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000001",
  mappings: {
    properties: {
      my_wildcard: {
        type: "wildcard",
      },
    },
  },
});
console.log(response);

const response1 = await client.index({
  index: "my-index-000001",
  id: 1,
  document: {
    my_wildcard: "This string can be quite lengthy",
  },
});
console.log(response1);

const response2 = await client.search({
  index: "my-index-000001",
  query: {
    wildcard: {
      my_wildcard: {
        value: "*quite*lengthy",
      },
    },
  },
});
console.log(response2);
----
@ -4,10 +4,9 @@

[source, js]
----
const response = await client.get({
  index: 'twitter',
  id: '2',
  routing: 'user1'
})
console.log(response)
  index: "my-index-000001",
  id: 0,
  _source: "*.id",
});
console.log(response);
----

10 docs/doc_examples/032eac56b798bea29390e102538f4a26.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.refresh({
  index: "my-index-000001,my-index-000002",
});
console.log(response);
----
@ -1,22 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  body: {
    aggs: {
      genres: {
        terms: {
          script: {
            source: "doc['genre'].value",
            lang: 'painless'
          }
        }
      }
    }
  }
})
console.log(response)
----

26 docs/doc_examples/033838729cfb5d1a28d04f69ee78d924.asciidoc Normal file
@ -0,0 +1,26 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.index({
  index: "example",
  document: {
    location: {
      type: "Polygon",
      orientation: "LEFT",
      coordinates: [
        [
          [-177, 10],
          [176, 15],
          [172, 0],
          [176, -15],
          [-177, -10],
          [-177, 10],
        ],
      ],
    },
  },
});
console.log(response);
----
36 docs/doc_examples/0350410d11579f4e876c798ce1eaef5b.asciidoc Normal file
@ -0,0 +1,36 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.index({
  index: "my-index-000001",
  id: 5,
  refresh: "true",
  document: {
    query: {
      bool: {
        should: [
          {
            match: {
              message: {
                query: "Japanese art",
                _name: "query1",
              },
            },
          },
          {
            match: {
              message: {
                query: "Holand culture",
                _name: "query2",
              },
            },
          },
        ],
      },
    },
  },
});
console.log(response);
----
38 docs/doc_examples/0350ff5ebb8207c004eb771088339cb4.asciidoc Normal file
@ -0,0 +1,38 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "example-index",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              term: {
                text: "blue shoes sale",
              },
            },
          },
        },
        {
          standard: {
            query: {
              sparse_vector: {
                field: "ml.tokens",
                inference_id: "my_elser_model",
                query: "What blue shoes are on sale?",
              },
            },
          },
        },
      ],
      rank_window_size: 50,
      rank_constant: 20,
    },
  },
});
console.log(response);
----
29 docs/doc_examples/03582fc93683e573062bcfda45e01d69.asciidoc Normal file
@ -0,0 +1,29 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-index-000001",
  settings: {
    analysis: {
      analyzer: {
        my_custom_analyzer: {
          type: "custom",
          tokenizer: "standard",
          char_filter: ["html_strip"],
          filter: ["lowercase", "asciifolding"],
        },
      },
    },
  },
});
console.log(response);

const response1 = await client.indices.analyze({
  index: "my-index-000001",
  analyzer: "my_custom_analyzer",
  text: "Is this <b>déjà vu</b>?",
});
console.log(response1);
----
11 docs/doc_examples/035a7a919eb6513b4769a3727b7d6447.asciidoc Normal file
@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.analyze({
  analyzer: "whitespace",
  text: "The quick brown fox.",
});
console.log(response);
----
@ -0,0 +1,8 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.security.getServiceAccounts();
console.log(response);
----
14 docs/doc_examples/03b1d76fa0b773d5b7d74ecb7e1e1a80.asciidoc Normal file
@ -0,0 +1,14 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.snapshot.restore({
  repository: "my_repository",
  snapshot: "my_snapshot_2099.05.06",
  indices: "my-index,logs-my_app-default",
  rename_pattern: "(.+)",
  rename_replacement: "restored-$1",
});
console.log(response);
----
11 docs/doc_examples/03c4b815bf1e6a8c5cfcc6ddf94bc093.asciidoc Normal file
@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.sql.query({
  format: "txt",
  query: "SELECT * FROM library ORDER BY page_count DESC LIMIT 5",
});
console.log(response);
----
12 docs/doc_examples/04412d11783dac25b5fd2ec5407078a3.asciidoc Normal file
@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.connector.updateApiKeyId({
  connector_id: "my-connector",
  api_key_id: "my-api-key-id",
  api_key_secret_id: "my-connector-secret-id",
});
console.log(response);
----
12 docs/doc_examples/044b2f99e7438e408685b258db17f863.asciidoc Normal file
@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.eql.search({
  index: "my-data-stream",
  query: '\n process where process.name == "regsvr32.exe"\n ',
  size: 50,
});
console.log(response);
----
52 docs/doc_examples/046b2249bbc49e77848c114cee940f17.asciidoc Normal file
@ -0,0 +1,52 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "my-index",
  retriever: {
    rrf: {
      retrievers: [
        {
          standard: {
            query: {
              multi_match: {
                query: "How is the weather in Jamaica?",
                fields: ["title", "description"],
              },
            },
          },
        },
        {
          standard: {
            query: {
              text_expansion: {
                "ml.inference.title_expanded.predicted_value": {
                  model_id: ".elser_model_2",
                  model_text: "How is the weather in Jamaica?",
                },
              },
            },
          },
        },
        {
          standard: {
            query: {
              text_expansion: {
                "ml.inference.description_expanded.predicted_value": {
                  model_id: ".elser_model_2",
                  model_text: "How is the weather in Jamaica?",
                },
              },
            },
          },
        },
      ],
      window_size: 10,
      rank_constant: 20,
    },
  },
});
console.log(response);
----
27 docs/doc_examples/0470d7101637568b9d3d1239f06325a7.asciidoc Normal file
@ -0,0 +1,27 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.transport.request({
  method: "PUT",
  path: "/_internal/desired_nodes/<history_id>/<version>",
  body: {
    nodes: [
      {
        settings: {
          "node.name": "instance-000187",
          "node.external_id": "instance-000187",
          "node.roles": ["data_hot", "master"],
          "node.attr.data": "hot",
          "node.attr.logical_availability_zone": "zone-0",
        },
        processors: 8,
        memory: "58gb",
        storage: "2tb",
      },
    ],
  },
});
console.log(response);
----
@ -4,20 +4,14 @@

[source, js]
----
const response = await client.search({
  body: {
    query: {
      multi_match: {
        query: 'Will Smith',
        type: 'cross_fields',
        fields: [
          'first_name',
          'last_name'
        ],
        operator: 'and'
      }
    }
  }
})
console.log(response)
  query: {
    multi_match: {
      query: "Will Smith",
      type: "cross_fields",
      fields: ["first_name", "last_name"],
      operator: "and",
    },
  },
});
console.log(response);
----

10 docs/doc_examples/048652b6abfe195da8ea8cef10ee01b1.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.transform.resetTransform({
  transform_id: "ecommerce_transform",
});
console.log(response);
----
@ -1,17 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.create({
  index: 'twitter',
  id: '1',
  body: {
    user: 'kimchy',
    post_date: '2009-11-15T14:12:12',
    message: 'trying out Elasticsearch'
  }
})
console.log(response)
----

31 docs/doc_examples/04d586a536061ec1045d0bb2dc3d1a5f.asciidoc Normal file
@ -0,0 +1,31 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "set_os",
  description: "sets the value of host.os.name from the field os",
  processors: [
    {
      set: {
        field: "host.os.name",
        value: "{{{os}}}",
      },
    },
  ],
});
console.log(response);

const response1 = await client.ingest.simulate({
  id: "set_os",
  docs: [
    {
      _source: {
        os: "Ubuntu",
      },
    },
  ],
});
console.log(response1);
----
27 docs/doc_examples/04d6ce0c903bd468afbecd3aa1c4a78a.asciidoc Normal file
@ -0,0 +1,27 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "my-pipeline-id",
  description: "My optional pipeline description",
  processors: [
    {
      set: {
        description: "My optional processor description",
        field: "my-keyword-field",
        value: "foo",
      },
    },
  ],
  _meta: {
    reason: "set my-keyword-field to foo",
    serialization: {
      class: "MyPipeline",
      id: 10,
    },
  },
});
console.log(response);
----
20 docs/doc_examples/04de2e3a9c00c2056b07bf9cf9e63a99.asciidoc Normal file
@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.put({
  task_type: "text_embedding",
  inference_id: "google_vertex_ai_embeddings",
  inference_config: {
    service: "googlevertexai",
    service_settings: {
      service_account_json: "<service_account_json>",
      model_id: "<model_id>",
      location: "<location>",
      project_id: "<project_id>",
    },
  },
});
console.log(response);
----
@ -4,9 +4,8 @@

[source, js]
----
const response = await client.cluster.health({
  wait_for_status: 'yellow',
  timeout: '50s'
})
console.log(response)
  wait_for_status: "yellow",
  timeout: "50s",
});
console.log(response);
----

@ -1,30 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  body: {
    query: {
      term: {
        user: 'kimchy'
      }
    },
    sort: {
      _script: {
        type: 'number',
        script: {
          lang: 'painless',
          source: "doc['field_name'].value * params.factor",
          params: {
            factor: 1.1
          }
        },
        order: 'asc'
      }
    }
  }
})
console.log(response)
----

37 docs/doc_examples/0502284d4685c478eb68761f979f4303.asciidoc Normal file
@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ml.evaluateDataFrame({
  index: "house_price_predictions",
  query: {
    bool: {
      filter: [
        {
          term: {
            "ml.is_training": false,
          },
        },
      ],
    },
  },
  evaluation: {
    regression: {
      actual_field: "price",
      predicted_field: "ml.price_prediction",
      metrics: {
        r_squared: {},
        mse: {},
        msle: {
          offset: 10,
        },
        huber: {
          delta: 1.5,
        },
      },
    },
  },
});
console.log(response);
----
12 docs/doc_examples/050b3947025fee403232b8e6e9112dab.asciidoc Normal file
@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.sql.query({
  format: "yaml",
  query: "SELECT * FROM library ORDER BY page_count DESC",
  fetch_size: 5,
});
console.log(response);
----
54 docs/doc_examples/05148cc541f447486d9daf15ab77292b.asciidoc Normal file
@ -0,0 +1,54 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ilm.putLifecycle({
  name: "logs",
  policy: {
    phases: {
      hot: {
        actions: {
          rollover: {
            max_primary_shard_size: "50gb",
          },
        },
      },
      warm: {
        min_age: "30d",
        actions: {
          shrink: {
            number_of_shards: 1,
          },
          forcemerge: {
            max_num_segments: 1,
          },
        },
      },
      cold: {
        min_age: "60d",
        actions: {
          searchable_snapshot: {
            snapshot_repository: "found-snapshots",
          },
        },
      },
      frozen: {
        min_age: "90d",
        actions: {
          searchable_snapshot: {
            snapshot_repository: "found-snapshots",
          },
        },
      },
      delete: {
        min_age: "735d",
        actions: {
          delete: {},
        },
      },
    },
  },
});
console.log(response);
----
23 docs/doc_examples/0518c673094fb18ecb491a3b78af4695.asciidoc Normal file
@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ilm.putLifecycle({
  name: "my_policy",
  policy: {
    phases: {
      warm: {
        actions: {
          allocate: {
            include: {
              box_type: "hot,warm",
            },
          },
        },
      },
    },
  },
});
console.log(response);
----
10 docs/doc_examples/05284c8ea91769c09c8db47db8a6629a.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.cat.repositories({
  v: "true",
});
console.log(response);
----
19 docs/doc_examples/053497b6960f80fd7b005b7c6d54358f.asciidoc Normal file
@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ilm.putLifecycle({
  name: "my_policy",
  policy: {
    phases: {
      delete: {
        actions: {
          delete: {},
        },
      },
    },
  },
});
console.log(response);
----
24 docs/doc_examples/05500e77aef581d92f6c605f7a48f7df.asciidoc Normal file
@ -0,0 +1,24 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.index({
  index: "example",
  document: {
    location: {
      type: "polygon",
      coordinates: [
        [
          [1000, -1001],
          [1001, -1001],
          [1001, -1000],
          [1000, -1000],
          [1000, -1001],
        ],
      ],
    },
  },
});
console.log(response);
----
20 docs/doc_examples/059e04aaf093379401f665c33ac796dc.asciidoc Normal file
@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.analyze({
  tokenizer: "whitespace",
  filter: [
    {
      type: "keyword_marker",
      keywords: ["jumping"],
    },
    "stemmer",
  ],
  text: "fox running and jumping",
  explain: true,
  attributes: "keyword",
});
console.log(response);
----
78 docs/doc_examples/05a09078fe1016e900e445ad4039cf97.asciidoc Normal file
@ -0,0 +1,78 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "clientips",
  mappings: {
    properties: {
      client_ip: {
        type: "keyword",
      },
      env: {
        type: "keyword",
      },
    },
  },
});
console.log(response);

const response1 = await client.bulk({
  index: "clientips",
  operations: [
    {
      index: {},
    },
    {
      client_ip: "172.21.0.5",
      env: "Development",
    },
    {
      index: {},
    },
    {
      client_ip: "172.21.2.113",
      env: "QA",
    },
    {
      index: {},
    },
    {
      client_ip: "172.21.2.162",
      env: "QA",
    },
    {
      index: {},
    },
    {
      client_ip: "172.21.3.15",
      env: "Production",
    },
    {
      index: {},
    },
    {
      client_ip: "172.21.3.16",
      env: "Production",
    },
  ],
});
console.log(response1);

const response2 = await client.enrich.putPolicy({
  name: "clientip_policy",
  match: {
    indices: "clientips",
    match_field: "client_ip",
    enrich_fields: ["env"],
  },
});
console.log(response2);

const response3 = await client.enrich.executePolicy({
  name: "clientip_policy",
  wait_for_completion: "false",
});
console.log(response3);
----
10 docs/doc_examples/05ba0fdd0215e313ecea8a2f8f5a43b4.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getDataStream({
  name: "my-data-stream",
});
console.log(response);
----
21 docs/doc_examples/05bee3adf46b9d6a2fef96c51bf958da.asciidoc Normal file
@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.security.putRole({
  name: "click_role",
  indices: [
    {
      names: ["events-*"],
      privileges: ["read"],
      query: {
        match: {
          category: "click",
        },
      },
    },
  ],
});
console.log(response);
----
21 docs/doc_examples/05e637284bc3bedd46e0b7c26ad983c4.asciidoc Normal file
@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "alibabacloud_ai_search_embeddings_pipeline",
  processors: [
    {
      inference: {
        model_id: "alibabacloud_ai_search_embeddings",
        input_output: {
          input_field: "content",
          output_field: "content_embedding",
        },
      },
    },
  ],
});
console.log(response);
----
Some files were not shown because too many files have changed in this diff.