Compare commits

..

11 Commits

Author SHA1 Message Date
fb6afbdda2 8.12.0 changelog (#2125) (#2126)
(cherry picked from commit 57ee5cf6c2)

Co-authored-by: Quentin Pradet <quentin.pradet@elastic.co>
2024-01-31 13:58:02 +04:00
b6335490f7 Auto-generated code for 8.12 (#2124) 2024-01-31 11:33:06 +04:00
b4280a5b77 Revert "Auto-generated code for 8.12"
This reverts commit 09afe9c75c. Two links
were invalid, security-api-get-settings.html and
security-api-update-settings.html.
2024-01-30 17:34:38 +04:00
09afe9c75c Auto-generated code for 8.12 2024-01-30 16:56:40 +04:00
d9195d54f7 Improved the body BC break description in request/response for 8.x documentation (#2117) (#2119)
* Improved the body bc break in 8.x documentation

* Removed just in the sentence

(cherry picked from commit 6eabf37097)

Co-authored-by: Enrico Zimuel <e.zimuel@gmail.com>
2024-01-04 13:58:14 +01:00
728c6cf2ee [Backport 8.12] Add missing snippets (#2114)
For https://github.com/elastic/clients-team/issues/728

(cherry picked from commit 5413eb5f35)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-14 17:20:48 -06:00
e4b090b90e [Backport 8.12] Integration test improvements (#2111)
* Improvements to integrations

Borrowed largely from https://github.com/elastic/elasticsearch-serverless-js/pull/38

* Bump all the things to 8.12.0

* Split Dockerfile copy into two layers

* Fix test cron names

(cherry picked from commit 4aaf49b6ea)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-14 16:41:15 -06:00
d9dd69b172 Set version to 8.12.0 (#2108) 2023-12-14 13:54:15 -06:00
58de8e169b [Backport 8.12] Add doc for closing connections (#2105)
(cherry picked from commit d3f22f1e14)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-14 09:47:11 -06:00
a2fb62ba43 [Backport 8.12] 8.11.0 changelog (#2099)
* Changelog for 8.11.0

* Add redaction docs link to changelog

(cherry picked from commit 1fb789862d)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-12 16:11:24 -06:00
1e8565ad3b [Backport 8.12] Bump transport to 8.4.0 (#2098)
* Support for transport 8.4.0 redaction functionality

* Docs for `redaction` options

(cherry picked from commit c2c417a9fd)

Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-12-12 16:04:26 -06:00
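
Two of the backported changes above are client-facing: #2105 adds documentation for closing connections and #2098 pulls in the transport 8.4.0 `redaction` options. The following is a minimal illustrative sketch, not code from this repository; the option shape is assumed from the 8.11.0 changelog, and the node URL, API key, and header name are placeholders.

[source, js]
----
const { Client } = require('@elastic/elasticsearch')

// Assumed option shape: `redaction` controls how sensitive values are scrubbed
// from error metadata before it is logged or serialized.
const client = new Client({
  node: 'https://localhost:9200',        // placeholder node
  auth: { apiKey: 'base64EncodedKey' },  // placeholder credentials
  redaction: {
    type: 'replace',                     // replace redacted values rather than dropping the keys
    additionalKeys: ['x-custom-auth']    // extra keys to scrub beyond the defaults
  }
})

// When the client is no longer needed, close its open connections.
await client.close()
----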
3515 changed files with 9678 additions and 82389 deletions

View File

@@ -2,7 +2,7 @@
 #
 # Shared cleanup routines between different steps
 #
-# Please source .buildkite/functions/imports.sh as a whole not just this file
+# Please source .ci/functions/imports.sh as a whole not just this file
 #
 # Version 1.0.0
 # - Initial version after refactor

View File

@@ -2,7 +2,7 @@
 #
 # Exposes a routine scripts can call to wait for a container if that container set up a health command
 #
-# Please source .buildkite/functions/imports.sh as a whole not just this file
+# Please source .ci/functions/imports.sh as a whole not just this file
 #
 # Version 1.0.1
 # - Initial version after refactor

View File

@@ -6,7 +6,7 @@ steps:
     env:
       NODE_VERSION: "{{ matrix.nodejs }}"
       TEST_SUITE: "{{ matrix.suite }}"
-      STACK_VERSION: 8.15.0
+      STACK_VERSION: 8.12.0-SNAPSHOT
     matrix:
       setup:
         suite:
@@ -15,7 +15,6 @@ steps:
         nodejs:
           - "18"
           - "20"
-          - "22"
     command: ./.buildkite/run-tests.sh
     artifact_paths: "./junit-output/junit-*.xml"
   - wait: ~
@@ -27,6 +26,6 @@ steps:
     plugins:
       - junit-annotate#v2.4.1:
           artifacts: "junit-output/junit-*.xml"
-          job-uuid-file-pattern: "junit-(.*).xml"
+          job-uuid-file-pattern: 'junit-(.*).xml'
          fail-build-on-error: true
          failure-format: file

View File

@@ -9,6 +9,7 @@
   "\\.md$",
   "\\.asciidoc$",
   "^docs\\/",
+  "^\\.ci\\/",
   "^scripts\\/",
   "^catalog-info\\.yaml$",
   "^test\\/unit\\/",

View File

@@ -74,15 +74,14 @@ async function release (args) {
 async function bump (args) {
   assert(args.length === 1, 'Bump task expects one parameter')
-  let [version] = args
+  const [version] = args
   const packageJson = JSON.parse(await readFile(
     join(import.meta.url, '..', 'package.json'),
     'utf8'
   ))
-  if (version.split('.').length === 2) version = `${version}.0`
   const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
-  assert(semver.valid(cleanVersion), `${cleanVersion} is not seen as a valid semver version. raw version: ${version}`)
+  assert(semver.valid(cleanVersion))
   packageJson.version = cleanVersion
   packageJson.versionCanary = `${cleanVersion}-canary.0`
@@ -95,7 +94,7 @@ async function bump (args) {
   const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'), 'utf8')
   await writeFile(
     join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
-    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}`),
+    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}-SNAPSHOT`),
     'utf8'
   )
 }
@@ -125,13 +124,6 @@ async function codegen (args) {
   await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
   await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
   await $`npm run build`
-  // run docs example generation
-  if (version === 'main') {
-    await $`node ./scripts/generate-docs-examples.js`
-  } else {
-    await $`node ./scripts/generate-docs-examples.js ${version.split('.').slice(0, 2).join('.')}`
-  }
 }

 function onError (err) {
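
Not part of the diff, just an illustrative sketch: how the `STACK_VERSION` regex in the `bump` task above rewrites the Buildkite pipeline, demonstrated on an inline sample string (the sample pipeline text and versions are made up).

[source, js]
----
// Illustrative only: a sample of what the new (right-hand) replacement produces.
const pipeline = 'env:\n  STACK_VERSION: 8.11.4-SNAPSHOT\n'
const cleanVersion = '8.12.0'

const updated = pipeline.replace(
  /STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/,
  `STACK_VERSION: ${cleanVersion}-SNAPSHOT`
)

console.log(updated) // env:\n  STACK_VERSION: 8.12.0-SNAPSHOT
----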

View File

@@ -3,7 +3,7 @@
 #
 # Build entry script for elasticsearch-js
 #
-# Must be called: ./.github/make.sh <target> <params>
+# Must be called: ./.ci/make.sh <target> <params>
 #
 # Version: 1.1.0
 #
@@ -34,8 +34,8 @@ STACK_VERSION=$VERSION
 set -euo pipefail
 product="elastic/elasticsearch-js"
-output_folder=".buildkite/output"
-codegen_folder=".buildkite/output"
+output_folder=".ci/output"
+codegen_folder=".ci/output"
 OUTPUT_DIR="$repo/${output_folder}"
 NODE_JS_VERSION=18
 WORKFLOW=${WORKFLOW-staging}
@@ -131,7 +131,7 @@ esac
 echo -e "\033[34;1mINFO: building $product container\033[0m"
 docker build \
-  --file .buildkite/Dockerfile-make \
+  --file .ci/Dockerfile \
   --tag "$product" \
   --build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
   --build-arg "BUILDER_UID=$(id -u)" \
@@ -156,7 +156,7 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
     --rm \
     $product \
     /bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
-      node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
+      node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
 else
   echo -e "\033[34;1mINFO: Running in CI mode"
   docker run \
@@ -171,7 +171,7 @@ else
       git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
       mkdir -p /usr/src/elastic-client-generator-js/output && \
       cd /usr/src/elasticsearch-js && \
-      node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
+      node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
 fi
 # ------------------------------------------------------- #
@@ -179,7 +179,7 @@ fi
 # ------------------------------------------------------- #
 if [[ "$CMD" == "assemble" ]]; then
-  if compgen -G ".buildkite/output/*" > /dev/null; then
+  if compgen -G ".ci/output/*" > /dev/null; then
     echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
   else
     echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"

View File

@@ -1,14 +1,13 @@
 ---
 name: 🐛 Bug report
 about: Create a report to help us improve
-labels: ["Category: Bug"]
 ---
 It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
 **Please read this entire template before posting any issue. If you ignore these instructions
 and post an issue here that does not follow the instructions, your issue might be closed,
-locked, and assigned the `Category: Not an issue` label.**
+locked, and assigned the `not reproducible` label.**
 ## 🐛 Bug Report

View File

@@ -1,14 +1,13 @@
 ---
 name: 🚀 Feature Proposal
 about: Submit a proposal for a new feature
-labels: ["Category: Feature"]
 ---
 It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
 **Please read this entire template before posting any issue. If you ignore these instructions
 and post an issue here that does not follow the instructions, your issue might be closed,
-locked, and assigned the `Category: Not an issue` label.**
+locked, and assigned the `invalid` label.**
 ## 🚀 Feature Proposal

View File

@@ -1,7 +1,6 @@
 ---
 name: 💬 Questions / Help
 about: If you have questions, please check our Gitter or Help repo
-labels: ["Category: Question"]
 ---
 ## 💬 Questions and Help

56 .github/ISSUE_TEMPLATE/regression.md vendored Normal file
View File

@ -0,0 +1,56 @@
---
name: 💥 Regression Report
about: Report unexpected behavior that worked in previous versions
---
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `invalid` label.**
## 💥 Regression Report
A clear and concise description of what the regression is.
## Last working version
Worked up to version:
Stopped working in version:
## To Reproduce
Steps to reproduce the behavior:
Paste your code here:
```js
```
<!--
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed
to spin up a three nodes Elasticsearch cluster with security enabled.
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
https://github.com/delvedor/es-reproduce-issue
--->
## Expected behavior
A clear and concise description of what you expected to happen.
Paste the results here:
```js
```
## Your Environment
- *node version*: 6,8,10
- `@elastic/elasticsearch` *version*: >=7.0.0
- *typescript version*: 4.x (if applicable)
- *os*: Mac, Windows, Linux
- *any other relevant information*

View File

@ -1,92 +0,0 @@
---
name: 💥 Regression Report
description: Report unexpected behavior that worked in previous versions
labels: ["Category: Bug"]
body:
- type: markdown
attributes:
value: |
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**
- type: textarea
id: report
attributes:
label: Regression report
description: A clear and concise description of what the regression is.
validations:
required: true
- type: input
id: last-working-version
attributes:
label: Last working version
description: Version of `@elastic/elasticsearch` where this last worked.
validations:
required: true
- type: textarea
id: to-reproduce
attributes:
label: To reproduce
description: |
Paste your code here that shows how to reproduce the behavior.
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed to spin up a three nodes Elasticsearch cluster with security enabled.
[This repository](https://github.com/delvedor/es-reproduce-issue) also contains a preconfigured client instance that you can use to reproduce the issue.
validations:
required: true
- type: textarea
id: expected-behavior
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: input
id: node-version
attributes:
label: Node.js version
description: What version of Node.js you are using (`node --version`).
validations:
required: true
- type: input
id: typescript-version
attributes:
label: TypeScript version
description: TypeScript version you are using, if applicable.
- type: input
id: elasticsearch-client-version
attributes:
label: Elasticsearch client version
description: What version of `@elastic/elasticsearch` and `@elastic/transport` you are using (`npm ls -a | grep '@elastic'`).
validations:
required: true
- type: input
id: elasticsearch-version
attributes:
label: Elasticsearch server version
description: What version of Elasticsearch you are using.
validations:
required: true
- type: input
id: operating-system
attributes:
label: Operating system
description: What operating system you are running.
placeholder: e.g. Linux, MacOS, Windows
- type: textarea
id: env-info
attributes:
label: Any other relevant environment information.

View File

@ -1,18 +0,0 @@
name: Automerge
on:
pull_request_review:
types:
- submitted
jobs:
automerge:
runs-on: ubuntu-latest
if: github.event.review.state == 'approved'
steps:
- uses: reitermarkus/automerge@v2
with:
token: ${{ secrets.GH_TOKEN }}
merge-method: squash
pull-request-author-associations: OWNER
review-author-associations: OWNER,CONTRIBUTOR

View File

@@ -9,7 +9,7 @@ jobs:
     name: Detect files changed
     runs-on: ubuntu-latest
     outputs:
-      src-only: "${{ steps.changes.outputs.src-only }}"
+      src-only: '${{ steps.changes.outputs.src-only }}'
     steps:
       - uses: actions/checkout@v4
       - uses: dorny/paths-filter/@v2.11.1
@@ -17,7 +17,7 @@ jobs:
         with:
           filters: |
             src-only:
-              - '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.buildkite,scripts}/**/*|catalog-info.yaml)'
+              - '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.ci,.buildkite,scripts}/**/*|catalog-info.yaml)'
               - '.github/workflows/**'
   test:
@@ -30,17 +30,24 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        node-version: [18.x, 20.x, 22.x]
+        node-version: [18.x, 20.x]
         os: [ubuntu-latest, windows-latest, macOS-latest]
     steps:
       - uses: actions/checkout@v4
       - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v3
         with:
           node-version: ${{ matrix.node-version }}
+      # workaround for failing tests on Node.js 14.x
+      # see https://github.com/actions/setup-node/issues/411
+      - name: Force install specific npm version
+        run: |
+          npm install --global npm@8.3.1
+          npm install --global npm@9.7.1
       - name: Install
         run: |
           npm install
@@ -57,13 +64,17 @@ jobs:
     name: License check
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version: [20.x]
     steps:
-      - uses: actions/checkout@v4
-      - name: Use Node.js
-        uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v3
         with:
-          node-version: 22.x
+          node-version: ${{ matrix.node-version }}
       - name: Install
         run: |
@@ -72,13 +83,3 @@ jobs:
       - name: License checker
         run: |
           npm run license-checker
-  auto-approve:
-    name: Auto-approve
-    needs: [test, license]
-    runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
-    if: github.actor == 'elasticmachine'
-    steps:
-      - uses: hmarr/auto-approve-action@v4

View File

@ -1,37 +0,0 @@
name: Publish Package to npm
on:
workflow_dispatch:
inputs:
branch:
description: "Git branch to build and publish"
required: true
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.branch }}
- uses: actions/setup-node@v3
with:
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- run: npm install -g npm
- run: npm install
- run: npm test
- run: npm publish --provenance --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
version=$(jq -r .version package.json)
gh release create \
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
--target "$BRANCH_NAME" \
-t "v$version" \
"v$version"
env:
BRANCH_NAME: ${{ github.event.inputs.branch }}
GH_TOKEN: ${{ github.token }}

View File

@ -1,43 +0,0 @@
#!/usr/bin/env bash
set -exuo pipefail
merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
pr_shortcode="elastic/elasticsearch-js#$pull_request_id"
# generate patch file
cd "$GITHUB_WORKSPACE/stack"
git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff
# set committer info
git config --global user.email "elasticmachine@users.noreply.github.com"
git config --global user.name "Elastic Machine"
# apply patch file
cd "$GITHUB_WORKSPACE/serverless"
git am -C1 --reject /tmp/patch.diff || git am --quit
# generate PR body comment
comment="Patch applied from $pr_shortcode"
# enumerate rejected patches in PR comment
has_rejects='false'
for f in ./**/*.rej; do
has_rejects='true'
comment="$comment
## Rejected patch \`$f\` must be resolved:
\`\`\`diff
$(cat "$f")
\`\`\`
"
done
# delete .rej files
rm -fv ./**/*.rej
# send data to output parameters
echo "$comment" > /tmp/pr_body
echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"

View File

@ -1,51 +0,0 @@
---
name: Apply PR changes to serverless
on:
pull_request_target:
types:
- closed
- labeled
jobs:
apply-patch:
name: Apply patch
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& (
(
github.event.action == 'closed'
&& contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
)
||
(
github.event.action == 'labeled'
&& github.event.label.name == 'apply-to-serverless'
)
)
steps:
- uses: actions/checkout@v4
with:
repository: elastic/elasticsearch-js
ref: main
path: stack
fetch-depth: 0
- uses: actions/checkout@v4
with:
repository: elastic/elasticsearch-serverless-js
ref: main
path: serverless
- name: Apply patch from stack to serverless
id: apply-patch
run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
- uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GH_TOKEN }}
path: serverless
title: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
commit-message: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
body-path: /tmp/pr_body
draft: '${{ steps.apply-patch.outputs.PR_DRAFT }}'
add-paths: ':!*.rej'

1 .gitignore vendored
View File

@@ -63,4 +63,3 @@ test/bundlers/**/bundle.js
 test/bundlers/parcel-test/.parcel-cache
 lib
-junit-output

View File

@@ -64,6 +64,7 @@ test
 scripts
 # ci configuration
+.ci
 .travis.yml
 .buildkite
 certs

11 Makefile Normal file
View File

@ -0,0 +1,11 @@
.PHONY: integration-setup
integration-setup: integration-cleanup
DETACH=true .ci/run-elasticsearch.sh
.PHONY: integration-cleanup
integration-cleanup:
docker container rm --force --volumes instance || true
.PHONY: integration
integration: integration-setup
npm run test:integration

View File

@@ -21,25 +21,6 @@ of the getting started documentation.
 Refer to the [Connecting section](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_connecting)
 of the getting started documentation.
-## Compatibility
-The Elasticsearch client is compatible with currently maintained JS versions.
-Language clients are forward compatible; meaning that clients support
-communicating with greater or equal minor versions of Elasticsearch without
-breaking. It does not mean that the client automatically supports new features
-of newer Elasticsearch versions; it is only possible after a release of a new
-client version. For example, a 8.12 client version won't automatically support
-the new features of the 8.13 version of Elasticsearch, the 8.13 client version
-is required for that. Elasticsearch language clients are only backwards
-compatible with default distributions and without guarantees made.
-| Elasticsearch Version | Elasticsearch-JS Branch | Supported |
-| --------------------- | ------------------------ | --------- |
-| main | main | |
-| 8.x | 8.x | 8.x |
-| 7.x | 7.x | 7.17 |
 ## Usage
 * [Creating an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_creating_an_index)

View File

@@ -6,7 +6,7 @@ metadata:
   name: elasticsearch-js
 spec:
   type: library
-  owner: group:devtools-team
+  owner: group:clients-team
   lifecycle: production
 ---
@@ -18,7 +18,7 @@ metadata:
   description: elasticsearch-js - integration tests
 spec:
   type: buildkite-pipeline
-  owner: group:devtools-team
+  owner: group:clients-team
   system: buildkite
   implementation:
     apiVersion: buildkite.elastic.dev/v1
@@ -29,7 +29,7 @@ spec:
       repository: elastic/elasticsearch-js
       pipeline_file: .buildkite/pipeline.yml
       teams:
-        devtools-team:
+        clients-team:
          access_level: MANAGE_BUILD_AND_READ
        everyone:
          access_level: READ_ONLY
@@ -37,11 +37,14 @@ spec:
       build_pull_requests: false
       build_branches: false
       cancel_intermediate_builds: true
-      cancel_intermediate_builds_branch_filter: "!main"
+      cancel_intermediate_builds_branch_filter: '!main'
       schedules:
-        main:
-          branch: "main"
-          cronline: "@daily"
-        8_14:
-          branch: "8.14"
-          cronline: "@daily"
+        main_semi_daily:
+          branch: 'main'
+          cronline: '0 */12 * * *'
+        8_12_semi_daily:
+          branch: '8.12'
+          cronline: '0 */12 * * *'
+        8_11_daily:
+          branch: '8.11'
+          cronline: '@daily'

View File

@@ -260,11 +260,11 @@ _Default:_ `false`
 _Default:_ `null`
 |`maxResponseSize`
-|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
+|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENTGH +
 _Default:_ `null`
 |`maxCompressedResponseSize`
-|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH +
+|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENTGH +
 _Default:_ `null`
 |===
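
For context on the two options touched in this hunk, here is a minimal sketch of setting them when instantiating the client; the node URL, credentials, and the specific byte limits are placeholder values, not taken from the diff.

[source, js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',              // placeholder node
  auth: { apiKey: 'base64EncodedKey' },        // placeholder credentials
  maxResponseSize: 50 * 1024 * 1024,           // abort if the uncompressed body exceeds 50 MB
  maxCompressedResponseSize: 10 * 1024 * 1024  // abort if the compressed body exceeds 10 MB
})
----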

View File

@@ -1,131 +1,6 @@
 [[changelog-client]]
 == Release notes
-[discrete]
-=== 8.15.0
-[discrete]
-==== Features
-[discrete]
-===== Support for Elasticsearch `v8.15.0`
-You can find all the API changes
-https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
-[discrete]
-===== OpenTelemetry zero-code instrumentation support
-For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
-See {jsclient}/observability.html#_opentelemetry[the docs]
-for more information.
-[discrete]
-=== 8.14.1
-[discrete]
-==== Features
-[discrete]
-===== Improved support for Elasticsearch `8.14`
-Updated types based on fixes and changes to the Elasticsearch specification.
-[discrete]
-=== 8.14.0
-[discrete]
-==== Features
-[discrete]
-===== Support for Elasticsearch `v8.14.0`
-You can find all the API changes
-https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
-[discrete]
-===== ES|QL object API helper
-A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
-A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
-[discrete]
-===== `onSuccess` callback added to bulk helper
-The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
-[discrete]
-===== Request retries are more polite
-https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
-[discrete]
-=== 8.13.1
-[discrete]
-==== Fixes
-[discrete]
-===== Pin @elastic/transport to `~8.4.1`
-Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
-v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
-[discrete]
-=== 8.13.0
-[discrete]
-==== Features
-[discrete]
-===== Support for Elasticsearch `v8.13.0`
-You can find all the API changes
-https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
-[discrete]
-==== Fixes
-[discrete]
-===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
-When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
-[discrete]
-=== 8.12.3
-[discrete]
-==== Fixes
-[discrete]
-===== Bump @elastic/transport to `~8.4.1`
-Switching from `^8.4.1` to `~8.4.1` ensures 8.12 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
-[discrete]
-=== 8.12.2
-[discrete]
-==== Fixes
-[discrete]
-===== Upgrade transport to 8.4.1 https://github.com/elastic/elasticsearch-js/pull/2137[#2137]
-Upgrades `@elastic/transport` to 8.4.1 to resolve https://github.com/elastic/elastic-transport-js/pull/83[a bug] where arrays in error diagnostics were unintentionally transformed into objects.
-[discrete]
-=== 8.12.1
-[discrete]
-==== Fixes
-[discrete]
-===== Fix hang in bulk helper semaphore https://github.com/elastic/elasticsearch-js/pull/2027[#2027]
-The failing state could be reached when a server's response times are slower than flushInterval.
 [discrete]
 === 8.12.0
@@ -138,22 +13,11 @@ The failing state could be reached when a server's response times are slower tha
 You can find all the API changes
 https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
-[discrete]
-=== 8.11.1
-[discrete]
-==== Fixes
-[discrete]
-===== Bump @elastic/transport to `~8.4.0`
-Switching from `^8.4.0` to `~8.4.0` ensures 8.11 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
 [discrete]
 === 8.11.0
 [discrete]
-==== Features
+=== Features
 [discrete]
 ===== Support for Elasticsearch `v8.11.0`
@@ -168,22 +32,11 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.11/release-notes-8.11.
 See <<redaction>> for more information.
-[discrete]
-=== 8.10.1
-[discrete]
-==== Fixes
-[discrete]
-===== Bump @elastic/transport to `~8.3.4`
-Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
 [discrete]
 === 8.10.0
 [discrete]
-==== Features
+=== Features
 [discrete]
 ===== Support for Elasticsearch `v8.10.0`
@@ -191,17 +44,6 @@ Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required t
 You can find all the API changes
 https://www.elastic.co/guide/en/elasticsearch/reference/8.10/release-notes-8.10.0.html[here].
-[discrete]
-=== 8.9.2
-[discrete]
-==== Fixes
-[discrete]
-===== Bump @elastic/transport to `~8.3.4`
-Switching from `^8.3.4` to `~8.3.4` ensures 8.9 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
 [discrete]
 === 8.9.1
@@ -228,7 +70,7 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.
 [discrete]
 ===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
-In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
+In the https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
 [discrete]
 ==== Fixes
@@ -238,17 +80,6 @@ In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could
 The `user-agent` header the client used to connect to Elasticsearch was using a non-standard format that has been improved.
-[discrete]
-=== 8.8.2
-[discrete]
-==== Fixes
-[discrete]
-===== Bump @elastic/transport to `~8.3.2`
-Switching from `^8.3.2` to `~8.3.2` ensures 8.8 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
 [discrete]
 === 8.8.1
@@ -294,17 +125,6 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.0.
 Prior releases contained a bug where type declarations for legacy types that include a `body` key were not actually importing the type that includes the `body` key.
-[discrete]
-=== 8.7.3
-[discrete]
-==== Fixes
-[discrete]
-===== Bump @elastic/transport to `~8.3.1`
-Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
 [discrete]
 === 8.7.0
@@ -314,17 +134,6 @@ Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to
 You can find all the API changes
 https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].
-[discrete]
-=== 8.6.1
-[discrete]
-==== Fixes
-[discrete]
-===== Bump @elastic/transport to `~8.3.1`
-Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
 [discrete]
 === 8.6.0
@@ -509,7 +318,7 @@ client.search({ params }, { options }, (err, result) => {
 client.search({ params }, { options })
   .then(console.log)
   .catch(console.log)
 // async-style (sugar syntax on top of promises)
 const response = await client.search({ params }, { options })
 console.log(response)
@@ -669,7 +478,7 @@ If you weren't extending the internals of the client, this won't be a breaking c
 *Breaking: Yes* | *Migration effort: Medium*
 Currently, every path or query parameter could be expressed in both `snake_case` and `camelCase`. Internally the client will convert everything to `snake_case`.
 This was done in an effort to reduce the friction of migrating from the legacy to the new client, but now it no longer makes sense.
 If you are already using `snake_case` keys, this won't be a breaking change for you.
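
The 8.9.0 entry above mentions that the bulk helper's `onDocument` callback can now overwrite a document before it is sent. A hedged sketch of that pattern follows; the node URL, index name, datasource, and added field are invented for illustration, and the tuple-return shape is taken from the linked helper docs.

[source, js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'https://localhost:9200' }) // placeholder node

await client.helpers.bulk({
  datasource: [{ user: 'a' }, { user: 'b' }], // invented sample documents
  onDocument (doc) {
    // Returning an [action, document] tuple lets the helper index a modified
    // copy of the document instead of the original one.
    return [
      { index: { _index: 'my-index' } },
      { ...doc, ingestedAt: new Date().toISOString() }
    ]
  }
})
----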

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.clearCachedRealms({
realms: "default_file,ldap1",
});
console.log(response);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.forcemerge({
index: ".ds-my-data-stream-2099.03.07-000001",
max_num_segments: 1,
});
console.log(response);
----

View File

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
settings: {
analysis: {
analyzer: {
my_analyzer: {
tokenizer: "whitespace",
filter: ["stemmer"],
},
},
},
},
});
console.log(response);
----

View File

@ -1,40 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.watcher.putWatch({
id: "cluster_health_watch",
trigger: {
schedule: {
interval: "10s",
},
},
input: {
http: {
request: {
host: "localhost",
port: 9200,
path: "/_cluster/health",
},
},
},
condition: {
compare: {
"ctx.payload.status": {
eq: "red",
},
},
},
actions: {
send_email: {
email: {
to: "username@example.org",
subject: "Cluster Status Warning",
body: "Cluster status is RED",
},
},
},
});
console.log(response);
----

View File

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.migration.postFeatureUpgrade();
console.log(response);
----

View File

@ -1,16 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index",
query: {
match: {
"http.clientip": "40.135.0.0",
},
},
fields: ["http.clientip"],
});
console.log(response);
----

View File

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.shardStores();
console.log(response);
----

View File

@ -1,15 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.renderSearchTemplate({
source: '{ "query": {{#toJson}}my_query{{/toJson}} }',
params: {
my_query: {
match_all: {},
},
},
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
tokenizer: "standard",
filter: ["asciifolding"],
text: "açaí à la carte",
});
console.log(response);
----

View File

@ -1,67 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "italian_example",
settings: {
analysis: {
filter: {
italian_elision: {
type: "elision",
articles: [
"c",
"l",
"all",
"dall",
"dell",
"nell",
"sull",
"coll",
"pell",
"gl",
"agl",
"dagl",
"degl",
"negl",
"sugl",
"un",
"m",
"t",
"s",
"v",
"d",
],
articles_case: true,
},
italian_stop: {
type: "stop",
stopwords: "_italian_",
},
italian_keywords: {
type: "keyword_marker",
keywords: ["esempio"],
},
italian_stemmer: {
type: "stemmer",
language: "light_italian",
},
},
analyzer: {
rebuilt_italian: {
tokenizer: "standard",
filter: [
"italian_elision",
"lowercase",
"italian_stop",
"italian_keywords",
"italian_stemmer",
],
},
},
},
},
});
console.log(response);
----

View File

@ -1,19 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "text_embedding",
inference_id: "my-e5-model",
inference_config: {
service: "elasticsearch",
service_settings: {
num_allocations: 1,
num_threads: 1,
model_id: ".multilingual-e5-small",
},
},
});
console.log(response);
----

View File

@ -1,13 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.putIndexTemplate({
name: "my-data-stream-template",
index_patterns: ["my-data-stream*"],
data_stream: {},
priority: 500,
});
console.log(response);
----

View File

@@ -4,9 +4,12 @@
 [source, js]
 ----
 const response = await client.cluster.putSettings({
-  persistent: {
-    "xpack.security.transport.filter.enabled": false,
-  },
-});
-console.log(response);
+  body: {
+    transient: {
+      'cluster.routing.use_adaptive_replica_selection': false
+    }
+  }
+})
+console.log(response)
 ----

View File

@@ -4,12 +4,15 @@
 [source, js]
 ----
 const response = await client.update({
-  index: "test",
-  id: 1,
-  doc: {
-    name: "new_name",
-  },
-  doc_as_upsert: true,
-});
-console.log(response);
+  index: 'test',
+  id: '1',
+  body: {
+    doc: {
+      name: 'new_name'
+    },
+    detect_noop: false
+  }
+})
+console.log(response)
 ----

View File

@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "sales",
size: 0,
filter_path: "aggregations",
query: {
term: {
type: "t-shirt",
},
},
aggs: {
avg_price: {
avg: {
field: "price",
},
},
},
});
console.log(response);
----

View File

@ -1,26 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "asciifold_example",
settings: {
analysis: {
analyzer: {
standard_asciifolding: {
tokenizer: "standard",
filter: ["my_ascii_folding"],
},
},
filter: {
my_ascii_folding: {
type: "asciifolding",
preserve_original: true,
},
},
},
},
});
console.log(response);
----

View File

@ -1,37 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cluster.putComponentTemplate({
name: "component_template1",
template: {
mappings: {
properties: {
"@timestamp": {
type: "date",
},
},
},
},
});
console.log(response);
const response1 = await client.cluster.putComponentTemplate({
name: "runtime_component_template",
template: {
mappings: {
runtime: {
day_of_week: {
type: "keyword",
script: {
source:
"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
},
},
},
},
},
});
console.log(response1);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transform.startTransform({
transform_id: "ecommerce_transform",
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.tasks.list({
human: "true",
detailed: "true",
actions: "indices:data/write/bulk",
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.watcher.executeWatch({
id: "my_watch",
});
console.log(response);
----

View File

@ -1,39 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "basque_example",
settings: {
analysis: {
filter: {
basque_stop: {
type: "stop",
stopwords: "_basque_",
},
basque_keywords: {
type: "keyword_marker",
keywords: ["Adibidez"],
},
basque_stemmer: {
type: "stemmer",
language: "basque",
},
},
analyzer: {
rebuilt_basque: {
tokenizer: "standard",
filter: [
"lowercase",
"basque_stop",
"basque_keywords",
"basque_stemmer",
],
},
},
},
},
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ccr.followStats({
index: "<index>",
});
console.log(response);
----

View File

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
query: {
has_child: {
type: "child",
query: {
match_all: {},
},
max_children: 10,
min_children: 2,
score_mode: "min",
},
},
});
console.log(response);
----

View File

@ -1,28 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "sales",
mappings: {
properties: {
tags: {
type: "keyword",
},
comments: {
type: "nested",
properties: {
username: {
type: "keyword",
},
comment: {
type: "text",
},
},
},
},
},
});
console.log(response);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ml.flushJob({
job_id: "low_request_rate",
calc_interim: true,
});
console.log(response);
----

View File

@ -0,0 +1,14 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: 'twitter',
size: '0',
q: 'extra:test',
filter_path: 'hits.total'
})
console.log(response)
----

View File

@ -1,23 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
aggs: {
JapaneseCars: {
terms: {
field: "make",
include: ["mazda", "honda"],
},
},
ActiveCarManufacturers: {
terms: {
field: "make",
exclude: ["rover", "jensen"],
},
},
},
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.invalidateToken({
username: "myuser",
});
console.log(response);
----

View File

@ -1,28 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
tokenizer: "keyword",
char_filter: [
{
type: "mapping",
mappings: [
"٠ => 0",
"١ => 1",
"٢ => 2",
"٣ => 3",
"٤ => 4",
"٥ => 5",
"٦ => 6",
"٧ => 7",
"٨ => 8",
"٩ => 9",
],
},
],
text: "My license plate is ٢٥٠١٥",
});
console.log(response);
----

View File

@ -0,0 +1,43 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
body: {
aggs: {
countries: {
terms: {
field: 'artist.country',
order: [
{
'rock>playback_stats.avg': 'desc'
},
{
_count: 'desc'
}
]
},
aggs: {
rock: {
filter: {
term: {
genre: 'rock'
}
},
aggs: {
playback_stats: {
stats: {
field: 'play_count'
}
}
}
}
}
}
}
}
})
console.log(response)
----

View File

@ -1,21 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
mappings: {
properties: {
full_name: {
type: "text",
index_prefixes: {
min_chars: 1,
max_chars: 10,
},
},
},
},
});
console.log(response);
----

View File

@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "pipelineB",
description: "outer pipeline",
processors: [
{
pipeline: {
name: "pipelineA",
},
},
{
set: {
field: "outer_pipeline_set",
value: "outer",
},
},
],
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getTemplate({
name: "template_1",
});
console.log(response);
----

View File

@ -1,38 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
mappings: {
properties: {
my_wildcard: {
type: "wildcard",
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "my-index-000001",
id: 1,
document: {
my_wildcard: "This string can be quite lengthy",
},
});
console.log(response1);
const response2 = await client.search({
index: "my-index-000001",
query: {
wildcard: {
my_wildcard: {
value: "*quite*lengthy",
},
},
},
});
console.log(response2);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.refresh({
index: "my-index-000001,my-index-000002",
});
console.log(response);
----

View File

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
body: {
aggs: {
genres: {
terms: {
script: {
source: "doc['genre'].value",
lang: 'painless'
}
}
}
}
}
})
console.log(response)
----

View File

@ -1,26 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.index({
index: "example",
document: {
location: {
type: "Polygon",
orientation: "LEFT",
coordinates: [
[
[-177, 10],
[176, 15],
[172, 0],
[176, -15],
[-177, -10],
[-177, 10],
],
],
},
},
});
console.log(response);
----

View File

@ -1,36 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.index({
index: "my-index-000001",
id: 5,
refresh: "true",
document: {
query: {
bool: {
should: [
{
match: {
message: {
query: "Japanese art",
_name: "query1",
},
},
},
{
match: {
message: {
query: "Holand culture",
_name: "query2",
},
},
},
],
},
},
},
});
console.log(response);
----

View File

@ -1,38 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "example-index",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
term: {
text: "blue shoes sale",
},
},
},
},
{
standard: {
query: {
sparse_vector: {
field: "ml.tokens",
inference_id: "my_elser_model",
query: "What blue shoes are on sale?",
},
},
},
},
],
rank_window_size: 50,
rank_constant: 20,
},
},
});
console.log(response);
----

View File

@ -1,29 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
settings: {
analysis: {
analyzer: {
my_custom_analyzer: {
type: "custom",
tokenizer: "standard",
char_filter: ["html_strip"],
filter: ["lowercase", "asciifolding"],
},
},
},
},
});
console.log(response);
const response1 = await client.indices.analyze({
index: "my-index-000001",
analyzer: "my_custom_analyzer",
text: "Is this déjà vu</b>?",
});
console.log(response1);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
analyzer: "whitespace",
text: "The quick brown fox.",
});
console.log(response);
----

View File

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.getServiceAccounts();
console.log(response);
----

View File

@ -1,35 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000003",
mappings: {
properties: {
metrics: {
subobjects: false,
properties: {
time: {
type: "object",
properties: {
min: {
type: "long",
},
max: {
type: "long",
},
},
},
},
},
},
},
});
console.log(response);
const response1 = await client.indices.getMapping({
index: "my-index-000003",
});
console.log(response1);
----

View File

@ -1,14 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.snapshot.restore({
repository: "my_repository",
snapshot: "my_snapshot_2099.05.06",
indices: "my-index,logs-my_app-default",
rename_pattern: "(.+)",
rename_replacement: "restored-$1",
});
console.log(response);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.sql.query({
format: "txt",
query: "SELECT * FROM library ORDER BY page_count DESC LIMIT 5",
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.connector.updateApiKeyId({
connector_id: "my-connector",
api_key_id: "my-api-key-id",
api_key_secret_id: "my-connector-secret-id",
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.eql.search({
index: "my-data-stream",
query: '\n process where process.name == "regsvr32.exe"\n ',
size: 50,
});
console.log(response);
----

View File

@ -1,52 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "my-index",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
multi_match: {
query: "How is the weather in Jamaica?",
fields: ["title", "description"],
},
},
},
},
{
standard: {
query: {
text_expansion: {
"ml.inference.title_expanded.predicted_value": {
model_id: ".elser_model_2",
model_text: "How is the weather in Jamaica?",
},
},
},
},
},
{
standard: {
query: {
text_expansion: {
"ml.inference.description_expanded.predicted_value": {
model_id: ".elser_model_2",
model_text: "How is the weather in Jamaica?",
},
},
},
},
},
],
window_size: 10,
rank_constant: 20,
},
},
});
console.log(response);
----
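Note that this snippet uses `window_size`, while the other RRF retriever example in this diff uses `rank_window_size`. If the target cluster expects the newer parameter name, the options block would likely look as sketched below (the retrievers themselves are unchanged and elided here; the index name is hypothetical):

[source, js]
----
// Hedged sketch: the same RRF search with the renamed option (assumption:
// the cluster accepts `rank_window_size` rather than `window_size`).
const response = await client.search({
  index: "my-index",
  retriever: {
    rrf: {
      retrievers: [/* the same three standard retrievers as above */],
      rank_window_size: 10,
      rank_constant: 20,
    },
  },
});
console.log(response);
----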

View File

@ -1,27 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transport.request({
method: "PUT",
path: "/_internal/desired_nodes/&lt;history_id&gt;/&lt;version&gt;",
body: {
nodes: [
{
settings: {
"node.name": "instance-000187",
"node.external_id": "instance-000187",
"node.roles": ["data_hot", "master"],
"node.attr.data": "hot",
"node.attr.logical_availability_zone": "zone-0",
},
processors: 8,
memory: "58gb",
storage: "2tb",
},
],
},
});
console.log(response);
----
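`client.transport.request` is the generic escape hatch used when no dedicated helper exists for an endpoint. As a hedged companion sketch, the latest desired-nodes definition could be read back the same way (the `_latest` path is an assumption about the Elasticsearch internal API, not something shown in this diff):

[source, js]
----
// Hedged sketch: read back the desired nodes via the generic transport helper.
const latest = await client.transport.request({
  method: "GET",
  path: "/_internal/desired_nodes/_latest", // assumption: companion read endpoint
});
console.log(latest);
----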

View File

@ -4,14 +4,20 @@
 [source, js]
 ----
 const response = await client.search({
-  query: {
-    multi_match: {
-      query: "Will Smith",
-      type: "cross_fields",
-      fields: ["first_name", "last_name"],
-      operator: "and",
-    },
-  },
-});
-console.log(response);
+  body: {
+    query: {
+      multi_match: {
+        query: 'Will Smith',
+        type: 'cross_fields',
+        fields: [
+          'first_name',
+          'last_name'
+        ],
+        operator: 'and'
+      }
+    }
+  }
+})
+console.log(response)
 ----
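The change above reflects the two calling conventions of the JavaScript client: one style passes request parameters at the top level of the options object, the other wraps them in `body`. A minimal sketch of the top-level form, using a hypothetical index name:

[source, js]
----
// Hedged sketch of the top-level (no `body` wrapper) calling convention.
const response = await client.search({
  index: "my-index", // hypothetical index name
  query: {
    multi_match: {
      query: "Will Smith",
      type: "cross_fields",
      fields: ["first_name", "last_name"],
      operator: "and",
    },
  },
});
console.log(response);
----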

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transform.resetTransform({
transform_id: "ecommerce_transform",
});
console.log(response);
----

View File

@ -0,0 +1,17 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.create({
index: 'twitter',
id: '1',
body: {
user: 'kimchy',
post_date: '2009-11-15T14:12:12',
message: 'trying out Elasticsearch'
}
})
console.log(response)
----

View File

@ -1,31 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "set_os",
description: "sets the value of host.os.name from the field os",
processors: [
{
set: {
field: "host.os.name",
value: "{{{os}}}",
},
},
],
});
console.log(response);
const response1 = await client.ingest.simulate({
id: "set_os",
docs: [
{
_source: {
os: "Ubuntu",
},
},
],
});
console.log(response1);
----
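Once the pipeline behaves as expected in the simulation, it can be applied at index time via the `pipeline` parameter. A minimal sketch, with a hypothetical index name:

[source, js]
----
// Hedged sketch: index a document through the `set_os` pipeline defined above.
const response2 = await client.index({
  index: "my-index", // hypothetical index name
  pipeline: "set_os",
  document: {
    os: "Ubuntu",
  },
});
console.log(response2);
----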

View File

@ -1,27 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "my-pipeline-id",
description: "My optional pipeline description",
processors: [
{
set: {
description: "My optional processor description",
field: "my-keyword-field",
value: "foo",
},
},
],
_meta: {
reason: "set my-keyword-field to foo",
serialization: {
class: "MyPipeline",
id: 10,
},
},
});
console.log(response);
----

View File

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "text_embedding",
inference_id: "google_vertex_ai_embeddings",
inference_config: {
service: "googlevertexai",
service_settings: {
service_account_json: "<service_account_json>",
model_id: "<model_id>",
location: "<location>",
project_id: "<project_id>",
},
},
});
console.log(response);
----

View File

@ -4,8 +4,9 @@
 [source, js]
 ----
 const response = await client.cluster.health({
-  wait_for_status: "yellow",
-  timeout: "50s",
-});
-console.log(response);
+  wait_for_status: 'yellow',
+  timeout: '50s'
+})
+console.log(response)
 ----

View File

@ -0,0 +1,30 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
body: {
query: {
term: {
user: 'kimchy'
}
},
sort: {
_script: {
type: 'number',
script: {
lang: 'painless',
source: "doc['field_name'].value * params.factor",
params: {
factor: 1.1
}
},
order: 'asc'
}
}
}
})
console.log(response)
----

View File

@ -1,37 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ml.evaluateDataFrame({
index: "house_price_predictions",
query: {
bool: {
filter: [
{
term: {
"ml.is_training": false,
},
},
],
},
},
evaluation: {
regression: {
actual_field: "price",
predicted_field: "ml.price_prediction",
metrics: {
r_squared: {},
mse: {},
msle: {
offset: 10,
},
huber: {
delta: 1.5,
},
},
},
},
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.sql.query({
format: "yaml",
query: "SELECT * FROM library ORDER BY page_count DESC",
fetch_size: 5,
});
console.log(response);
----
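When the result set is larger than `fetch_size`, the SQL API returns a cursor for pagination; with the default JSON response format the cursor appears as a field on the body (for `txt`/`yaml`/`csv` formats it is returned in a response header instead). A minimal sketch, assuming the default JSON format:

[source, js]
----
// Hedged sketch: page through SQL results using the returned cursor.
const firstPage = await client.sql.query({
  query: "SELECT * FROM library ORDER BY page_count DESC",
  fetch_size: 5,
});
if (firstPage.cursor) {
  const nextPage = await client.sql.query({ cursor: firstPage.cursor });
  console.log(nextPage.rows);
}
----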

View File

@ -1,54 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ilm.putLifecycle({
name: "logs",
policy: {
phases: {
hot: {
actions: {
rollover: {
max_primary_shard_size: "50gb",
},
},
},
warm: {
min_age: "30d",
actions: {
shrink: {
number_of_shards: 1,
},
forcemerge: {
max_num_segments: 1,
},
},
},
cold: {
min_age: "60d",
actions: {
searchable_snapshot: {
snapshot_repository: "found-snapshots",
},
},
},
frozen: {
min_age: "90d",
actions: {
searchable_snapshot: {
snapshot_repository: "found-snapshots",
},
},
},
delete: {
min_age: "735d",
actions: {
delete: {},
},
},
},
},
});
console.log(response);
----

View File

@ -1,23 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ilm.putLifecycle({
name: "my_policy",
policy: {
phases: {
warm: {
actions: {
allocate: {
include: {
box_type: "hot,warm",
},
},
},
},
},
},
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cat.repositories({
v: "true",
});
console.log(response);
----

View File

@ -1,19 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ilm.putLifecycle({
name: "my_policy",
policy: {
phases: {
delete: {
actions: {
delete: {},
},
},
},
},
});
console.log(response);
----

View File

@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.index({
index: "example",
document: {
location: {
type: "polygon",
coordinates: [
[
[1000, -1001],
[1001, -1001],
[1001, -1000],
[1000, -1000],
[1000, -1001],
],
],
},
},
});
console.log(response);
----

View File

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
tokenizer: "whitespace",
filter: [
{
type: "keyword_marker",
keywords: ["jumping"],
},
"stemmer",
],
text: "fox running and jumping",
explain: true,
attributes: "keyword",
});
console.log(response);
----

View File

@ -1,78 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "clientips",
mappings: {
properties: {
client_ip: {
type: "keyword",
},
env: {
type: "keyword",
},
},
},
});
console.log(response);
const response1 = await client.bulk({
index: "clientips",
operations: [
{
index: {},
},
{
client_ip: "172.21.0.5",
env: "Development",
},
{
index: {},
},
{
client_ip: "172.21.2.113",
env: "QA",
},
{
index: {},
},
{
client_ip: "172.21.2.162",
env: "QA",
},
{
index: {},
},
{
client_ip: "172.21.3.15",
env: "Production",
},
{
index: {},
},
{
client_ip: "172.21.3.16",
env: "Production",
},
],
});
console.log(response1);
const response2 = await client.enrich.putPolicy({
name: "clientip_policy",
match: {
indices: "clientips",
match_field: "client_ip",
enrich_fields: ["env"],
},
});
console.log(response2);
const response3 = await client.enrich.executePolicy({
name: "clientip_policy",
wait_for_completion: "false",
});
console.log(response3);
----
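After the policy above has executed, it is typically consumed from an enrich processor in an ingest pipeline. A minimal sketch under that assumption (the pipeline id, incoming field name, and target field are hypothetical):

[source, js]
----
// Hedged sketch: use the executed `clientip_policy` from an ingest pipeline.
const response4 = await client.ingest.putPipeline({
  id: "clientip_enrich_pipeline", // hypothetical pipeline id
  processors: [
    {
      enrich: {
        policy_name: "clientip_policy",
        field: "client_ip", // assumption: incoming docs carry the IP here
        target_field: "env_info", // hypothetical target field
      },
    },
  ],
});
console.log(response4);
----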

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getDataStream({
name: "my-data-stream",
});
console.log(response);
----

View File

@ -1,21 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.putRole({
name: "click_role",
indices: [
{
names: ["events-*"],
privileges: ["read"],
query: {
match: {
category: "click",
},
},
},
],
});
console.log(response);
----
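A role with a document-level security query like this only takes effect once it is assigned. A minimal sketch of assigning it to a hypothetical user:

[source, js]
----
// Hedged sketch: assign the `click_role` defined above to a (hypothetical) user.
const response1 = await client.security.putUser({
  username: "click_reader", // hypothetical user name
  password: "a-long-random-password", // placeholder; use a real secret
  roles: ["click_role"],
});
console.log(response1);
----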

View File

@ -1,21 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "alibabacloud_ai_search_embeddings_pipeline",
processors: [
{
inference: {
model_id: "alibabacloud_ai_search_embeddings",
input_output: {
input_field: "content",
output_field: "content_embedding",
},
},
},
],
});
console.log(response);
----

View File

@ -1,17 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.putSettings({
index: "logs-my_app-default",
settings: {
index: {
lifecycle: {
name: "new-lifecycle-policy",
},
},
},
});
console.log(response);
----
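After switching the lifecycle policy, the index's ILM state can be checked with the explain API. A minimal sketch:

[source, js]
----
// Hedged sketch: verify which policy now manages the index.
const response1 = await client.ilm.explainLifecycle({
  index: "logs-my_app-default",
});
console.log(response1);
----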

View File

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ssl.certificates();
console.log(response);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.snapshot.status({
repository: "my_repository",
snapshot: "snapshot_2",
});
console.log(response);
----

View File

@ -1,32 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
settings: {
analysis: {
analyzer: {
my_analyzer: {
tokenizer: "my_tokenizer",
},
},
tokenizer: {
my_tokenizer: {
type: "simple_pattern",
pattern: "[0123456789]{3}",
},
},
},
},
});
console.log(response);
const response1 = await client.indices.analyze({
index: "my-index-000001",
analyzer: "my_analyzer",
text: "fd-786-335-514-x",
});
console.log(response1);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.snapshot.get({
repository: "my_repository",
snapshot: "snapshot*,-snapshot_3",
sort: "name",
});
console.log(response);
----

View File

@ -1,16 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.rollover({
alias: "my-alias",
conditions: {
max_age: "7d",
max_docs: 1000,
max_primary_shard_size: "50gb",
max_primary_shard_docs: "2000",
},
});
console.log(response);
----

View File

@ -1,58 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
mappings: {
properties: {
manager: {
properties: {
age: {
type: "integer",
},
name: {
type: "text",
},
},
},
employees: {
type: "nested",
properties: {
age: {
type: "integer",
},
name: {
type: "text",
},
},
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "my-index-000001",
id: 1,
document: {
region: "US",
manager: {
name: "Alice White",
age: 30,
},
employees: [
{
name: "John Smith",
age: 34,
},
{
name: "Peter Brown",
age: 26,
},
],
},
});
console.log(response1);
----
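Documents indexed with a `nested` field like `employees` are queried with a `nested` query so that each employee object is matched independently. A minimal sketch, continuing from the snippet above:

[source, js]
----
// Hedged sketch: query the nested `employees` objects independently.
const response2 = await client.search({
  index: "my-index-000001",
  query: {
    nested: {
      path: "employees",
      query: {
        bool: {
          must: [
            { match: { "employees.name": "John" } },
            { range: { "employees.age": { gte: 30 } } },
          ],
        },
      },
      inner_hits: {}, // return the matching employee objects
    },
  },
});
console.log(response2);
----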

Some files were not shown because too many files have changed in this diff.