Compare commits
v9.0.0-alp ... backport-2
21 Commits

| SHA1 |
|---|
| ca3b25c929 |
| 424cc94458 |
| 7aca5cf652 |
| a8927727b1 |
| 15a450eba4 |
| 8f028a522a |
| 411f379006 |
| 242b4227ee |
| 78332da539 |
| 62b2d78b15 |
| 599d7e6e07 |
| 6116909a9b |
| f609271107 |
| b4eb8e5441 |
| ccf9fcbd93 |
| 0c6f323745 |
| 04a9eb462d |
| 05eaa9fc61 |
| 30c6793383 |
| 6df75b6a92 |
| 383206ef19 |
@@ -11,6 +11,6 @@ RUN apt-get clean -y && \
 WORKDIR /usr/src/app

 COPY package.json .
-RUN npm install
+RUN npm install --production=false

 COPY . .
@@ -24,7 +24,7 @@ USER ${BUILDER_UID}:${BUILDER_GID}

 # install dependencies
 COPY package.json .
-RUN npm install
+RUN npm install --production=false

 # copy project files
 COPY . .
@@ -6,7 +6,7 @@ steps:
 env:
 NODE_VERSION: "{{ matrix.nodejs }}"
 TEST_SUITE: "{{ matrix.suite }}"
-STACK_VERSION: 8.16.0
+STACK_VERSION: 8.15.0
 matrix:
 setup:
 suite:
@@ -25,7 +25,7 @@ steps:
 provider: "gcp"
 image: family/core-ubuntu-2204
 plugins:
-- junit-annotate#v2.6.0:
+- junit-annotate#v2.4.1:
 artifacts: "junit-output/junit-*.xml"
 job-uuid-file-pattern: "junit-(.*).xml"
 fail-build-on-error: true
@@ -5,4 +5,3 @@ elasticsearch
 .git
 lib
 junit-output
-.tap
.github/ISSUE_TEMPLATE/bug.md (new file, 50 lines)
@@ -0,0 +1,50 @@
+---
+name: 🐛 Bug report
+about: Create a report to help us improve
+labels: ["Category: Bug"]
+---
+
+It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
+
+**Please read this entire template before posting any issue. If you ignore these instructions
+and post an issue here that does not follow the instructions, your issue might be closed,
+locked, and assigned the `Category: Not an issue` label.**
+
+## 🐛 Bug Report
+
+A clear and concise description of what the bug is.
+
+## To Reproduce
+
+Steps to reproduce the behavior:
+
+Paste your code here:
+
+```js
+
+```
+
+<!--
+In some cases, it might be challenging to reproduce the bug in a few lines of code.
+You can fork the following repository, which contains all the configuration needed
+to spin up a three nodes Elasticsearch cluster with security enabled.
+The repository also contains a preconfigured client instance that you can use to reproduce the issue.
+https://github.com/delvedor/es-reproduce-issue
+--->
+
+## Expected behavior
+
+A clear and concise description of what you expected to happen.
+
+Paste the results here:
+
+```js
+
+```
+
+## Your Environment
+
+- *node version*: 6,8,10
+- `@elastic/elasticsearch` *version*: >=7.0.0
+- *os*: Mac, Windows, Linux
+- *any other relevant information*
.github/ISSUE_TEMPLATE/bug.yaml (deleted, 66 lines)
@@ -1,66 +0,0 @@
----
-name: 🐛 Bug report
-description: Create a report to help us improve
-labels: ["Category: Bug"]
-body:
-- type: markdown
-attributes:
-value: |
-It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
-
-**Please read this entire template before posting any issue. If you ignore these instructions
-and post an issue here that does not follow the instructions, your issue might be closed,
-locked, and assigned the `Category: Not an issue` label.**
-
-- type: textarea
-id: bug-report
-attributes:
-label: 🐛 Bug report
-description: A clear and concise description of what the bug is.
-validations:
-required: true
-
-- type: textarea
-id: reproduction
-attributes:
-label: To reproduce
-description: Steps to reproduce the behavior
-validations:
-required: true
-
-- type: textarea
-id: expected
-attributes:
-label: Expected behavior
-description: A clear and concise description of what you expected to happen.
-validations:
-required: true
-
-- type: input
-id: node-js-version
-attributes:
-label: Node.js version
-placeholder: 18.x, 20.x, etc.
-validations:
-required: true
-
-- type: input
-id: client-version
-attributes:
-label: "@elastic/elasticsearch version"
-placeholder: 7.17.0, 8.14.1, etc.
-validations:
-required: true
-
-- type: input
-id: os
-attributes:
-label: Operating system
-placeholder: Ubuntu 22.04, macOS, etc.
-validations:
-required: true
-
-- type: input
-id: env-info
-attributes:
-label: Any other relevant environment information
.github/ISSUE_TEMPLATE/feature.md (new file, 23 lines)
@@ -0,0 +1,23 @@
+---
+name: 🚀 Feature Proposal
+about: Submit a proposal for a new feature
+labels: ["Category: Feature"]
+---
+
+It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
+
+**Please read this entire template before posting any issue. If you ignore these instructions
+and post an issue here that does not follow the instructions, your issue might be closed,
+locked, and assigned the `Category: Not an issue` label.**
+
+## 🚀 Feature Proposal
+
+A clear and concise description of what the feature is.
+
+## Motivation
+
+Please outline the motivation for the proposal.
+
+## Example
+
+Please provide an example for how this feature would be used.
.github/ISSUE_TEMPLATE/feature.yaml (deleted, 33 lines)
@@ -1,33 +0,0 @@
----
-name: 🚀 Feature Proposal
-description: Submit a proposal for a new feature
-labels: ["Category: Feature"]
-body:
-- type: markdown
-attributes:
-value: |
-It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
-
-**Please read this entire template before posting any issue. If you ignore these instructions
-and post an issue here that does not follow the instructions, your issue might be closed,
-locked, and assigned the `Category: Not an issue` label.**
-
-- type: textarea
-id: feature-proposal
-attributes:
-label: 🚀 Feature Proposal
-description: A clear and concise description of what the feature is.
-validations:
-required: true
-
-- type: textarea
-id: motivation
-attributes:
-label: Motivation
-description: Please outline the motivation for the proposal.
-
-- type: textarea
-id: example
-attributes:
-label: Example
-description: Please provide an example for how this feature would be used.
.github/ISSUE_TEMPLATE/question.md (new file, 11 lines)
@@ -0,0 +1,11 @@
+---
+name: 💬 Questions / Help
+about: If you have questions, please check our Gitter or Help repo
+labels: ["Category: Question"]
+---
+
+## 💬 Questions and Help
+
+### Please note that this issue tracker is not a help forum and this issue may be closed.
+
+It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
.github/ISSUE_TEMPLATE/question.yaml (deleted, 21 lines)
@@ -1,21 +0,0 @@
----
-name: 💬 Questions / Help
-description: If you have questions, please check our community forum or support
-labels: ["Category: Question"]
-body:
-- type: markdown
-attributes:
-value: |
-### Please note that this issue tracker is not a help forum and this issue may be closed.
-
-Please check our [community forum](https://discuss.elastic.co/) or [contact Elastic support](https://www.elastic.co/support) if your issue is not specifically related to the documented functionality of this client library.
-
-It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
-
-- type: textarea
-id: question
-attributes:
-label: Question
-description: Your question or comment
-validations:
-required: true
.github/ISSUE_TEMPLATE/security.md (new file, 6 lines)
@@ -0,0 +1,6 @@
+---
+name: 👮 Security Issue
+about: Responsible Disclosure
+---
+
+If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).
.github/ISSUE_TEMPLATE/security.yaml (deleted, 8 lines)
@@ -1,8 +0,0 @@
----
-name: 👮 Security Issue
-description: Responsible disclosure
-body:
-- type: markdown
-attributes:
-value: |
-If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).
.github/make.sh (18 lines changed)
@@ -37,7 +37,7 @@ product="elastic/elasticsearch-js"
 output_folder=".buildkite/output"
 codegen_folder=".buildkite/output"
 OUTPUT_DIR="$repo/${output_folder}"
-NODE_JS_VERSION=22
+NODE_JS_VERSION=18
 WORKFLOW=${WORKFLOW-staging}
 mkdir -p "$OUTPUT_DIR"

@@ -65,7 +65,7 @@ codegen)
 if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
 # fall back to branch name or `main` if no VERSION is set
 branch_name=$(git rev-parse --abbrev-ref HEAD)
-if [[ "$branch_name" =~ ^[0-9]+\.([0-9]+|x) ]]; then
+if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
 echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
 VERSION="$branch_name"
 else
@@ -121,9 +121,9 @@ bumpmatrix)
 echo -e "\t $0 bump \$VERSION"
 echo -e "\t $0 codegen \$VERSION"
 exit 1
-;;
 esac


 # ------------------------------------------------------- #
 # Build Container
 # ------------------------------------------------------- #
@@ -150,7 +150,7 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
 -u "$(id -u):$(id -g)" \
 --volume "$repo:/usr/src/elasticsearch-js" \
 --volume /usr/src/elasticsearch-js/node_modules \
---volume "$(realpath "$repo/../elastic-client-generator-js"):/usr/src/elastic-client-generator-js" \
+--volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
 --env "WORKFLOW=$WORKFLOW" \
 --name make-elasticsearch-js \
 --rm \
@@ -159,14 +159,6 @@ if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}
 node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
 else
 echo -e "\033[34;1mINFO: Running in CI mode"
-
-# determine branch to clone
-GENERATOR_BRANCH="main"
-if [[ "$VERSION" == 8.* ]]; then
-GENERATOR_BRANCH="8.x"
-fi
-echo -e "\033[34;1mINFO: Generator branch: $GENERATOR_BRANCH"
-
 docker run \
 --volume "$repo:/usr/src/elasticsearch-js" \
 --volume /usr/src/elasticsearch-js/node_modules \
@@ -176,7 +168,7 @@ else
 --rm \
 $product \
 /bin/bash -c "cd /usr/src && \
-git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
+git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
 mkdir -p /usr/src/elastic-client-generator-js/output && \
 cd /usr/src/elasticsearch-js && \
 node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
.github/stale.yml (new file, 26 lines)
@@ -0,0 +1,26 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 15
+
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 7
+
+# Issues with these labels will never be considered stale
+exemptLabels:
+- "discussion"
+- "feature request"
+- "bug"
+- "todo"
+- "good first issue"
+
+# Label to use when marking an issue as stale
+staleLabel: stale
+
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: |
+We understand that this might be important for you, but this issue has been automatically marked as stale because it has not had recent activity either from our end or yours.
+It will be closed if no further activity occurs, please write a comment if you would like to keep this going.
+
+Note: in the past months we have built a new client, that has just landed in master. If you want to open an issue or a pr for the legacy client, you should do that in https://github.com/elastic/elasticsearch-js-legacy
+
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false
.github/workflows/auto-merge.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
+name: Automerge
+
+on:
+pull_request_review:
+types:
+- submitted
+
+jobs:
+automerge:
+runs-on: ubuntu-latest
+if: github.event.review.state == 'approved'
+steps:
+- uses: reitermarkus/automerge@v2
+with:
+token: ${{ secrets.GH_TOKEN }}
+merge-method: squash
+pull-request-author-associations: OWNER
+review-author-associations: OWNER,CONTRIBUTOR
.github/workflows/docs-build.yml (deleted, 19 lines)
@@ -1,19 +0,0 @@
-name: docs-build
-
-on:
-push:
-branches:
-- main
-pull_request_target: ~
-merge_group: ~
-
-jobs:
-docs-preview:
-uses: elastic/docs-builder/.github/workflows/preview-build.yml@main
-with:
-path-pattern: docs/**
-permissions:
-deployments: write
-id-token: write
-contents: read
-pull-requests: read
.github/workflows/docs-cleanup.yml (deleted, 14 lines)
@@ -1,14 +0,0 @@
-name: docs-cleanup
-
-on:
-pull_request_target:
-types:
-- closed
-
-jobs:
-docs-preview:
-uses: elastic/docs-builder/.github/workflows/preview-cleanup.yml@main
-permissions:
-contents: none
-id-token: write
-deployments: write
.github/workflows/nodejs.yml (66 lines changed)
@@ -11,10 +11,8 @@ jobs:
 outputs:
 src-only: "${{ steps.changes.outputs.src-only }}"
 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-with:
-persist-credentials: false
-- uses: dorny/paths-filter/@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+- uses: actions/checkout@v4
+- uses: dorny/paths-filter/@v2.11.1
 id: changes
 with:
 filters: |
@@ -32,16 +30,14 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-node-version: [18.x, 20.x, 22.x, 23.x]
+node-version: [18.x, 20.x, 22.x]
 os: [ubuntu-latest, windows-latest, macOS-latest]

 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-with:
-persist-credentials: false
+- uses: actions/checkout@v4

 - name: Use Node.js ${{ matrix.node-version }}
-uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+uses: actions/setup-node@v4
 with:
 node-version: ${{ matrix.node-version }}

@@ -57,21 +53,15 @@ jobs:
 run: |
 npm run test:unit

-- name: ECMAScript module test
-run: |
-npm run test:esm
-
 license:
 name: License check
 runs-on: ubuntu-latest

 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-with:
-persist-credentials: false
+- uses: actions/checkout@v4

 - name: Use Node.js
-uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+uses: actions/setup-node@v4
 with:
 node-version: 22.x

@@ -83,38 +73,12 @@ jobs:
 run: |
 npm run license-checker

-test-bun:
-name: Test Bun
-runs-on: ${{ matrix.os }}
-needs: paths-filter
-# only run if code relevant to unit tests was changed
-if: needs.paths-filter.outputs.src-only == 'true'
-strategy:
-fail-fast: false
-matrix:
-os: [ubuntu-latest, windows-latest, macOS-latest]
-
+auto-approve:
+name: Auto-approve
+needs: [test, license]
+runs-on: ubuntu-latest
+permissions:
+pull-requests: write
+if: github.actor == 'elasticmachine'
 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-with:
-persist-credentials: false
-
-- name: Use Bun
-uses: oven-sh/setup-bun@4bc047ad259df6fc24a6c9b0f9a0cb08cf17fbe5 # v2
-
-- name: Install
-run: |
-bun install
-
-- name: Lint
-run: |
-bun run lint
-
-- name: Unit test
-run: |
-bun run test:unit-bun
-
-- name: ECMAScript module test
-run: |
-bun run test:esm
+- uses: hmarr/auto-approve-action@v4
.github/workflows/npm-publish.yml (39 lines changed)
@@ -9,52 +9,29 @@ jobs:
 build:
 runs-on: ubuntu-latest
 permissions:
-contents: write
+contents: read
 id-token: write
 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+- uses: actions/checkout@v4
 with:
-persist-credentials: false
 ref: ${{ github.event.inputs.branch }}
-- uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+- uses: actions/setup-node@v3
 with:
-node-version: "22.x"
+node-version: "20.x"
 registry-url: "https://registry.npmjs.org"
 - run: npm install -g npm
 - run: npm install
 - run: npm test
-- name: npm publish
-run: |
-version=$(jq -r .version package.json)
-tag_meta=$(echo "$version" | cut -s -d '-' -f2)
-if [[ -z "$tag_meta" ]]; then
-npm publish --provenance --access public
-else
-tag=$(echo "$tag_meta" | cut -d '.' -f1)
-npm publish --provenance --access public --tag "$tag"
-fi
+- run: npm publish --provenance --access public
 env:
 NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-- name: Publish version on GitHub
-run: |
+- run: |
 version=$(jq -r .version package.json)
-tag_meta=$(echo "$version" | cut -s -d '-' -f2)
-if [[ -z "$tag_meta" ]]; then
 gh release create \
--n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)"
+-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
 --target "$BRANCH_NAME" \
---title "v$version" \
+-t "v$version" \
 "v$version"
-else
-tag_main=$(echo "$version" | cut -d '-' -f1)
-gh release create \
--n "This is a $tag_main pre-release. Changes may not be stable." \
---latest=false \
---prerelease \
---target "$BRANCH_NAME" \
---title "v$version" \
-"v$version"
-fi
 env:
 BRANCH_NAME: ${{ github.event.inputs.branch }}
 GH_TOKEN: ${{ github.token }}
.github/workflows/serverless-patch.sh (new executable file, 43 lines)
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+set -exuo pipefail
+
+merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
+pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
+pr_shortcode="elastic/elasticsearch-js#$pull_request_id"
+
+# generate patch file
+cd "$GITHUB_WORKSPACE/stack"
+git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff
+
+# set committer info
+git config --global user.email "elasticmachine@users.noreply.github.com"
+git config --global user.name "Elastic Machine"
+
+# apply patch file
+cd "$GITHUB_WORKSPACE/serverless"
+git am -C1 --reject /tmp/patch.diff || git am --quit
+
+# generate PR body comment
+comment="Patch applied from $pr_shortcode"
+
+# enumerate rejected patches in PR comment
+has_rejects='false'
+for f in ./**/*.rej; do
+has_rejects='true'
+comment="$comment
+
+## Rejected patch \`$f\` must be resolved:
+
+\`\`\`diff
+$(cat "$f")
+\`\`\`
+"
+done
+
+# delete .rej files
+rm -fv ./**/*.rej
+
+# send data to output parameters
+echo "$comment" > /tmp/pr_body
+echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"
.github/workflows/serverless-patch.yml (new file, 51 lines)
@@ -0,0 +1,51 @@
+---
+name: Apply PR changes to serverless
+on:
+pull_request_target:
+types:
+- closed
+- labeled
+
+jobs:
+apply-patch:
+name: Apply patch
+runs-on: ubuntu-latest
+# Only react to merged PRs for security reasons.
+# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
+if: >
+github.event.pull_request.merged
+&& (
+(
+github.event.action == 'closed'
+&& contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
+)
+||
+(
+github.event.action == 'labeled'
+&& github.event.label.name == 'apply-to-serverless'
+)
+)
+steps:
+- uses: actions/checkout@v4
+with:
+repository: elastic/elasticsearch-js
+ref: main
+path: stack
+fetch-depth: 0
+- uses: actions/checkout@v4
+with:
+repository: elastic/elasticsearch-serverless-js
+ref: main
+path: serverless
+- name: Apply patch from stack to serverless
+id: apply-patch
+run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
+- uses: peter-evans/create-pull-request@v6
+with:
+token: ${{ secrets.GH_TOKEN }}
+path: serverless
+title: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
+commit-message: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
+body-path: /tmp/pr_body
+draft: '${{ steps.apply-patch.outputs.PR_DRAFT }}'
+add-paths: ':!*.rej'
.github/workflows/stale.yml (12 lines changed)
@@ -1,21 +1,21 @@
 ---
-name: "Close stale issues and PRs"
+name: 'Close stale issues and PRs'
 on:
 schedule:
-- cron: "30 1 * * *"
+- cron: '30 1 * * *'

 jobs:
 stale:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9
+- uses: actions/stale@v8
 with:
 stale-issue-label: stale
 stale-pr-label: stale
 days-before-stale: 90
 days-before-close: 14
-exempt-issue-labels: "good first issue,tracking"
+exempt-issue-labels: 'good first issue'
 close-issue-label: closed-stale
 close-pr-label: closed-stale
-stale-issue-message: "This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
-stale-pr-message: "This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
+stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
+stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
.gitignore (4 lines changed)
@@ -64,7 +64,3 @@ test/bundlers/parcel-test/.parcel-cache

 lib
 junit-output
-bun.lockb
-test-results
-processinfo
-.tap
@@ -72,5 +72,3 @@ CODE_OF_CONDUCT.md
 CONTRIBUTING.md

 src
-bun.lockb
-.tap
@@ -28,9 +28,6 @@ spec:
 spec:
 repository: elastic/elasticsearch-js
 pipeline_file: .buildkite/pipeline.yml
-env:
-ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true"
-SLACK_NOTIFICATIONS_CHANNEL: "#devtools-notify-javascript"
 teams:
 devtools-team:
 access_level: MANAGE_BUILD_AND_READ
@@ -45,12 +42,6 @@ spec:
 main:
 branch: "main"
 cronline: "@daily"
-8_x:
-branch: "8.x"
-cronline: "@daily"
-8_17:
-branch: "8.17"
-cronline: "@daily"
-8_18:
-branch: "8.18"
+8_14:
+branch: "8.14"
 cronline: "@daily"
@@ -13,6 +13,7 @@ const client = new Client({
 cloud: { id: '<cloud-id>' },
 auth: { apiKey: 'base64EncodedKey' },
 maxRetries: 5,
+requestTimeout: 60000,
 sniffOnStart: true
 })
 ----
@@ -81,7 +82,7 @@ _Default:_ `3`

 |`requestTimeout`
 |`number` - Max request timeout in milliseconds for each request. +
-_Default:_ No value
+_Default:_ `30000`

 |`pingTimeout`
 |`number` - Max ping request timeout in milliseconds for each request. +
@@ -251,8 +252,8 @@ const client = new Client({
 ----

 |`disablePrototypePoisoningProtection`
-|`boolean`, `'proto'`, `'constructor'` - The client can protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more about this security concern. If needed, you can enable prototype poisoning protection entirely (`false`) or one of the two checks (`'proto'` or `'constructor'`). For performance reasons, it is disabled by default. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
-_Default:_ `true`
+|`boolean`, `'proto'`, `'constructor'` - By the default the client will protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more. If needed you can disable prototype poisoning protection entirely or one of the two checks. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
+_Default:_ `false`

 |`caFingerprint`
 |`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
@@ -1,167 +1,6 @@
 [[changelog-client]]
 == Release notes

-[discrete]
-=== 9.0.0
-
-[discrete]
-==== Breaking changes
-
-[discrete]
-===== Drop support for deprecated `body` parameter
-
-In 8.0, the top-level `body` parameter that was available on all API functions <<remove-body-key,was deprecated>>. In 9.0 this property is completely removed.
-
-[discrete]
-===== Remove the default 30-second timeout on all requests sent to Elasticsearch
-
-Setting HTTP timeouts on Elasticsearch requests goes against Elastic's recommendations. See <<timeout-best-practices>> for more information.
-
-[discrete]
-=== 8.17.1
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved support for Elasticsearch `v8.17`
-
-Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
-
-[discrete]
-===== Report correct transport connection type in telemetry
-
-The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
-
-[discrete]
-=== 8.17.0
-
-[discrete]
-==== Features
-
-[discrete]
-===== Support for Elasticsearch `v8.17`
-
-You can find all the API changes
-https://www.elastic.co/guide/en/elasticsearch/reference/8.17/release-notes-8.17.0.html[here].
-
-[discrete]
-=== 8.16.4
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved support for Elasticsearch `v8.16`
-
-Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
-
-[discrete]
-===== Report correct transport connection type in telemetry
-
-The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
-
-[discrete]
-=== 8.16.3
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved support for Elasticsearch `v8.16`
-
-Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
-
-[discrete]
-=== 8.16.2
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved support for Elasticsearch `v8.16`
-
-Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
-
-[discrete]
-===== Drop testing artifacts from npm package
-
-Tap, the unit testing tool used by this project, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
-
-[discrete]
-=== 8.16.1
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Fix ECMAScript imports
-
-Fixed package configuration to correctly support native ECMAScript `import` syntax.
-
-[discrete]
-=== 8.16.0
-
-[discrete]
-==== Features
-
-[discrete]
-===== Support for Elasticsearch `v8.16`
-
-You can find all the API changes
-https://www.elastic.co/guide/en/elasticsearch/reference/8.16/release-notes-8.16.0.html[here].
-
-[discrete]
-===== Support Apache Arrow in ES|QL helper
-
-The ES|QL helper can now return results as an Apache Arrow `Table` or `RecordBatchReader`, which enables high-performance calculations on ES|QL results, even if the response data is larger than the system's available memory. See <<esql-helper>> for more information.
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Pass prototype poisoning options to serializer correctly
-
-The client's `disablePrototypePoisoningProtection` option was set to `true` by default, but when it was set to any other value it was ignored, making it impossible to enable prototype poisoning protection without providing a custom serializer implementation.
-
-[discrete]
-=== 8.15.3
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved support for Elasticsearch `v8.15`
-
-Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
-
-[discrete]
-===== Drop testing artifacts from npm package
-
-Tap, the unit testing tool, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
-
-[discrete]
-=== 8.15.2
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved support for Elasticsearch `v8.15`
-
-Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
-
-[discrete]
-=== 8.15.1
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved support for Elasticsearch `v8.15`
-
-Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
-
 [discrete]
 === 8.15.0

@@ -716,7 +555,6 @@ ac.abort()
 ----

 [discrete]
-[[remove-body-key]]
 ===== Remove the body key from the request

 *Breaking: Yes* | *Migration effort: Small*
@@ -28,6 +28,7 @@ const client = new Client({
 })
 const child = client.child({
 headers: { 'x-foo': 'bar' },
+requestTimeout: 1000
 })

 client.info().then(console.log, console.log)
@@ -7,6 +7,5 @@ section, you can see the possible options that you can use to configure it.

 * <<basic-config>>
 * <<advanced-config>>
-* <<timeout-best-practices>>
 * <<child>>
 * <<client-testing>>
@@ -349,7 +349,7 @@ In this case, the result will be:
 body: object | boolean
 statusCode: number
 headers: object
-warnings: string[],
+warnings: [string],
 meta: object
 }
 ----
@@ -410,23 +410,19 @@ The supported request specific options are:
 [cols=2*]
 |===
 |`ignore`
-|`number[]` - HTTP status codes which should not be considered errors for this request. +
+|`[number]` - HTTP status codes which should not be considered errors for this request. +
 _Default:_ `null`

 |`requestTimeout`
-|`number | string | null` - Max request timeout for the request in milliseconds. This overrides the client default, which is to not time out at all. See https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-network.html#_http_client_configuration[Elasticsearch best practices for HTML clients] for more info. +
-_Default:_ No timeout
+|`number` - Max request timeout for the request in milliseconds, it overrides the client default. +
+_Default:_ `30000`

-|`retryOnTimeout`
-|`boolean` - Retry requests that have timed out.
-_Default:_ `false`
-
 |`maxRetries`
 |`number` - Max number of retries for the request, it overrides the client default. +
 _Default:_ `3`

 |`compression`
-|`string | boolean` - Enables body compression for the request. +
+|`string, boolean` - Enables body compression for the request. +
 _Options:_ `false`, `'gzip'` +
 _Default:_ `false`

@@ -450,10 +446,6 @@ _Default:_ `null`
 |`any` - Custom object per request. _(you can use it to pass data to the clients events)_ +
 _Default:_ `null`

-|`opaqueId`
-|`string` - Set the `X-Opaque-Id` HTTP header. See {ref}/api-conventions.html#x-opaque-id
-_Default:_ `null`
-
 |`maxResponseSize`
 |`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENTGH +
 _Default:_ `null`
@@ -466,17 +458,6 @@ _Default:_ `null`
 |`AbortSignal` - The AbortSignal instance to allow request abortion. +
 _Default:_ `null`

-|`meta`
-|`boolean` - Rather than returning the body, return an object containing `body`, `statusCode`, `headers` and `meta` keys +
-_Default_: `false`
-
-|`redaction`
-|`object` - Options for redacting potentially sensitive data from error metadata. See <<redaction>>.
-
-|`retryBackoff`
-|`(min: number, max: number, attempt: number) => number;` - A function that calculates how long to sleep, in seconds, before the next request retry +
-_Default:_ A built-in function that uses exponential backoff with jitter.
-
 |===

 [discrete]
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.getDataStream({
-name: "my-data-stream",
-filter_path: "data_streams.indices.index_name",
-});
-console.log(response);
-----
@@ -1,46 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-index: "retrievers_example",
-retriever: {
-rrf: {
-retrievers: [
-{
-standard: {
-query: {
-range: {
-year: {
-gt: 2023,
-},
-},
-},
-},
-},
-{
-standard: {
-query: {
-term: {
-topic: "elastic",
-},
-},
-},
-},
-],
-rank_window_size: 10,
-rank_constant: 1,
-},
-},
-_source: false,
-aggs: {
-topics: {
-terms: {
-field: "topic",
-},
-},
-},
-});
-console.log(response);
-----
@@ -1,18 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.inference.put({
-task_type: "rerank",
-inference_id: "my-rerank-model",
-inference_config: {
-service: "cohere",
-service_settings: {
-model_id: "rerank-english-v3.0",
-api_key: "{{COHERE_API_KEY}}",
-},
-},
-});
-console.log(response);
-----
@@ -4,7 +4,7 @@
 [source, js]
 ----
 const response = await client.indices.create({
-index: "my-index-000002",
+index: "my-index-000003",
 mappings: {
 properties: {
 metrics: {
@@ -29,7 +29,7 @@ const response = await client.indices.create({
 console.log(response);

 const response1 = await client.indices.getMapping({
-index: "my-index-000002",
+index: "my-index-000003",
 });
 console.log(response1);
 ----
@@ -1,8 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.cluster.reroute();
-console.log(response);
-----
@@ -1,42 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-index: "my-rank-vectors-bit",
-mappings: {
-properties: {
-my_vector: {
-type: "rank_vectors",
-element_type: "bit",
-},
-},
-},
-});
-console.log(response);
-
-const response1 = await client.bulk({
-index: "my-rank-vectors-bit",
-refresh: "true",
-operations: [
-{
-index: {
-_id: "1",
-},
-},
-{
-my_vector: [127, -127, 0, 1, 42],
-},
-{
-index: {
-_id: "2",
-},
-},
-{
-my_vector: "8100012a7f",
-},
-],
-});
-console.log(response1);
-----
@@ -1,49 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-index: "retrievers_example_nested",
-retriever: {
-rrf: {
-retrievers: [
-{
-standard: {
-query: {
-nested: {
-path: "nested_field",
-inner_hits: {
-name: "nested_vector",
-_source: false,
-fields: ["nested_field.paragraph_id"],
-},
-query: {
-knn: {
-field: "nested_field.nested_vector",
-query_vector: [1, 0, 0.5],
-k: 10,
-},
-},
-},
-},
-},
-},
-{
-standard: {
-query: {
-term: {
-topic: "ai",
-},
-},
-},
-},
-],
-rank_window_size: 10,
-rank_constant: 1,
-},
-},
-_source: ["topic"],
-});
-console.log(response);
-----
@@ -1,19 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.transport.request({
-method: "POST",
-path: "/_query/async",
-querystring: {
-format: "json",
-},
-body: {
-query:
-"\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
-include_ccs_metadata: true,
-},
-});
-console.log(response);
-----
@@ -4,7 +4,12 @@
 [source, js]
 ----
 const response = await client.indices.create({
-index: "books",
+index: "idx",
+mappings: {
+_source: {
+mode: "synthetic",
+},
+},
 });
 console.log(response);
 ----
@@ -1,18 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-index: "cooking_blog",
-query: {
-match: {
-title: {
-query: "fluffy pancakes breakfast",
-minimum_should_match: 2,
-},
-},
-},
-});
-console.log(response);
-----
@@ -1,57 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-index: "retrievers_example",
-retriever: {
-rrf: {
-retrievers: [
-{
-standard: {
-query: {
-term: {
-topic: "elastic",
-},
-},
-},
-},
-{
-rrf: {
-retrievers: [
-{
-standard: {
-query: {
-query_string: {
-query:
-"(information retrieval) OR (artificial intelligence)",
-default_field: "text",
-},
-},
-},
-},
-{
-knn: {
-field: "vector",
-query_vector: [0.23, 0.67, 0.89],
-k: 3,
-num_candidates: 5,
-},
-},
-],
-rank_window_size: 10,
-rank_constant: 1,
-},
-},
-],
-rank_window_size: 10,
-rank_constant: 1,
-},
-},
-_source: false,
-size: 1,
-explain: true,
-});
-console.log(response);
-----
@@ -3,9 +3,8 @@

 [source, js]
 ----
-const response = await client.transport.request({
-method: "POST",
-path: "/_application/search_application/my-app/_render_query",
+const response = await client.searchApplication.renderQuery({
+name: "my-app",
 body: {
 params: {
 query_string: "my first query",
@@ -1,21 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.inference.put({
-task_type: "text_embedding",
-inference_id: "watsonx-embeddings",
-inference_config: {
-service: "watsonxai",
-service_settings: {
-api_key: "<api_key>",
-url: "<url>",
-model_id: "ibm/slate-30m-english-rtrvr",
-project_id: "<project_id>",
-api_version: "2024-03-14",
-},
-},
-});
-console.log(response);
-----
@@ -5,16 +5,10 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  settings: {
-    index: {
-      mapping: {
-        source: {
-          mode: "synthetic",
-        },
-      },
-    },
-  },
   mappings: {
+    _source: {
+      mode: "synthetic",
+    },
     properties: {
       f: {
         type: "scaled_float",
docs/doc_examples/0e83f140237d75469a428ff403564bb5.asciidoc (new file)
@@ -0,0 +1,15 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.cluster.putSettings({
+  persistent: {
+    "cluster.routing.allocation.disk.watermark.low": "100gb",
+    "cluster.routing.allocation.disk.watermark.high": "50gb",
+    "cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
+    "cluster.info.update.interval": "1m",
+  },
+});
+console.log(response);
+----
@@ -5,16 +5,10 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  settings: {
-    index: {
-      mapping: {
-        source: {
-          mode: "synthetic",
-        },
-      },
-    },
-  },
   mappings: {
+    _source: {
+      mode: "synthetic",
+    },
     properties: {
       text: {
         type: "text",
@@ -1,15 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.transport.request({
-  method: "GET",
-  path: "/_connector/_sync_job",
-  querystring: {
-    connector_id: "my-connector-id",
-    size: "1",
-  },
-});
-console.log(response);
-----
@@ -1,20 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.transport.request({
-  method: "POST",
-  path: "/_inference/chat_completion/openai-completion/_stream",
-  body: {
-    model: "gpt-4o",
-    messages: [
-      {
-        role: "user",
-        content: "What is Elastic?",
-      },
-    ],
-  },
-});
-console.log(response);
-----
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.addBlock({
-  index: ".ml-anomalies-custom-example",
-  block: "read_only",
-});
-console.log(response);
-----
@@ -1,18 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.update({
-  index: ".elastic-connectors",
-  id: "connector_id",
-  doc: {
-    features: {
-      native_connector_api_keys: {
-        enabled: true,
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,16 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.inference.put({
-  task_type: "my-inference-endpoint",
-  inference_id: "_update",
-  inference_config: {
-    service_settings: {
-      api_key: "<API_KEY>",
-    },
-  },
-});
-console.log(response);
-----
@@ -4,16 +4,15 @@
 [source, js]
 ----
 const response = await client.search({
-  index: "my-index-bit-vectors",
+  index: "test-index",
   query: {
-    script_score: {
+    nested: {
+      path: "inference_field.inference.chunks",
       query: {
-        match_all: {},
-      },
-      script: {
-        source: "dotProduct(params.query_vector, 'my_dense_vector')",
-        params: {
-          query_vector: [8, 5, -15, 1, -7],
+        sparse_vector: {
+          field: "inference_field.inference.chunks.embeddings",
+          inference_id: "my-inference-id",
+          query: "mountain lake",
         },
       },
     },
@@ -5,16 +5,10 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  settings: {
-    index: {
-      mapping: {
-        source: {
-          mode: "synthetic",
-        },
-      },
-    },
-  },
   mappings: {
+    _source: {
+      mode: "synthetic",
+    },
     properties: {
       date: {
         type: "date_nanos",
@@ -1,19 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.connector.updateConfiguration({
-  connector_id: "my-connector-id",
-  values: {
-    host: "127.0.0.1",
-    port: 5432,
-    username: "myuser",
-    password: "mypassword",
-    database: "chinook",
-    schema: "public",
-    tables: "album,artist",
-  },
-});
-console.log(response);
-----
@@ -1,28 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "my-index-bit-vectors",
-  query: {
-    script_score: {
-      query: {
-        match_all: {},
-      },
-      script: {
-        source: "dotProduct(params.query_vector, 'my_dense_vector')",
-        params: {
-          query_vector: [
-            0.23, 1.45, 3.67, 4.89, -0.56, 2.34, 3.21, 1.78, -2.45, 0.98, -0.12,
-            3.45, 4.56, 2.78, 1.23, 0.67, 3.89, 4.12, -2.34, 1.56, 0.78, 3.21,
-            4.12, 2.45, -1.67, 0.34, -3.45, 4.56, -2.78, 1.23, -0.67, 3.89,
-            -4.34, 2.12, -1.56, 0.78, -3.21, 4.45, 2.12, 1.67,
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
     {
       attachment: {
         field: "data",
-        remove_binary: true,
+        remove_binary: false,
       },
     },
   ],
@@ -11,7 +11,7 @@ const response = await client.searchApplication.put({
   script: {
     lang: "mustache",
     source:
-      '\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
+      '\n {\n "query": {\n "bool": {\n "must": [\n {{#query}}\n \n {{/query}}\n ],\n "filter": {{#toJson}}_es_filters{{/toJson}}\n }\n },\n "_source": {\n "includes": ["title", "plot"]\n },\n "highlight": {\n "fields": {\n "title": { "fragment_size": 0 },\n "plot": { "fragment_size": 200 }\n }\n },\n "aggs": {{#toJson}}_es_aggs{{/toJson}},\n "from": {{from}},\n "size": {{size}},\n "sort": {{#toJson}}_es_sort_fields{{/toJson}}\n }\n ',
     params: {
       query: "",
       _es_filters: {},
@@ -1,15 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.delete({
-  index: "books",
-});
-console.log(response);
-
-const response1 = await client.indices.delete({
-  index: "my-explicit-mappings-books",
-});
-console.log(response1);
-----
@@ -1,26 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.security.createApiKey({
-  name: "my-connector-api-key",
-  role_descriptors: {
-    "my-connector-connector-role": {
-      cluster: ["monitor", "manage_connector"],
-      indices: [
-        {
-          names: [
-            "my-index_name",
-            ".search-acl-filter-my-index_name",
-            ".elastic-connectors*",
-          ],
-          privileges: ["all"],
-          allow_restricted_indices: false,
-        },
-      ],
-    },
-  },
-});
-console.log(response);
-----
@@ -7,9 +7,9 @@ const response = await client.indices.create({
   index: "my-index-000002",
   mappings: {
     properties: {
-      inference_field: {
-        type: "semantic_text",
-        inference_id: "my-openai-endpoint",
+      datetime: {
+        type: "date",
+        format: "uuuu/MM/dd HH:mm:ss||uuuu/MM/dd||epoch_millis",
       },
     },
   },
@@ -3,9 +3,7 @@
 
 [source, js]
 ----
-const response = await client.transport.request({
-  method: "POST",
-  path: "/_ingest/_simulate",
+const response = await client.simulate.ingest({
   body: {
     docs: [
       {
@@ -5,7 +5,7 @@
 ----
 const response = await client.transport.request({
   method: "DELETE",
-  path: "/_ingest/ip_location/database/my-database-id",
+  path: "/_ingest/geoip/database/my-database-id",
 });
 console.log(response);
 ----
@@ -9,6 +9,7 @@ const response = await client.indices.create({
     properties: {
       inference_field: {
         type: "semantic_text",
+        inference_id: "my-elser-endpoint",
       },
     },
   },
@@ -1,19 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.index({
-  index: "mv",
-  refresh: "true",
-  document: {
-    a: [2, null, 1],
-  },
-});
-console.log(response);
-
-const response1 = await client.esql.query({
-  query: "FROM mv | LIMIT 1",
-});
-console.log(response1);
-----
@@ -5,7 +5,7 @@
 ----
 const response = await client.cat.mlTrainedModels({
   h: "c,o,l,ct,v",
-  v: "true",
+  v: "ture",
 });
 console.log(response);
 ----
@@ -5,16 +5,10 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  settings: {
-    index: {
-      mapping: {
-        source: {
-          mode: "synthetic",
-        },
-      },
-    },
-  },
   mappings: {
+    _source: {
+      mode: "synthetic",
+    },
     properties: {
       flattened: {
         type: "flattened",
@@ -4,11 +4,9 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: ".reindexed-v9-ml-anomalies-custom-example",
+  index: ".watches",
   settings: {
-    index: {
-      number_of_replicas: 0,
-    },
+    "index.routing.allocation.include.role": "watcher",
   },
 });
 console.log(response);
@@ -5,16 +5,10 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  settings: {
-    index: {
-      mapping: {
-        source: {
-          mode: "synthetic",
-        },
-      },
-    },
-  },
   mappings: {
+    _source: {
+      mode: "synthetic",
+    },
     properties: {
       text: {
         type: "text",
@@ -1,19 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.security.queryRole({
-  query: {
-    bool: {
-      must_not: {
-        term: {
-          "metadata._reserved": true,
-        },
-      },
-    },
-  },
-  sort: ["name"],
-});
-console.log(response);
-----
@@ -1,33 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.security.createApiKey({
-  name: "john-api-key",
-  expiration: "1d",
-  role_descriptors: {
-    "sharepoint-online-role": {
-      index: [
-        {
-          names: ["sharepoint-search-application"],
-          privileges: ["read"],
-          query: {
-            template: {
-              params: {
-                access_control: ["john@example.co", "Engineering Members"],
-              },
-              source:
-                '\n {\n "bool": {\n "should": [\n {\n "bool": {\n "must_not": {\n "exists": {\n "field": "_allow_access_control"\n }\n }\n }\n },\n {\n "terms": {\n "_allow_access_control.enum": {{#toJson}}access_control{{/toJson}}\n }\n }\n ]\n }\n }\n ',
-            },
-          },
-        },
-      ],
-      restriction: {
-        workflows: ["search_application_query"],
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -5,16 +5,10 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  settings: {
-    index: {
-      mapping: {
-        source: {
-          mode: "synthetic",
-        },
-      },
-    },
-  },
   mappings: {
+    _source: {
+      mode: "synthetic",
+    },
     properties: {
       my_range: {
         type: "ip_range",
@@ -14,7 +14,6 @@ const response = await client.indices.putSettings({
     "index.search.slowlog.threshold.fetch.info": "800ms",
     "index.search.slowlog.threshold.fetch.debug": "500ms",
     "index.search.slowlog.threshold.fetch.trace": "200ms",
-    "index.search.slowlog.include.user": true,
   },
 });
 console.log(response);
@@ -1,67 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-rank-vectors-bit",
-  mappings: {
-    properties: {
-      my_vector: {
-        type: "rank_vectors",
-        element_type: "bit",
-      },
-    },
-  },
-});
-console.log(response);
-
-const response1 = await client.bulk({
-  index: "my-rank-vectors-bit",
-  refresh: "true",
-  operations: [
-    {
-      index: {
-        _id: "1",
-      },
-    },
-    {
-      my_vector: [127, -127, 0, 1, 42],
-    },
-    {
-      index: {
-        _id: "2",
-      },
-    },
-    {
-      my_vector: "8100012a7f",
-    },
-  ],
-});
-console.log(response1);
-
-const response2 = await client.search({
-  index: "my-rank-vectors-bit",
-  query: {
-    script_score: {
-      query: {
-        match_all: {},
-      },
-      script: {
-        source: "maxSimDotProduct(params.query_vector, 'my_vector')",
-        params: {
-          query_vector: [
-            [
-              0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
-              0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
-              0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
-              0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
-            ],
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response2);
-----
docs/doc_examples/2577acb462b95bd4394523cf2f8a661f.asciidoc (new file)
@@ -0,0 +1,28 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.esql.query({
+  format: "txt",
+  query:
+    "\n FROM library\n | SORT page_count DESC\n | KEEP name, author\n | LOOKUP era ON author\n | LIMIT 5\n ",
+  tables: {
+    era: {
+      author: {
+        keyword: [
+          "Frank Herbert",
+          "Peter F. Hamilton",
+          "Vernor Vinge",
+          "Alastair Reynolds",
+          "James S.A. Corey",
+        ],
+      },
+      era: {
+        keyword: ["The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"],
+      },
+    },
+  },
+});
+console.log(response);
+----
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.addBlock({
-  index: ".ml-anomalies-custom-example",
-  block: "write",
-});
-console.log(response);
-----
@@ -5,16 +5,10 @@
 ----
 const response = await client.indices.create({
   index: "idx",
-  settings: {
-    index: {
-      mapping: {
-        source: {
-          mode: "synthetic",
-        },
-      },
-    },
-  },
   mappings: {
+    _source: {
+      mode: "synthetic",
+    },
     properties: {
       bool: {
         type: "boolean",
@@ -3,9 +3,7 @@
 
 [source, js]
 ----
-const response = await client.transport.request({
-  method: "POST",
-  path: "/_security/oidc/logout",
+const response = await client.security.oidcLogout({
   body: {
     token:
       "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
@@ -1,26 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "my-rank-vectors-float",
-  query: {
-    script_score: {
-      query: {
-        match_all: {},
-      },
-      script: {
-        source: "maxSimDotProduct(params.query_vector, 'my_vector')",
-        params: {
-          query_vector: [
-            [0.5, 10, 6],
-            [-0.5, 10, 10],
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,35 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.ingest.putPipeline({
-  id: "attachment",
-  description: "Extract attachment information including original binary",
-  processors: [
-    {
-      attachment: {
-        field: "data",
-        remove_binary: false,
-      },
-    },
-  ],
-});
-console.log(response);
-
-const response1 = await client.index({
-  index: "my-index-000001",
-  id: "my_id",
-  pipeline: "attachment",
-  document: {
-    data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
-  },
-});
-console.log(response1);
-
-const response2 = await client.get({
-  index: "my-index-000001",
-  id: "my_id",
-});
-console.log(response2);
-----
@@ -1,24 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "test-index",
-  query: {
-    match: {
-      my_field: "Which country is Paris in?",
-    },
-  },
-  highlight: {
-    fields: {
-      my_field: {
-        type: "semantic",
-        number_of_fragments: 2,
-        order: "score",
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -3,12 +3,10 @@
 
 [source, js]
 ----
-const response = await client.transport.request({
-  method: "GET",
-  path: "/_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
-  querystring: {
-    wait_for_completion_timeout: "30s",
-  },
+const response = await client.esql.asyncQueryGet({
+  id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
+  wait_for_completion_timeout: "30s",
+  body: null,
 });
 console.log(response);
 ----
@@ -5,8 +5,11 @@
 ----
 const response = await client.cluster.putSettings({
   persistent: {
-    "cluster.routing.allocation.disk.watermark.low": "90%",
-    "cluster.routing.allocation.disk.watermark.high": "95%",
+    "cluster.indices.close.enable": false,
+    "indices.recovery.max_bytes_per_sec": "50mb",
+  },
+  transient: {
+    "*": null,
   },
 });
 console.log(response);
@@ -1,77 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-index-000001",
-  mappings: {
-    properties: {
-      attributes: {
-        type: "passthrough",
-        priority: 10,
-        properties: {
-          id: {
-            type: "keyword",
-          },
-        },
-      },
-    },
-  },
-});
-console.log(response);
-
-const response1 = await client.index({
-  index: "my-index-000001",
-  id: 1,
-  document: {
-    attributes: {
-      id: "foo",
-      zone: 10,
-    },
-  },
-});
-console.log(response1);
-
-const response2 = await client.search({
-  index: "my-index-000001",
-  query: {
-    bool: {
-      must: [
-        {
-          match: {
-            id: "foo",
-          },
-        },
-        {
-          match: {
-            zone: 10,
-          },
-        },
-      ],
-    },
-  },
-});
-console.log(response2);
-
-const response3 = await client.search({
-  index: "my-index-000001",
-  query: {
-    bool: {
-      must: [
-        {
-          match: {
-            "attributes.id": "foo",
-          },
-        },
-        {
-          match: {
-            "attributes.zone": 10,
-          },
-        },
-      ],
-    },
-  },
-});
-console.log(response3);
-----
@@ -1,28 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-explicit-mappings-books",
-  mappings: {
-    dynamic: false,
-    properties: {
-      name: {
-        type: "text",
-      },
-      author: {
-        type: "text",
-      },
-      release_date: {
-        type: "date",
-        format: "yyyy-MM-dd",
-      },
-      page_count: {
-        type: "integer",
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -4,6 +4,7 @@
 [source, js]
 ----
 const response = await client.cluster.reroute({
+  metric: "none",
   commands: [
     {
       move: {
@@ -1,25 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.index({
-  index: "idx_keep",
-  id: 1,
-  document: {
-    path: {
-      to: [
-        {
-          foo: [3, 2, 1],
-        },
-        {
-          foo: [30, 20, 10],
-        },
-      ],
-      bar: "baz",
-    },
-    ids: [200, 100, 300, 100],
-  },
-});
-console.log(response);
-----
@@ -1,43 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "my-index-000001",
-  retriever: {
-    rule: {
-      match_criteria: {
-        query_string: "puggles",
-        user_country: "us",
-      },
-      ruleset_ids: ["my-ruleset"],
-      retriever: {
-        rrf: {
-          retrievers: [
-            {
-              standard: {
-                query: {
-                  query_string: {
-                    query: "pugs",
-                  },
-                },
-              },
-            },
-            {
-              standard: {
-                query: {
-                  query_string: {
-                    query: "puggles",
-                  },
-                },
-              },
-            },
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -4,10 +4,9 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: "*",
+  index: "my-index-000001",
   settings: {
     "index.indexing.slowlog.include.user": true,
-    "index.indexing.slowlog.threshold.index.warn": "30s",
   },
 });
 console.log(response);
@@ -4,11 +4,9 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: ".reindexed-v9-ml-anomalies-custom-example",
+  index: "my-index-000001",
   settings: {
-    index: {
-      number_of_replicas: "<original_number_of_replicas>",
-    },
+    "index.merge.policy.max_merge_at_once_explicit": null,
   },
 });
 console.log(response);
@@ -1,23 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "test-index",
-  mappings: {
-    properties: {
-      source_field: {
-        type: "text",
-        fields: {
-          infer_field: {
-            type: "semantic_text",
-            inference_id: ".elser-2-elasticsearch",
-          },
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -45,7 +45,7 @@ console.log(response);
 
 const response1 = await client.indices.putIndexTemplate({
   name: 2,
-  index_patterns: ["k9s*"],
+  index_patterns: ["k8s*"],
   composed_of: ["destination_template"],
   data_stream: {},
 });
@@ -1,23 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.inference.put({
-  task_type: "rerank",
-  inference_id: "my-elastic-rerank",
-  inference_config: {
-    service: "elasticsearch",
-    service_settings: {
-      model_id: ".rerank-v1",
-      num_threads: 1,
-      adaptive_allocations: {
-        enabled: true,
-        min_number_of_allocations: 1,
-        max_number_of_allocations: 4,
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,28 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "my-index-*",
-  query: {
-    bool: {
-      must: [
-        {
-          match: {
-            "user.id": "kimchy",
-          },
-        },
-      ],
-      must_not: [
-        {
-          terms: {
-            _index: ["my-index-01"],
-          },
-        },
-      ],
-    },
-  },
-});
-console.log(response);
-----
@@ -3,8 +3,8 @@
 
 [source, js]
 ----
-const response = await client.indices.rollover({
-  alias: "datastream",
+const response = await client.indices.unfreeze({
+  index: "my-index-000001",
 });
 console.log(response);
 ----
@@ -1,31 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.ilm.putLifecycle({
-  name: "my_policy",
-  policy: {
-    phases: {
-      hot: {
-        actions: {
-          rollover: {
-            max_primary_shard_size: "50gb",
-          },
-          searchable_snapshot: {
-            snapshot_repository: "backing_repo",
-            replicate_for: "14d",
-          },
-        },
-      },
-      delete: {
-        min_age: "28d",
-        actions: {
-          delete: {},
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,52 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-index-bit-vectors",
-  mappings: {
-    properties: {
-      my_dense_vector: {
-        type: "dense_vector",
-        index: false,
-        element_type: "bit",
-        dims: 40,
-      },
-    },
-  },
-});
-console.log(response);
-
-const response1 = await client.index({
-  index: "my-index-bit-vectors",
-  id: 1,
-  document: {
-    my_dense_vector: [8, 5, -15, 1, -7],
-  },
-});
-console.log(response1);
-
-const response2 = await client.index({
-  index: "my-index-bit-vectors",
-  id: 2,
-  document: {
-    my_dense_vector: [-1, 115, -3, 4, -128],
-  },
-});
-console.log(response2);
-
-const response3 = await client.index({
-  index: "my-index-bit-vectors",
-  id: 3,
-  document: {
-    my_dense_vector: [2, 18, -5, 0, -124],
-  },
-});
-console.log(response3);
-
-const response4 = await client.indices.refresh({
-  index: "my-index-bit-vectors",
-});
-console.log(response4);
-----
@@ -4,11 +4,9 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: "*",
+  index: "my-index-000001",
   settings: {
     "index.search.slowlog.include.user": true,
-    "index.search.slowlog.threshold.fetch.warn": "30s",
-    "index.search.slowlog.threshold.query.warn": "30s",
   },
 });
 console.log(response);
@@ -6,7 +6,6 @@
 const response = await client.indices.resolveCluster({
   name: "not-present,clust*:my-index*,oldcluster:*",
   ignore_unavailable: "false",
-  timeout: "5s",
 });
 console.log(response);
 ----
@@ -4,6 +4,7 @@
 [source, js]
 ----
 const response = await client.cluster.reroute({
+  metric: "none",
   commands: [
     {
       allocate_empty_primary: {
@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
     {
       attachment: {
         field: "data",
-        remove_binary: true,
+        remove_binary: false,
      },
    },
  ],
Some files were not shown because too many files have changed in this diff.