Compare commits

..

13 Commits

3557 changed files with 10767 additions and 95423 deletions

@@ -1,16 +0,0 @@
ARG NODE_VERSION=${NODE_VERSION:-18}
FROM node:$NODE_VERSION
# Install required tools
RUN apt-get clean -y && \
apt-get -qy update && \
apt-get -y install zip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
WORKDIR /usr/src/app
COPY package.json .
RUN npm install --production=false
COPY . .

@@ -1,30 +0,0 @@
ARG NODE_JS_VERSION=${NODE_JS_VERSION:-18}
FROM node:${NODE_JS_VERSION}
ARG BUILDER_UID=1000
ARG BUILDER_GID=1000
ENV BUILDER_USER elastic
ENV BUILDER_GROUP elastic
# install zip util
RUN apt-get clean -y && \
apt-get update -y && \
apt-get install -y zip
# Set user permissions and directory
RUN (id -g ${BUILDER_GID} || groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP}) \
&& (id -u ${BUILDER_UID} || useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GID} -m elastic) \
&& mkdir -p /usr/src/elasticsearch-js \
&& chown -R ${BUILDER_UID}:${BUILDER_GID} /usr/src/
WORKDIR /usr/src/elasticsearch-js
# run remainder of commands as non-root user
USER ${BUILDER_UID}:${BUILDER_GID}
# install dependencies
COPY package.json .
RUN npm install --production=false
# copy project files
COPY . .

@@ -1,21 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDYjCCAkqgAwIBAgIVAIClHav09e9XGWJrnshywAjUHTnXMA0GCSqGSIb3DQEB
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
ZXJhdGVkIENBMB4XDTIzMDMyODE3MDIzOVoXDTI2MDMyNzE3MDIzOVowEzERMA8G
A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCV
+t5/g6u2r3awCtzqp17KG0hRxzkVoJoF8DYzVh+Rv9ymxQW0C/U8dQihAjkZHaIA
n49lSyNLkwWtmqQgPcimV4d6XuTYx2ahDixXYtjmoOSwH5dRtovKPCNKDPkUj9Vq
NwMW0uB1VxniMKI4DnYFqBgHL9kQKhQqvas6Gx0X6ptGRCLYCtVxeFcau6nnkZJt
urb+HNV5waOh0uTmsqnnslK3NjCQ/f030vPKxM5fOqOU5ajUHpZFJ6ZFmS32074H
l+mZoRT/GtbnVtIg+CJXsWThF3/L4iBImv+rkY9MKX5fyMLJgmIJG68S90IQGR8c
Z2lZYzC0J7zjMsYlODbDAgMBAAGjgYswgYgwHQYDVR0OBBYEFIDIcECn3AVHc3jk
MpQ4r7Kc3WCsMB8GA1UdIwQYMBaAFJYCWKn16g+acbing4Vl45QGUBs0MDsGA1Ud
EQQ0MDKCCWxvY2FsaG9zdIIIaW5zdGFuY2WHBH8AAAGHEAAAAAAAAAAAAAAAAAAA
AAGCA2VzMTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBCwUAA4IBAQBtX3RQ5ATpfORM
lrnhaUPGOWkjnb3p3BrdAWUaWoh136QhaXqxKiALQQhTtTerkXOcuquy9MmAyYvS
9fDdGvLCAO8pPCXjnzonCHerCLGdS7f/eqvSFWCdy7LPHzTAFYfVWVvbZed+83TL
bDY63AMwIexj34vJEStMapuFwWx05fstE8qZWIbYCL87sF5H/MRhzlz3ScAhQ1N7
tODH7zvLzSxFGGEzCIKZ0iPFKbd3Y0wE6SptDSKhOqlnC8kkNeI2GjWsqVfHKsoF
pDFmri7IfOucuvalXJ6xiHPr9RDbuxEXs0u8mteT5nFQo7EaEGdHpg1pNGbfBOzP
lmj/dRS9
-----END CERTIFICATE-----

@@ -1,27 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAlfref4Ortq92sArc6qdeyhtIUcc5FaCaBfA2M1Yfkb/cpsUF
tAv1PHUIoQI5GR2iAJ+PZUsjS5MFrZqkID3IpleHel7k2MdmoQ4sV2LY5qDksB+X
UbaLyjwjSgz5FI/VajcDFtLgdVcZ4jCiOA52BagYBy/ZECoUKr2rOhsdF+qbRkQi
2ArVcXhXGrup55GSbbq2/hzVecGjodLk5rKp57JStzYwkP39N9LzysTOXzqjlOWo
1B6WRSemRZkt9tO+B5fpmaEU/xrW51bSIPgiV7Fk4Rd/y+IgSJr/q5GPTCl+X8jC
yYJiCRuvEvdCEBkfHGdpWWMwtCe84zLGJTg2wwIDAQABAoIBAAEP7HYNNnDWdYMD
+WAtYM12X/W5s/wUP94juaBI4u4iZH2EZodlixEdZUCTXgq43WsDUhxX05s7cE+p
H5DuSCHtoo2WHvGKAposwRDm2f3YVWQ2Xyb2ahNt69LYHHWrO+XQ60YYTa3r8Gn3
7dFR3I016/jyn5DeEVaglvS1dfj2UG4ybR4KkMfcKd94X0rKvz3wzAhHIh+hwMtv
sVk7V4vSnKf2mJXwIVECTolnEJEkCjWjjymgUJYKT8yN7JnAsHRcvMa6kWwIGrLp
oQCEaJwYM6ynCRS989pLt3vA2iu5VkYhiHXJ9Ds/5b5yzhzmj+ymzKbFKrrUUrmn
+2Jp1K0CgYEAw8BchALsD/+JuoXjinA14MH7PZjIsXyhtPk+c4pk42iMNyg1J8XF
Y/ITepLYsl2bZqQI1jOJdDqsTwIsva9r749lsmkYI3VOxhi7+qBK0sThR66C87lX
iU2QpnZ9NloC6ort4a3MEvZ/gRQcXdBrNlNoza2p7PHAVDTnsdSrNKUCgYEAxCQV
uo85oZyfnMufn/gcI9IeYOgiB0tO3a8cAFX2wQW1y935t6Z13ApUQc4EnCOH7ZBc
td5kT+xGdRWnfPZ38FM1dd5MBdGE69s3q8pJDUExSgNLqaF6/5bD32qui66L3ugu
eMjxrzqJsc2uQTPCs18SGsyRmf54DpY8HglOmUcCgYAGRDgx+a347SNJl1OrcOAo
q80RMbzrAaRjmL8JD9se9I/YjC73cPtasbsx51WMkDaTWJj30nqJ//7YIKeyAtWf
u6Vzyq19JRo6eTw7T7pVePwFQW7rwnks6hDBY3WqscL6IyxuVxP7X2zBgxVNY4ir
Gox2WSLhdPPFPlRUewxoCQKBgAJvqE1u5fpZ5ame5dao0ECppXLyrymEB/C88g4X
Az+WgJGNqkJbsO8QuccvdeMylcefmWcw4fIULzPZFwF4VjkH74wNPMh9t7buPBzI
IGwnuSMAM3ph5RMzni8yNgTKIDaej6U0abwRcBBjS5zHtc1giusGS3CsNnWH7Cs7
VlyVAoGBAK+prq9t9x3tC3NfCZH8/Wfs/X0T1qm11RiL5+tOhmbguWAqSSBy8OjX
Yh8AOXrFuMGldcaTXxMeiKvI2cyybnls1MFsPoeV/fSMJbex7whdeJeTi66NOSKr
oftUHvkHS0Vv/LicMEOufFGslb4T9aPJ7oyhoSlz9CfAutDWk/q/
-----END RSA PRIVATE KEY-----

@@ -1,32 +0,0 @@
---
steps:
- label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}"
agents:
provider: "gcp"
env:
NODE_VERSION: "{{ matrix.nodejs }}"
TEST_SUITE: "{{ matrix.suite }}"
STACK_VERSION: 8.15.0
matrix:
setup:
suite:
- "free"
- "platinum"
nodejs:
- "18"
- "20"
- "22"
command: ./.buildkite/run-tests.sh
artifact_paths: "./junit-output/junit-*.xml"
- wait: ~
continue_on_failure: true
- label: ":junit: Test results"
agents:
provider: "gcp"
image: family/core-ubuntu-2204
plugins:
- junit-annotate#v2.4.1:
artifacts: "junit-output/junit-*.xml"
job-uuid-file-pattern: "junit-(.*).xml"
fail-build-on-error: true
failure-format: file
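
The matrix in the removed `.buildkite/pipeline.yml` above expands to six jobs (two suites × three Node.js versions), each exporting its cell as environment variables before invoking `./.buildkite/run-tests.sh`. A minimal sketch of what a single cell amounts to, with values taken from the matrix above (the explicit exports are only an illustration of what Buildkite injects):

```sh
# One matrix cell, e.g. the "free" suite on Node.js 20 (values from the matrix above)
export NODE_VERSION=20
export TEST_SUITE=free
export STACK_VERSION=8.15.0
./.buildkite/run-tests.sh   # starts Elasticsearch, then runs the client tests in Docker
```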

@@ -1,19 +0,0 @@
{
"jobs": [
{
"enabled": true,
"pipeline_slug": "elasticsearch-js-integration-tests",
"allowed_repo_permissions": ["admin", "write"],
"build_on_commit": true,
"skip_ci_on_only_changed": [
"\\.md$",
"\\.asciidoc$",
"^docs\\/",
"^scripts\\/",
"^catalog-info\\.yaml$",
"^test\\/unit\\/",
"^\\.github\\/"
]
}
]
}

@@ -1,31 +0,0 @@
#!/usr/bin/env bash
#
# Once called Elasticsearch should be up and running
#
script_path=$(dirname "$(realpath -s "$0")")
set -euo pipefail
repo=$(pwd)
export NODE_VERSION=${NODE_VERSION:-18}
echo "--- :javascript: Building Docker image"
docker build \
--file "$script_path/Dockerfile" \
--tag elastic/elasticsearch-js \
--build-arg NODE_VERSION="$NODE_VERSION" \
.
echo "--- :javascript: Running $TEST_SUITE tests"
mkdir -p "$repo/junit-output"
docker run \
--network="${network_name}" \
--env "TEST_ES_SERVER=${elasticsearch_url}" \
--env "ELASTIC_PASSWORD=${elastic_password}" \
--env "TEST_SUITE=${TEST_SUITE}" \
--env "ELASTIC_USER=elastic" \
--env "BUILDKITE=true" \
--volume "$repo/junit-output:/junit-output" \
--name elasticsearch-js \
--rm \
elastic/elasticsearch-js \
bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
#
# Script to run Elasticsearch container and Elasticsearch client integration tests on Buildkite
#
# Version 0.1
#
script_path=$(dirname "$(realpath -s "$0")")
source "$script_path/functions/imports.sh"
set -euo pipefail
echo "--- :elasticsearch: Starting Elasticsearch"
DETACH=true bash "$script_path/run-elasticsearch.sh"
echo "+++ :javascript: Run Client"
bash "$script_path/run-client.sh"

.ci/Dockerfile Normal file
@@ -0,0 +1,15 @@
ARG NODE_JS_VERSION=16
FROM node:${NODE_JS_VERSION}
# Create app directory
WORKDIR /usr/src/app
RUN apt-get clean -y
RUN apt-get update -y
RUN apt-get install -y zip
# Install app dependencies
COPY package*.json ./
RUN npm install
COPY . .

.ci/certs/testnode.crt Executable file
@@ -0,0 +1,19 @@
-----BEGIN CERTIFICATE-----
MIIDIzCCAgugAwIBAgIVAMTO6uVx9dLox2t0lY4IcBKZXb5WMA0GCSqGSIb3DQEB
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
ZXJhdGVkIENBMB4XDTIwMDIyNjA1NTA1OVoXDTIzMDIyNTA1NTA1OVowEzERMA8G
A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDK
YLTOikVENiN/qYupOsoXd7VYYnryyfCC/dK4FC2aozkbqjFzBdvPGAasoc4yEiH5
CGeXMgJuOjk1maqetmdIsw00j4oHJviYsnGXzxxS5swhD7spcW4Uk4V4tAUzrbfT
vW/2WW/yYCLe5phVb2chz0jL+WYb4bBmdfs/t6RtP9RqsplYAmVp3gZ6lt2YNtvE
k9gz0TVk3DuO1TquIClfRYUjuywS6xDSvxJ8Jl91EfDWM8QU+9F+YAtiv74xl2U3
P0wwMqNvMxf9/3ak3lTQGsgO4L6cwbKpVLMMzxSVunZz/sgl19xy3qHHz1Qr2MjJ
/2c2J7vahUL4NPRkjJClAgMBAAGjTTBLMB0GA1UdDgQWBBS2Wn8E2VZv4oenY+pR
O8G3zfQXhzAfBgNVHSMEGDAWgBSWAlip9eoPmnG4p4OFZeOUBlAbNDAJBgNVHRME
AjAAMA0GCSqGSIb3DQEBCwUAA4IBAQAvwPvCiJJ6v9jYcyvYY8I3gP0oCwrylpRL
n91UlgRSHUmuAObyOoVN5518gSV/bTU2SDrstcLkLFxHvnfpoGJoxsQEHuGxwDRI
nhYNd62EKLerehNM/F9ILKmvTh8f6QPCzjUuExTXv+63l2Sr6dBS7FHsGs6UKUYO
llM/y9wMZ1LCuZuBg9RhtgpFXRSgDM9Z7Begu0d/BPX9od/qAeZg9Arz4rwUiCN4
IJOMEBEPi5q1tgeS0Fb1Grpqd0Uz5tZKtEHNKzLG+zSMmkneL62Nk2HsmEFZKwzg
u2pU42UaUE596G6o78s1aLn9ICcElPHTjiuZNSiyuu9IzvFDjGQw
-----END CERTIFICATE-----

.ci/certs/testnode.key Executable file
@@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEAymC0zopFRDYjf6mLqTrKF3e1WGJ68snwgv3SuBQtmqM5G6ox
cwXbzxgGrKHOMhIh+QhnlzICbjo5NZmqnrZnSLMNNI+KByb4mLJxl88cUubMIQ+7
KXFuFJOFeLQFM623071v9llv8mAi3uaYVW9nIc9Iy/lmG+GwZnX7P7ekbT/UarKZ
WAJlad4GepbdmDbbxJPYM9E1ZNw7jtU6riApX0WFI7ssEusQ0r8SfCZfdRHw1jPE
FPvRfmALYr++MZdlNz9MMDKjbzMX/f92pN5U0BrIDuC+nMGyqVSzDM8Ulbp2c/7I
Jdfcct6hx89UK9jIyf9nNie72oVC+DT0ZIyQpQIDAQABAoIBADAh7f7NjgnaInlD
ds8KB3SraPsbeQhzlPtiqRJU4j/MIFH/GYG03AGWQkget67a9y+GmzSvlTpoKKEh
6h2TXl9BDpv4o6ht0WRn1HJ5tM/Wyqf2WNpTew3zxCPgFPikkXsPrChYPzLTQJfp
GkP/mfTFmxfAOlPZSp4j41zVLYs53eDkAegFPVfKSr1XNNJ3QODLPcIBfxBYsiC9
oU+jRW8xYuj31cEl5k5UqrChJ1rm3mt6cguqXKbISuoSvi13gXI6DccqhuLAU+Kr
ib2XYrRP+pWocZo/pM9WUVoNGtFxfY88sAQtvG6gDKo2AURtFyq84Ow0h9mdixV/
gRIDPcECgYEA5nEqE3OKuG9WuUFGXvjtn4C0F6JjflYWh7AbX51S4F6LKrW6/XHL
Rg4BtF+XReT7OQ6llsV8kZeUxsUckkgDLzSaA8lysNDV5KkhAWHfRqH//QKFbqZi
JL9t3x63Qt81US8s2hQk3khPYTRM8ZB3xHiXvZYSGC/0x/DxfEO3QJECgYEA4NK5
sxtrat8sFz6SK9nWEKimPjDVzxJ0hxdX4tRq/JdOO5RncawVqt6TNP9gTuxfBvhW
MhJYEsQj8iUoL1dxo9d1eP8HEANNV0iX5OBvJNmgBp+2OyRSyr+PA55+wAxYuAE7
QKaitOjW57fpArNRt2hQyiSzTuqUFRWTWJHCWNUCgYAEurPTXF6vdFGCUc2g61jt
GhYYGhQSpq+lrz6Qksj9o9MVWE9zHh++21C7o+6V16I0RJGva3QoBMVf4vG4KtQt
5tV2WG8LI+4P2Ey+G4UajP6U8bVNVQrUmD0oBBhcvfn5JY+1Fg6/pRpD82/U0VMz
7AmpMWhDqNBMPiymkTk0kQKBgCuWb05cSI0ly4SOKwS5bRk5uVFhYnKNH255hh6C
FGP4acB/WzbcqC7CjEPAJ0nl5d6SExQOHmk1AcsWjR3wlCWxxiK5PwNJwJrlhh1n
reS1FKN0H36D4lFQpkeLWQOe4Sx7gKNeKzlr0w6Fx3Uwku0+Gju2tdTdAey8jB6l
08opAoGAEe1AuR/OFp2xw6V8TH9UHkkpGxy+OrXI6PX6tgk29PgB+uiMu4RwbjVz
1di1KKq2XecAilVbnyqY+edADxYGbSnci9x5wQRIebfMi3VXKtV8NQBv2as6qwtW
JDcQUWotOHjpdvmfJWWkcBhbAKrgX8ukww00ZI/lC3/rmkGnBBg=
-----END RSA PRIVATE KEY-----

.ci/docker/Dockerfile Normal file
@@ -0,0 +1,7 @@
ARG NODE_JS_VERSION=10
FROM node:${NODE_JS_VERSION}-alpine
RUN apk --no-cache add git
# Create app directory
WORKDIR /usr/src/app

@@ -2,7 +2,7 @@
#
# Shared cleanup routines between different steps
#
# Please source .buildkite/functions/imports.sh as a whole not just this file
# Please source .ci/functions/imports.sh as a whole not just this file
#
# Version 1.0.0
# - Initial version after refactor

@@ -2,7 +2,7 @@
#
# Exposes a routine scripts can call to wait for a container if that container set up a health command
#
# Please source .buildkite/functions/imports.sh as a whole not just this file
# Please source .ci/functions/imports.sh as a whole not just this file
#
# Version 1.0.1
# - Initial version after refactor

.ci/jobs/defaults.yml Normal file
@@ -0,0 +1,81 @@
---
##### GLOBAL METADATA
- meta:
cluster: clients-ci
##### JOB DEFAULTS
- job:
project-type: matrix
logrotate:
daysToKeep: 30
numToKeep: 100
parameters:
- string:
name: branch_specifier
default: refs/heads/main
description: the Git branch specifier to build (<branchName>, <tagName>,
<commitId>, etc.)
properties:
- github:
url: https://github.com/elastic/elasticsearch-js/
- inject:
properties-content: HOME=$JENKINS_HOME
concurrent: true
node: flyweight
scm:
- git:
name: origin
credentials-id: f6c7695a-671e-4f4f-a331-acdce44ff9ba
reference-repo: /var/lib/jenkins/.git-references/elasticsearch-js.git
branches:
- ${branch_specifier}
url: https://github.com/elastic/elasticsearch-js.git
basedir: ''
wipe-workspace: 'True'
triggers:
- github
vault:
# vault read auth/approle/role/clients-ci/role-id
role_id: ddbd0d44-0e51-105b-177a-c8fdfd445126
axes:
- axis:
type: slave
name: label
values:
- linux
- axis:
type: yaml
filename: .ci/test-matrix.yml
name: STACK_VERSION
- axis:
type: yaml
filename: .ci/test-matrix.yml
name: NODE_JS_VERSION
- axis:
type: yaml
filename: .ci/test-matrix.yml
name: TEST_SUITE
yaml-strategy:
exclude-key: exclude
filename: .ci/test-matrix.yml
wrappers:
- ansicolor
- timeout:
type: absolute
timeout: 120
fail: true
- timestamps
- workspace-cleanup
builders:
- shell: |-
#!/usr/local/bin/runbld
.ci/run-tests
publishers:
- email:
recipients: build-lang-clients@elastic.co
- junit:
results: "**/*-junit.xml"
allow-empty-results: true
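
This Jenkins job builds its matrix from the three YAML axes in `.ci/test-matrix.yml` (shown later in this diff) and runs `.ci/run-tests` for each combination. A sketch of one combination, with the exports standing in for what the matrix axes inject:

```sh
# One axis combination from .ci/test-matrix.yml (illustrative)
export STACK_VERSION=8.4.4-SNAPSHOT
export NODE_JS_VERSION=18
export TEST_SUITE=free
.ci/run-tests
```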

@@ -0,0 +1,15 @@
---
- job:
name: elastic+elasticsearch-js+7.17
display-name: 'elastic / elasticsearch-js # 7.17'
description: Testing the elasticsearch-js 7.17 branch.
junit_results: "*-junit.xml"
parameters:
- string:
name: branch_specifier
default: refs/heads/7.17
description: the Git branch specifier to build (<branchName>, <tagName>,
<commitId>, etc.)
triggers:
- github
- timed: 'H */12 * * *'

@@ -0,0 +1,15 @@
---
- job:
name: elastic+elasticsearch-js+8.1
display-name: 'elastic / elasticsearch-js # 8.1'
description: Testing the elasticsearch-js 8.1 branch.
junit_results: "*-junit.xml"
parameters:
- string:
name: branch_specifier
default: refs/heads/8.1
description: the Git branch specifier to build (<branchName>, <tagName>,
<commitId>, etc.)
triggers:
- github
- timed: 'H */12 * * *'

@@ -0,0 +1,15 @@
---
- job:
name: elastic+elasticsearch-js+8.2
display-name: 'elastic / elasticsearch-js # 8.2'
description: Testing the elasticsearch-js 8.2 branch.
junit_results: "*-junit.xml"
parameters:
- string:
name: branch_specifier
default: refs/heads/8.2
description: the Git branch specifier to build (<branchName>, <tagName>,
<commitId>, etc.)
triggers:
- github
- timed: 'H */12 * * *'

@@ -0,0 +1,15 @@
---
- job:
name: elastic+elasticsearch-js+main
display-name: 'elastic / elasticsearch-js # main'
description: Testing the elasticsearch-js main branch.
junit_results: "*-junit.xml"
parameters:
- string:
name: branch_specifier
default: refs/heads/main
description: the Git branch specifier to build (<branchName>, <tagName>,
<commitId>, etc.)
triggers:
- github
- timed: 'H */12 * * *'

@@ -0,0 +1,19 @@
---
- job:
name: elastic+elasticsearch-js+pull-request
display-name: 'elastic / elasticsearch-js # pull-request'
description: Testing of elasticsearch-js pull requests.
junit_results: "*-junit.xml"
scm:
- git:
branches:
- ${ghprbActualCommit}
refspec: +refs/pull/*:refs/remotes/origin/pr/*
triggers:
- github-pull-request:
org-list:
- elastic
allow-whitelist-orgs-as-admins: true
github-hooks: true
status-context: clients-ci
cancel-builds-on-update: true

@@ -28,11 +28,6 @@ import assert from 'assert'
import { join } from 'desm'
import semver from 'semver'
// xz/globals loads minimist-parsed args as a global `argv`, but it
// interprets args like '8.10' as numbers and shortens them to '8.1'.
// so we have to import and configure minimist ourselves.
import minimist from 'minimist'
const argv = minimist(process.argv.slice(2), { string: ['_', 'task'] })
assert(typeof argv.task === 'string', 'Missing task parameter')
switch (argv.task) {
@@ -74,15 +69,14 @@ async function release (args) {
async function bump (args) {
assert(args.length === 1, 'Bump task expects one parameter')
let [version] = args
const [version] = args
const packageJson = JSON.parse(await readFile(
join(import.meta.url, '..', 'package.json'),
'utf8'
))
if (version.split('.').length === 2) version = `${version}.0`
const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
assert(semver.valid(cleanVersion), `${cleanVersion} is not seen as a valid semver version. raw version: ${version}`)
assert(semver.valid(cleanVersion))
packageJson.version = cleanVersion
packageJson.versionCanary = `${cleanVersion}-canary.0`
@@ -92,46 +86,37 @@ async function bump (args) {
'utf8'
)
const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'), 'utf8')
const testMatrix = await readFile(join(import.meta.url, 'test-matrix.yml'), 'utf8')
await writeFile(
join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}`),
join(import.meta.url, 'test-matrix.yml'),
testMatrix.replace(/STACK_VERSION:\s+\- "[0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?"/, `STACK_VERSION:\n - "${cleanVersion}-SNAPSHOT"`), // eslint-disable-line
'utf8'
)
}
// this command can only be executed locally for now
async function codegen (args) {
assert(args.length === 1, 'Codegen task expects one parameter')
const version = args[0].toString()
assert(args.length === 1, 'Bump task expects one parameter')
const clientGeneratorPath = join(import.meta.url, '..', '..', 'elastic-client-generator-js')
const [version] = args
const isGeneratorCloned = await $`[[ -d ${clientGeneratorPath} ]]`.exitCode === 0
assert(isGeneratorCloned, 'You must clone the elastic-client-generator-js first')
await $`npm install --prefix ${clientGeneratorPath}`
// generate elasticsearch client. this command will take a while!
// this command will take a while!
if (version === 'main') {
await $`npm run elasticsearch --prefix ${clientGeneratorPath} -- --version main`
} else {
await $`npm run elasticsearch --prefix ${clientGeneratorPath} -- --version ${version.split('.').slice(0, 2).join('.')}`
}
// clean up fixable linter issues
await $`npm run fix --prefix ${clientGeneratorPath}`
await $`npm run lint --prefix ${clientGeneratorPath}`
await $`rm -rf ${join(import.meta.url, '..', 'src', 'api')}`
await $`mkdir ${join(import.meta.url, '..', 'src', 'api')}`
await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
await $`npm run build`
// run docs example generation
if (version === 'main') {
await $`node ./scripts/generate-docs-examples.js`
} else {
await $`node ./scripts/generate-docs-examples.js ${version.split('.').slice(0, 2).join('.')}`
}
}
function onError (err) {

@@ -1,17 +1,18 @@
#!/usr/bin/env bash
# ------------------------------------------------------- #
#
# Build entry script for elasticsearch-js
# Skeleton for common build entry script for all elastic
# clients. Needs to be adapted to individual client usage.
#
# Must be called: ./.github/make.sh <target> <params>
# Must be called: ./.ci/make.sh <target> <params>
#
# Version: 1.1.0
#
# Targets:
# ---------------------------
# assemble <VERSION> : build client artifacts with version
# assemble <VERSION> : build client artefacts with version
# bump <VERSION> : bump client internals to version
# bumpmatrix <VERSION> : bump stack version in test matrix to version
# codegen <VERSION> : generate endpoints
# docsgen <VERSION> : generate documentation
# examplegen : generate the doc examples
@@ -22,8 +23,10 @@
# ------------------------------------------------------- #
# Bootstrap
# ------------------------------------------------------- #
script_path=$(dirname "$(realpath -s "$0")")
repo=$(realpath "$script_path/../")
generator=$(realpath "$script_path/../../elastic-client-generator-js")
# shellcheck disable=SC1090
CMD=$1
@@ -31,20 +34,24 @@ TASK=$1
TASK_ARGS=()
VERSION=$2
STACK_VERSION=$VERSION
NODE_JS_VERSION=16
WORKFLOW=${WORKFLOW-staging}
set -euo pipefail
product="elastic/elasticsearch-js"
output_folder=".buildkite/output"
codegen_folder=".buildkite/output"
output_folder=".ci/output"
OUTPUT_DIR="$repo/${output_folder}"
NODE_JS_VERSION=18
WORKFLOW=${WORKFLOW-staging}
REPO_BINDING="${OUTPUT_DIR}:/sln/${output_folder}"
mkdir -p "$OUTPUT_DIR"
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
# ------------------------------------------------------- #
# Parse Command
# ------------------------------------------------------- #
case $CMD in
clean)
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
@@ -57,29 +64,18 @@ case $CMD in
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: assemble artifact $VERSION\033[0m"
echo -e "\033[36;1mTARGET: assemble artefact $VERSION\033[0m"
TASK=release
TASK_ARGS=("$VERSION" "$output_folder")
;;
codegen)
if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
# fall back to branch name or `main` if no VERSION is set
branch_name=$(git rev-parse --abbrev-ref HEAD)
if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
VERSION="$branch_name"
else
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using \`main\`\033[0m"
VERSION="main"
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: codegen -> missing version parameter\033[0m"
exit 1
fi
fi
if [ "$VERSION" = 'main' ]; then
echo -e "\033[36;1mTARGET: codegen API $VERSION\033[0m"
else
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
fi
TASK=codegen
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION")
;;
docsgen)
@@ -89,11 +85,13 @@ fi
fi
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
TASK=codegen
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
examplesgen)
echo -e "\033[36;1mTARGET: generate examples\033[0m"
TASK=codegen
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
bump)
@@ -103,23 +101,11 @@ case $CMD in
fi
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
TASK=bump
TASK_ARGS=("$VERSION")
;;
bumpmatrix)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
TASK=bumpmatrix
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION")
;;
*)
echo -e "\n'$CMD' is not supported right now\n"
echo -e "\nUsage:"
echo -e "\t $0 release \$VERSION\n"
echo -e "\t $0 bump \$VERSION"
echo -e "\t $0 codegen \$VERSION"
echo -e "\nUsage:\n\t $CMD is not supported right now\n"
exit 1
esac
@@ -131,11 +117,11 @@ esac
echo -e "\033[34;1mINFO: building $product container\033[0m"
docker build \
--file .buildkite/Dockerfile-make \
--tag "$product" \
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
--build-arg "BUILDER_UID=$(id -u)" \
--build-arg "BUILDER_GID=$(id -g)" \
--file .ci/Dockerfile \
--tag ${product} \
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
--build-arg USER_ID="$(id -u)" \
--build-arg GROUP_ID="$(id -g)" \
.
# ------------------------------------------------------- #
@@ -144,42 +130,22 @@ docker build \
echo -e "\033[34;1mINFO: running $product container\033[0m"
if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}" ]]; then
echo -e "\033[34;1mINFO: Running in local mode"
docker run \
-u "$(id -u):$(id -g)" \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
--volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
--env "WORKFLOW=$WORKFLOW" \
docker run \
--volume $repo:/usr/src/app \
--volume $generator:/usr/src/elastic-client-generator-js \
--volume /usr/src/app/node_modules \
--env "WORKFLOW=${WORKFLOW}" \
--name make-elasticsearch-js \
--rm \
$product \
/bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
else
echo -e "\033[34;1mINFO: Running in CI mode"
docker run \
--volume "$repo:/usr/src/elasticsearch-js" \
--volume /usr/src/elasticsearch-js/node_modules \
-u "$(id -u):$(id -g)" \
--env "WORKFLOW=$WORKFLOW" \
--name make-elasticsearch-js \
--rm \
$product \
/bin/bash -c "cd /usr/src && \
git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
mkdir -p /usr/src/elastic-client-generator-js/output && \
cd /usr/src/elasticsearch-js && \
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
fi
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}
# ------------------------------------------------------- #
# Post Command tasks & checks
# ------------------------------------------------------- #
if [[ "$CMD" == "assemble" ]]; then
if compgen -G ".buildkite/output/*" > /dev/null; then
if compgen -G ".ci/output/*" > /dev/null; then
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
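
For reference, the target list in the header of `.ci/make.sh` above implies invocations like the following; the version numbers are placeholders, and each target is ultimately forwarded to `node .ci/make.mjs --task <task>` inside the build container:

```sh
# <VERSION> values are placeholders; targets as listed in the make.sh header
./.ci/make.sh assemble 8.4.4   # build client artefacts for 8.4.4
./.ci/make.sh bump 8.4.4       # bump client internals to 8.4.4
./.ci/make.sh codegen 8.4      # generate endpoints from the 8.4 specification
```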

.ci/packer_cache.sh Normal file
@@ -0,0 +1,14 @@
#!/usr/bin/env bash
source /usr/local/bin/bash_standard_lib.sh
DOCKER_IMAGES="node:17-alpine
node:16-alpine
node:14-alpine
"
for di in ${DOCKER_IMAGES}
do
(retry 2 docker pull "${di}") || echo "Error pulling ${di} Docker image, we continue"
done

@@ -26,18 +26,12 @@ script_path=$(dirname $(realpath -s $0))
source $script_path/functions/imports.sh
set -euo pipefail
echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on separate terminals \033[0m"
echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on seperate terminals \033[0m"
cleanup_node $es_node_name
master_node_name=${es_node_name}
cluster_name=${moniker}${suffix}
# Set vm.max_map_count kernel setting to 262144
if [ "$(sysctl vm.max_map_count)" != 'vm.max_map_count = 262144' ]; then
echo "vm.max_map_count may be too low. resetting."
sudo sysctl -w vm.max_map_count=262144
fi
declare -a volumes
environment=($(cat <<-END
--env ELASTIC_PASSWORD=$elastic_password
@@ -79,7 +73,6 @@ END
))
else
environment+=($(cat <<-END
--env node.roles=data,data_cold,data_content,data_frozen,data_hot,data_warm,ingest,master,ml,remote_cluster_client,transform
--env xpack.security.enabled=false
--env xpack.security.http.ssl.enabled=false
END
@@ -91,13 +84,6 @@ if [[ "$TEST_SUITE" == "platinum" ]]; then
cert_validation_flags="--insecure --cacert /usr/share/elasticsearch/config/certs/ca.crt --resolve ${es_node_name}:443:127.0.0.1"
fi
echo "--- :elasticsearch: Environment setup"
echo "TEST_SUITE: $TEST_SUITE"
echo "Elasticsearch URL: $elasticsearch_url"
echo "Elasticsearch External URL: $external_elasticsearch_url"
echo "--- :elasticsearch: Running container"
# Pull the container, retry on failures up to 5 times with
# short delays between each attempt. Fixes most transient network errors.
docker_pull_attempts=0
@@ -152,4 +138,6 @@ END
if wait_for_container "$es_node_name" "$network_name"; then
echo -e "\033[32;1mSUCCESS:\033[0m Running on: $node_url\033[0m"
fi
done

.ci/run-repository.sh Executable file
@@ -0,0 +1,43 @@
#!/usr/bin/env bash
# parameters are available to this script
# STACK_VERSION -- version e.g Major.Minor.Patch(-Prelease)
# TEST_SUITE -- which test suite to run: free or platinum
# ELASTICSEARCH_URL -- The url at which elasticsearch is reachable, a default is composed based on STACK_VERSION and TEST_SUITE
# NODE_JS_VERSION -- node js version (defined in test-matrix.yml, a default is hardcoded here)
script_path=$(dirname $(realpath -s $0))
source $script_path/functions/imports.sh
set -euo pipefail
NODE_JS_VERSION=${NODE_JS_VERSION-16}
ELASTICSEARCH_URL=${ELASTICSEARCH_URL-"$elasticsearch_url"}
elasticsearch_container=${elasticsearch_container-}
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m TEST_SUITE ${TEST_SUITE}\033[0m"
echo -e "\033[34;1mINFO:\033[0m URL ${ELASTICSEARCH_URL}\033[0m"
echo -e "\033[34;1mINFO:\033[0m CONTAINER ${elasticsearch_container}\033[0m"
echo -e "\033[34;1mINFO:\033[0m NODE_JS_VERSION ${NODE_JS_VERSION}\033[0m"
echo -e "\033[1m>>>>> Build docker container >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
docker build \
--file .ci/Dockerfile \
--tag elastic/elasticsearch-js \
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
.
echo -e "\033[1m>>>>> NPM run test:integration >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
repo=$(realpath $(dirname $(realpath -s $0))/../)
docker run \
--network=${network_name} \
--env "TEST_ES_SERVER=${ELASTICSEARCH_URL}" \
--env "TEST_SUITE=${TEST_SUITE}" \
--volume $repo:/usr/src/app \
--volume /usr/src/app/node_modules \
--name elasticsearch-js \
--rm \
elastic/elasticsearch-js \
npm run test:integration
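
The parameter comments at the top of `.ci/run-repository.sh` translate into an invocation along these lines; the values are examples only, and `functions/imports.sh` normally derives `elasticsearch_url` and the Docker network for you:

```sh
# Example values only -- normally provided by the CI matrix
STACK_VERSION=8.4.4-SNAPSHOT \
TEST_SUITE=free \
NODE_JS_VERSION=16 \
bash .ci/run-repository.sh
```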

.ci/run-tests Executable file
@@ -0,0 +1,23 @@
#!/usr/bin/env bash
#
# Version 1.1
# - Moved to .ci folder and seperated out `run-repository.sh`
# - Add `$RUNSCRIPTS` env var for running Elasticsearch dependent products
script_path=$(dirname $(realpath -s $0))
source $script_path/functions/imports.sh
set -euo pipefail
echo -e "\033[1m>>>>> Start [$STACK_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
DETACH=true bash .ci/run-elasticsearch.sh
if [[ -n "$RUNSCRIPTS" ]]; then
for RUNSCRIPT in ${RUNSCRIPTS//,/ } ; do
echo -e "\033[1m>>>>> Running run-$RUNSCRIPT.sh >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
CONTAINER_NAME=${RUNSCRIPT} \
DETACH=true \
bash .ci/run-${RUNSCRIPT}.sh
done
fi
echo -e "\033[1m>>>>> Repository specific tests >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
bash .ci/run-repository.sh
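
As its header comment notes, `.ci/run-tests` starts Elasticsearch, optionally starts extra containers listed in `$RUNSCRIPTS`, then delegates to `run-repository.sh`. A hedged example; leaving `RUNSCRIPTS` empty skips the optional step, and any non-empty entry would need a matching `.ci/run-<name>.sh`:

```sh
# STACK_VERSION/TEST_SUITE mirror .ci/test-matrix.yml; RUNSCRIPTS left empty here
STACK_VERSION=8.4.4-SNAPSHOT TEST_SUITE=platinum RUNSCRIPTS="" bash .ci/run-tests
```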

.ci/test-matrix.yml Normal file
@@ -0,0 +1,14 @@
---
STACK_VERSION:
- "8.4.4-SNAPSHOT"
NODE_JS_VERSION:
- 18
- 16
- 14
TEST_SUITE:
- free
- platinum
exclude: ~

@@ -3,5 +3,3 @@ npm-debug.log
test/benchmarks
elasticsearch
.git
lib
junit-output

@@ -1,14 +1,13 @@
---
name: 🐛 Bug report
about: Create a report to help us improve
labels: ["Category: Bug"]
---
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**
locked, and assigned the `not reproducible` label.**
## 🐛 Bug Report

@@ -1,14 +1,13 @@
---
name: 🚀 Feature Proposal
about: Submit a proposal for a new feature
labels: ["Category: Feature"]
---
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**
locked, and assigned the `invalid` label.**
## 🚀 Feature Proposal

@@ -1,7 +1,6 @@
---
name: 💬 Questions / Help
about: If you have questions, please check our Gitter or Help repo
labels: ["Category: Question"]
---
## 💬 Questions and Help

.github/ISSUE_TEMPLATE/regression.md vendored Normal file
@@ -0,0 +1,56 @@
---
name: 💥 Regression Report
about: Report unexpected behavior that worked in previous versions
---
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `invalid` label.**
## 💥 Regression Report
A clear and concise description of what the regression is.
## Last working version
Worked up to version:
Stopped working in version:
## To Reproduce
Steps to reproduce the behavior:
Paste your code here:
```js
```
<!--
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed
to spin up a three nodes Elasticsearch cluster with security enabled.
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
https://github.com/delvedor/es-reproduce-issue
--->
## Expected behavior
A clear and concise description of what you expected to happen.
Paste the results here:
```js
```
## Your Environment
- *node version*: 6,8,10
- `@elastic/elasticsearch` *version*: >=7.0.0
- *typescript version*: 4.x (if applicable)
- *os*: Mac, Windows, Linux
- *any other relevant information*

@@ -1,92 +0,0 @@
---
name: 💥 Regression Report
description: Report unexpected behavior that worked in previous versions
labels: ["Category: Bug"]
body:
- type: markdown
attributes:
value: |
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
**Please read this entire template before posting any issue. If you ignore these instructions
and post an issue here that does not follow the instructions, your issue might be closed,
locked, and assigned the `Category: Not an issue` label.**
- type: textarea
id: report
attributes:
label: Regression report
description: A clear and concise description of what the regression is.
validations:
required: true
- type: input
id: last-working-version
attributes:
label: Last working version
description: Version of `@elastic/elasticsearch` where this last worked.
validations:
required: true
- type: textarea
id: to-reproduce
attributes:
label: To reproduce
description: |
Paste your code here that shows how to reproduce the behavior.
In some cases, it might be challenging to reproduce the bug in a few lines of code.
You can fork the following repository, which contains all the configuration needed to spin up a three nodes Elasticsearch cluster with security enabled.
[This repository](https://github.com/delvedor/es-reproduce-issue) also contains a preconfigured client instance that you can use to reproduce the issue.
validations:
required: true
- type: textarea
id: expected-behavior
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: input
id: node-version
attributes:
label: Node.js version
description: What version of Node.js you are using (`node --version`).
validations:
required: true
- type: input
id: typescript-version
attributes:
label: TypeScript version
description: TypeScript version you are using, if applicable.
- type: input
id: elasticsearch-client-version
attributes:
label: Elasticsearch client version
description: What version of `@elastic/elasticsearch` and `@elastic/transport` you are using (`npm ls -a | grep '@elastic'`).
validations:
required: true
- type: input
id: elasticsearch-version
attributes:
label: Elasticsearch server version
description: What version of Elasticsearch you are using.
validations:
required: true
- type: input
id: operating-system
attributes:
label: Operating system
description: What operating system you are running.
placeholder: e.g. Linux, MacOS, Windows
- type: textarea
id: env-info
attributes:
label: Any other relevant environment information.

@@ -1,18 +0,0 @@
name: Automerge
on:
pull_request_review:
types:
- submitted
jobs:
automerge:
runs-on: ubuntu-latest
if: github.event.review.state == 'approved'
steps:
- uses: reitermarkus/automerge@v2
with:
token: ${{ secrets.GH_TOKEN }}
merge-method: squash
pull-request-author-associations: OWNER
review-author-associations: OWNER,CONTRIBUTOR

@@ -1,27 +1,16 @@
---
name: Backport
on:
pull_request_target:
pull_request:
types:
- closed
- labeled
jobs:
backport:
name: Backport
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& (
github.event.action == 'closed'
|| (
github.event.action == 'labeled'
&& contains(github.event.label.name, 'backport')
)
)
name: Backport
steps:
- uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4
- name: Backport
uses: tibdex/backport@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}

@@ -1,43 +1,22 @@
---
name: Node CI
on:
pull_request: {}
on: [push, pull_request]
jobs:
paths-filter:
name: Detect files changed
runs-on: ubuntu-latest
outputs:
src-only: "${{ steps.changes.outputs.src-only }}"
steps:
- uses: actions/checkout@v4
- uses: dorny/paths-filter/@v2.11.1
id: changes
with:
filters: |
src-only:
- '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.buildkite,scripts}/**/*|catalog-info.yaml)'
- '.github/workflows/**'
test:
name: Test
runs-on: ${{ matrix.os }}
needs: paths-filter
# only run if code relevant to unit tests was changed
if: needs.paths-filter.outputs.src-only == 'true'
strategy:
fail-fast: false
matrix:
node-version: [18.x, 20.x, 22.x]
node-version: [14.x, 16.x, 18.x]
os: [ubuntu-latest, windows-latest, macOS-latest]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
@@ -53,17 +32,159 @@ jobs:
run: |
npm run test:unit
# - name: Acceptance test
# run: |
# npm run test:acceptance
# helpers-integration-test:
# name: Helpers integration test
# runs-on: ubuntu-latest
# strategy:
# matrix:
# node-version: [12.x, 14.x, 16.x]
# steps:
# - uses: actions/checkout@v2
# - name: Configure sysctl limits
# run: |
# sudo swapoff -a
# sudo sysctl -w vm.swappiness=1
# sudo sysctl -w fs.file-max=262144
# sudo sysctl -w vm.max_map_count=262144
# - name: Runs Elasticsearch
# uses: elastic/elastic-github-actions/elasticsearch@master
# with:
# stack-version: 8.0.0-SNAPSHOT
# - name: Use Node.js ${{ matrix.node-version }}
# uses: actions/setup-node@v1
# with:
# node-version: ${{ matrix.node-version }}
# - name: Install
# run: |
# npm install
# - name: Integration test
# run: |
# npm run test:integration:helpers
# bundler-support:
# name: Bundler support
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v2
# - name: Configure sysctl limits
# run: |
# sudo swapoff -a
# sudo sysctl -w vm.swappiness=1
# sudo sysctl -w fs.file-max=262144
# sudo sysctl -w vm.max_map_count=262144
# - name: Runs Elasticsearch
# uses: elastic/elastic-github-actions/elasticsearch@master
# with:
# stack-version: 8.0.0-SNAPSHOT
# - name: Use Node.js 14.x
# uses: actions/setup-node@v1
# with:
# node-version: 14.x
# - name: Install
# run: |
# npm install
# npm install --prefix test/bundlers/parcel-test
# npm install --prefix test/bundlers/rollup-test
# npm install --prefix test/bundlers/webpack-test
# - name: Build
# run: |
# npm run build --prefix test/bundlers/parcel-test
# npm run build --prefix test/bundlers/rollup-test
# npm run build --prefix test/bundlers/webpack-test
# - name: Run bundle
# run: |
# npm start --prefix test/bundlers/parcel-test
# npm start --prefix test/bundlers/rollup-test
# npm start --prefix test/bundlers/webpack-test
# mock-support:
# name: Mock support
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v2
# - name: Use Node.js 14.x
# uses: actions/setup-node@v1
# with:
# node-version: 14.x
# - name: Install
# run: |
# npm install
# npm install --prefix test/mock
# - name: Run test
# run: |
# npm test --prefix test/mock
# code-coverage:
# name: Code coverage
# runs-on: ubuntu-latest
# strategy:
# matrix:
# node-version: [14.x]
# steps:
# - uses: actions/checkout@v2
# - name: Use Node.js ${{ matrix.node-version }}
# uses: actions/setup-node@v1
# with:
# node-version: ${{ matrix.node-version }}
# - name: Install
# run: |
# npm install
# - name: Code coverage report
# run: |
# npm run test:coverage-report
# - name: Upload coverage to Codecov
# uses: codecov/codecov-action@v1
# with:
# file: ./coverage.lcov
# fail_ci_if_error: true
# - name: Code coverage 100%
# run: |
# npm run test:coverage-100
license:
name: License check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
strategy:
matrix:
node-version: [16.x]
- name: Use Node.js
uses: actions/setup-node@v4
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: 22.x
node-version: ${{ matrix.node-version }}
- name: Install
run: |
@@ -72,13 +193,3 @@ jobs:
- name: License checker
run: |
npm run license-checker
auto-approve:
name: Auto-approve
needs: [test, license]
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: github.actor == 'elasticmachine'
steps:
- uses: hmarr/auto-approve-action@v4
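
The jobs above map directly onto npm scripts, so the same checks can be reproduced locally before pushing; a minimal sketch using one of the matrix Node.js versions:

```sh
# Mirrors the "test" and "license" jobs of this workflow
npm install
npm run test:unit
npm run license-checker
```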

@@ -1,37 +0,0 @@
name: Publish Package to npm
on:
workflow_dispatch:
inputs:
branch:
description: "Git branch to build and publish"
required: true
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.branch }}
- uses: actions/setup-node@v3
with:
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- run: npm install -g npm
- run: npm install
- run: npm test
- run: npm publish --provenance --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
version=$(jq -r .version package.json)
gh release create \
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
--target "$BRANCH_NAME" \
-t "v$version" \
"v$version"
env:
BRANCH_NAME: ${{ github.event.inputs.branch }}
GH_TOKEN: ${{ github.token }}

@@ -1,43 +0,0 @@
#!/usr/bin/env bash
set -exuo pipefail
merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
pr_shortcode="elastic/elasticsearch-js#$pull_request_id"
# generate patch file
cd "$GITHUB_WORKSPACE/stack"
git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff
# set committer info
git config --global user.email "elasticmachine@users.noreply.github.com"
git config --global user.name "Elastic Machine"
# apply patch file
cd "$GITHUB_WORKSPACE/serverless"
git am -C1 --reject /tmp/patch.diff || git am --quit
# generate PR body comment
comment="Patch applied from $pr_shortcode"
# enumerate rejected patches in PR comment
has_rejects='false'
for f in ./**/*.rej; do
has_rejects='true'
comment="$comment
## Rejected patch \`$f\` must be resolved:
\`\`\`diff
$(cat "$f")
\`\`\`
"
done
# delete .rej files
rm -fv ./**/*.rej
# send data to output parameters
echo "$comment" > /tmp/pr_body
echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"

@@ -1,51 +0,0 @@
---
name: Apply PR changes to serverless
on:
pull_request_target:
types:
- closed
- labeled
jobs:
apply-patch:
name: Apply patch
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& (
(
github.event.action == 'closed'
&& contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
)
||
(
github.event.action == 'labeled'
&& github.event.label.name == 'apply-to-serverless'
)
)
steps:
- uses: actions/checkout@v4
with:
repository: elastic/elasticsearch-js
ref: main
path: stack
fetch-depth: 0
- uses: actions/checkout@v4
with:
repository: elastic/elasticsearch-serverless-js
ref: main
path: serverless
- name: Apply patch from stack to serverless
id: apply-patch
run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
- uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GH_TOKEN }}
path: serverless
title: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
commit-message: 'Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}'
body-path: /tmp/pr_body
draft: '${{ steps.apply-patch.outputs.PR_DRAFT }}'
add-paths: ':!*.rej'

@@ -1,21 +0,0 @@
---
name: 'Close stale issues and PRs'
on:
schedule:
- cron: '30 1 * * *'
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
with:
stale-issue-label: stale
stale-pr-label: stale
days-before-stale: 90
days-before-close: 14
exempt-issue-labels: 'good first issue'
close-issue-label: closed-stale
close-pr-label: closed-stale
stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'

.gitignore vendored
@@ -63,4 +63,3 @@ test/bundlers/**/bundle.js
test/bundlers/parcel-test/.parcel-cache
lib
junit-output

@@ -64,8 +64,8 @@ test
scripts
# ci configuration
.ci
.travis.yml
.buildkite
certs
.github
CODE_OF_CONDUCT.md

@@ -8,7 +8,7 @@ improving the documentation,
submitting bug reports and feature requests or writing code.
## Repository structure
The `main` branch is considered unstable, and it's compatible with Elasticsearch main. Unless you are patching an issue, new features should always be sent to the `main` branch, in case of a bugfix, it depends if the bug affects all the release lines.<br/>
The `master` branch is considered unstable, and it's compatible with Elasticsearch master. Unless you are patching an issue, new features should always be sent to the `master` branch, in case of a bugfix, it depends if the bug affects all the release lines.<br/>
There is a branch for every supported release line, such as `7.x` or `6.x`. We release bugfixes as soon as possible, while minor and major releases are published at the same time of the Elastic Stack.
Usually for every release line there will be a *published* version and a *next* version. Eg: the `7.x` branch contains the version published on npm, and bugfixes should be sent there, while `7.2` *(assuming that 7.1.x is released)* contains the next version, and new features should be sent there.
@@ -31,7 +31,7 @@ Once your changes are ready to submit for review:
1. Test your changes
Run the test suite to make sure that nothing is broken.
Usually running `npm test` is enough; our CI will take care of running the integration tests. If you want to run the integration tests yourself, see [the *Testing* section](#testing) below.
Usually run `npm test` is enough, our CI will take care of running the integration test. If you want to run the integration test yourself, see the *Testing* section below.
2. Submit a pull request
@@ -58,50 +58,36 @@ Once your changes are ready to submit for review:
### Code generation
The entire content of the `src/api/` directory is automatically generated from [the Elasticsearch specification](https://github.com/elastic/elasticsearch-specification), as is the `docs/reference.asciidoc` file.
This code generation is done using a separate repository that is not currently available to the public.
If you find discrepancies between this client's API code and what you see when actually interacting with an Elasticsearch API, you can open a pull request here to fix it.
For API fixes, it's likely a change will need to be made to the specification as well, to ensure your fix is not undone by the code generation process.
We will do our best to make sure this is addressed when reviewing and merging your changes.
PRs to improve the specification are also welcome!
It is implemented in TypeScript, so JavaScript devs should be able to understand it fairly easily.
Spec fixes are particularly helpful, as they will be reflected in ALL official Elasticsearch clients, not just this one.
The entire content of the API folder is generated as well as the `docs/reference.asciidoc` file.<br/>
If you want to run the code generation you should run the following command:
```sh
node scripts/generate --tag <tag name>
# or
node scripts/generate --branch <branch name>
```
Then you should copy the content of `api/generated.d.ts` into the `index.d.ts` file *(automate this step would be a nice pr!)*.
### Testing
There are a few different test scripts.
Usually during development you only need to run `npm test`, but if you want you can run just a part of the suite:
There are different test scripts, usually during development you only need to run `npm test`, but if you want you can run just a part of the suite, following you will find all the testing scripts and what they do.
| Script | Description |
|---|---|
| `npm run test:unit` | Runs the content of the `test/unit` folder. |
| `npm run test:coverage-100` | Runs unit tests enforcing 100% coverage. |
| `npm run test:coverage-report` | Runs unit tests and generates an `lcov` coverage report. |
| `npm run test:coverage-ui` | Runs unit tests and generates an HTML coverage report. |
| `npm run test:integration` | Runs the integration test runner.<br/>**Note: requires a living instance of Elasticsearch.** |
| `npm run lint` | Run the [linter](https://github.com/standard/ts-standard). |
| `npm run lint:fix` | Fixes linter errors. |
| `npm run license-checker` | Checks that all dependencies have acceptable open source licenses. |
| `npm test` | Runs `lint` and `test:unit`. |
| `npm run test:behavior` | Runs the content of the `test/behavior` folder. |
| `npm run test:types` | Runs the content of the `test/types` folder. |
| `npm run test:unit -- --cov --coverage-report=html` | Runs the content of the `test/unit` folder and calculates the code coverage. |
| `npm run test:integration` | Runs the integration test runner.<br/>*Note: it requires a living instance of Elasticsearch.* |
| `npm run lint` | Run the [linter](https://standardjs.com/). |
| `npm run lint:fix` | Fixes the lint errors. |
| `npm test` | Runs lint, unit, behavior, and types test. |
#### Integration test
The integration test are generated on the fly by the runner you will find inside `test/integration`, once you execute it, it will clone the Elasticsearch repository and checkout the correct version to grab the [OSS yaml files](https://github.com/elastic/elasticsearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/test) and the [Elastic licensed yaml files](https://github.com/elastic/elasticsearch/tree/master/x-pack/plugin/src/test/resources/rest-api-spec/test) that will be used for generating the test.
The integration tests are generated on the fly by the runner you will find inside `test/integration`.
Once you execute it, it will fetch the [YAML REST test files](https://github.com/elastic/elasticsearch/tree/main/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test) from our artifacts API.
These are used to generate the integration tests.
Usually this step is executed by CI since it takes some time, but you can easily run this yourself!
Just follow this steps:
1. Boot a fresh Elasticsearch instance, which can be done in a Docker container by running `STACK_VERSION=8.10.0 DETACH=true .buildkite/run-elasticsearch.sh`, where `STACK_VERSION` and `DETACH` environment variables can be adjusted to your needs. A `TEST_SUITE` env var can also be set to `free` or `platinum`, and defaults to `free`.
1. Run `npm run test:integration` to run the whole suite, or `npm run test:integration -- --bail` to stop after the first failure.
1. Grab a coffee, it will take some time. ;)
This suite is very large, and not all tests will pass.
This is fine.
This suite is mostly used to identify notable changes in success/fail rate over time as we make changes to the client.
Usually this step is executed by CI since it takes some time, but you can easily run this yourself! Just follow this steps:
1. Boot an Elasticsearch instance, you can do that by running `./scripts/es-docker.sh` or `./scripts/es-docker-platinum.sh`, the first one will work only with the OSS APIs, while the second will work also with the Elastic licensed APIs;
1. If you are running the OSS test, you should use `npm run test:integration`, otherwise use `TEST_ES_SERVER=https://elastic:changeme@localhost:9200 npm run test:integration`. You can also pass a `-b` parameter if you want the test to bail out at the first failure: `npm run test:integration -- -b`;
1. Grab a coffee, it will take some time ;)
### Releasing
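
Putting the updated integration-test instructions above together, a local run under the new `.buildkite` layout looks roughly like this; the stack version and flags are the ones quoted in the text, not fixed requirements:

```sh
# Boot a disposable Elasticsearch container (TEST_SUITE defaults to "free")
STACK_VERSION=8.10.0 DETACH=true .buildkite/run-elasticsearch.sh

# Run the generated YAML REST tests; --bail stops at the first failure
npm run test:integration -- --bail
```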

README.md
@@ -2,57 +2,27 @@
# Elasticsearch Node.js client
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/) [![Build Status](https://badge.buildkite.com/15e4246eb268ea78f6e10aa90bce38c1abb0a4489e79f5a0ac.svg)](https://buildkite.com/elastic/elasticsearch-javascript-client-integration-tests/builds?branch=main) [![Node CI](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml/badge.svg)](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml) [![codecov](https://codecov.io/gh/elastic/elasticsearch-js/branch/master/graph/badge.svg)](https://codecov.io/gh/elastic/elasticsearch-js) [![NPM downloads](https://img.shields.io/npm/dm/@elastic/elasticsearch.svg?style=flat)](https://www.npmjs.com/package/@elastic/elasticsearch)
**[Download the latest version of Elasticsearch](https://www.elastic.co/downloads/elasticsearch)**
or
**[sign-up](https://cloud.elastic.co/registration?elektra=en-ess-sign-up-page)**
**for a free trial of Elastic Cloud**.
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/) [![Build Status](https://clients-ci.elastic.co/buildStatus/icon?job=elastic%2Belasticsearch-js%2Bmain)](https://clients-ci.elastic.co/view/JavaScript/job/elastic+elasticsearch-js+main/) [![Node CI](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml/badge.svg)](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml) [![codecov](https://codecov.io/gh/elastic/elasticsearch-js/branch/master/graph/badge.svg)](https://codecov.io/gh/elastic/elasticsearch-js) [![NPM downloads](https://img.shields.io/npm/dm/@elastic/elasticsearch.svg?style=flat)](https://www.npmjs.com/package/@elastic/elasticsearch)
The official Node.js client for Elasticsearch.
## Installation
## Features
- One-to-one mapping with REST API.
- Generalized, pluggable architecture.
- Configurable, automatic discovery of cluster nodes.
- Persistent, Keep-Alive connections.
- Load balancing across all available nodes.
- Child client support.
- TypeScript support out of the box.
Refer to the [Installation section](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_installation)
of the getting started documentation.
## Connecting
Refer to the [Connecting section](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_connecting)
of the getting started documentation.
## Compatibility
The Elasticsearch client is compatible with currently maintained JS versions.
Language clients are forward compatible; meaning that clients support
communicating with greater or equal minor versions of Elasticsearch without
breaking. It does not mean that the client automatically supports new features
of newer Elasticsearch versions; it is only possible after a release of a new
client version. For example, a 8.12 client version won't automatically support
the new features of the 8.13 version of Elasticsearch, the 8.13 client version
is required for that. Elasticsearch language clients are only backwards
compatible with default distributions and without guarantees made.
| Elasticsearch Version | Elasticsearch-JS Branch | Supported |
| --------------------- | ------------------------ | --------- |
| main | main | |
| 8.x | 8.x | 8.x |
| 7.x | 7.x | 7.17 |
## Usage
* [Creating an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_creating_an_index)
* [Indexing a document](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_indexing_documents)
* [Getting documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_getting_documents)
* [Searching documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_searching_documents)
* [Updating documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_updating_documents)
* [Deleting documents](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_deleting_documents)
* [Deleting an index](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html#_deleting_an_index)
## Install
```
npm install @elastic/elasticsearch
```
### Node.js support
NOTE: The minimum supported version of Node.js is `v18`.
NOTE: The minimum supported version of Node.js is `v14`.
The client versioning follows the Elastic Stack versioning; this means that
major, minor, and patch releases are done following a precise schedule that
@ -77,7 +47,6 @@ of `^7.10.0`).
| `10.x` | `April 2021` | `7.12` (mid 2021) |
| `12.x` | `April 2022` | `8.2` (early 2022) |
| `14.x` | `April 2023` | `8.8` (early 2023) |
| `16.x` | `September 2023` | `8.11` (late 2023) |
### Compatibility
@ -98,51 +67,98 @@ npm install @elastic/elasticsearch@<major>
#### Browser
> [!WARNING]
> There is no official support for the browser environment. It exposes your Elasticsearch instance to everyone, which could lead to security issues.
We recommend that you write a lightweight proxy that uses this client instead; you can see a proxy example [here](./docs/examples/proxy).
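For illustration, here is a minimal sketch of that idea using Node's built-in `http` module; the index name and query shape are assumptions, error handling is omitted, and the linked proxy example is more complete.
```js
// Minimal proxy sketch: the browser calls this endpoint and only the
// server talks to Elasticsearch. Index name and query are illustrative.
const http = require('node:http')
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

http.createServer(async (req, res) => {
  const { searchParams } = new URL(req.url, 'http://localhost')
  const result = await client.search({
    index: 'game-of-thrones',
    query: { match: { quote: searchParams.get('q') ?? '' } }
  })
  res.setHeader('content-type', 'application/json')
  res.end(JSON.stringify(result.hits.hits))
}).listen(3000)
```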
## Documentation
* [Introduction](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/introduction.html)
* [Usage](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#client-usage)
* [Client configuration](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-configuration.html)
* [API reference](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/api-reference.html)
* [Authentication](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#authentication)
* [Observability](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html)
* [Creating a child client](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/child.html)
* [Client helpers](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html)
* [Typescript support](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/typescript.html)
* [Testing](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-testing.html)
* [Examples](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/examples.html)
## Quick start
```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

async function run () {
  // Let's start by indexing some data
  await client.index({
    index: 'game-of-thrones',
    document: {
      character: 'Ned Stark',
      quote: 'Winter is coming.'
    }
  })

  await client.index({
    index: 'game-of-thrones',
    document: {
      character: 'Daenerys Targaryen',
      quote: 'I am the blood of the dragon.'
    }
  })

  await client.index({
    index: 'game-of-thrones',
    document: {
      character: 'Tyrion Lannister',
      quote: 'A mind needs books like a sword needs a whetstone.'
    }
  })

  // Here we are forcing an index refresh, otherwise we will not
  // get any result in the subsequent search
  await client.indices.refresh({ index: 'game-of-thrones' })

  // Let's search!
  const result = await client.search({
    index: 'game-of-thrones',
    query: {
      match: { quote: 'winter' }
    }
  })

  console.log(result.hits.hits)
}

run().catch(console.log)
```
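The client created in the quick start can also spawn child clients, which reuse the parent's connection pool while overriding selected options. A minimal sketch (the header name is an illustrative assumption):
```js
// A child client shares connections with its parent but can override
// configuration, e.g. per-tenant headers. The header name is hypothetical.
const childClient = client.child({
  headers: { 'x-tenant-id': 'tenant-42' }
})

childClient.info().then(console.log, console.log)
```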
## Install multiple versions
If you are using multiple versions of Elasticsearch, you need to use multiple versions of the client as well. In the past, installing multiple versions of the same package was not possible, but with `npm v6.9` you can do that via aliasing.
The command you must run to install a different version of the client is:
```sh
npm install <alias>@npm:@elastic/elasticsearch@<version>
```
For example, if you need to install `7.x` and `6.x`, you will run:
```sh
npm install es6@npm:@elastic/elasticsearch@6
npm install es7@npm:@elastic/elasticsearch@7
```
And your `package.json` will look like the following:
```json
"dependencies": {
"es6": "npm:@elastic/elasticsearch@^6.7.0",
"es7": "npm:@elastic/elasticsearch@^7.0.0"
}
```
You can then require the packages in your code using the aliases you defined:
```js
const { Client: Client6 } = require('es6')
const { Client: Client7 } = require('es7')
@ -160,10 +176,7 @@ client6.info().then(console.log, console.log)
client7.info().then(console.log, console.log)
```
Finally, if you want to install the client for the next version of Elasticsearch *(the one that lives in Elasticsearch's main branch)*, you can use the following command:
```sh
npm install esmain@github:elastic/elasticsearch-js
```

View File

@ -1,47 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/catalog-info.json
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
  name: elasticsearch-js
spec:
  type: library
  owner: group:devtools-team
  lifecycle: production
---
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
apiVersion: backstage.io/v1alpha1
kind: Resource
metadata:
  name: elasticsearch-js-integration-tests
  description: elasticsearch-js - integration tests
spec:
  type: buildkite-pipeline
  owner: group:devtools-team
  system: buildkite
  implementation:
    apiVersion: buildkite.elastic.dev/v1
    kind: Pipeline
    metadata:
      name: elasticsearch-js - integration tests
    spec:
      repository: elastic/elasticsearch-js
      pipeline_file: .buildkite/pipeline.yml
      teams:
        devtools-team:
          access_level: MANAGE_BUILD_AND_READ
        everyone:
          access_level: READ_ONLY
      provider_settings:
        build_pull_requests: false
        build_branches: false
      cancel_intermediate_builds: true
      cancel_intermediate_builds_branch_filter: "!main"
      schedules:
        main:
          branch: "main"
          cronline: "@daily"
        8_14:
          branch: "8.14"
          cronline: "@daily"

View File

@ -91,95 +91,6 @@ const client = new Client({
})
----
[discrete]
[[redaction]]
==== Redaction of potentially sensitive data
When the client raises an `Error` that originated at the HTTP layer, like a `ConnectionError` or `TimeoutError`, a `meta` object is often attached to the error object that includes metadata useful for debugging, like request and response information. Because this can include potentially sensitive data, like authentication secrets in an `Authorization` header, the client takes measures to redact common sources of sensitive data when this metadata is attached and serialized.
If your configuration requires extra headers or other configurations that may include sensitive data, you may want to adjust these settings to account for that.
By default, the `redaction` option is set to `{ type: 'replace' }`, which recursively searches for sensitive key names (case insensitive) and replaces their values with the string `[redacted]`.
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers.authorization) // prints "[redacted]"
}
----
If you would like to redact additional properties, you can include additional key names to search and replace:
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  headers: { 'X-My-Secret-Password': 'shhh it\'s a secret!' },
  redaction: {
    type: 'replace',
    additionalKeys: ['x-my-secret-password']
  }
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers['X-My-Secret-Password']) // prints "[redacted]"
}
----
Alternatively, if you know you're not going to use the metadata at all, setting the redaction type to `remove` will strip all optional sources of potentially sensitive data entirely, or replace them with `null` for required properties.
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  redaction: { type: 'remove' }
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers) // undefined
}
----
Finally, if you prefer to turn off redaction altogether, perhaps while debugging on a local developer environment, you can set the redaction type to `off`. This will revert the client to pre-8.11.0 behavior, where basic redaction is only performed during common serialization methods like `console.log` and `JSON.stringify`.
WARNING: Setting `redaction.type` to `off` is not recommended in production environments.
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  redaction: { type: 'off' }
})

try {
  await client.indices.create({ index: 'my_index' })
} catch (err) {
  console.log(err.meta.meta.request.options.headers.authorization) // the actual header value will be logged
}
----
[discrete]
==== Migrate to v8

View File

@ -11,7 +11,7 @@ const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  maxRetries: 5,
  requestTimeout: 60000,
  sniffOnStart: true
@ -260,11 +260,11 @@ _Default:_ `false`
_Default:_ `null`
|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it's higher, it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
_Default:_ `null`
|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it's higher, it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH +
_Default:_ `null`
|===

View File

@ -1,354 +1,6 @@
[[changelog-client]]
== Release notes
[discrete]
=== 8.15.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.15.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
[discrete]
===== OpenTelemetry zero-code instrumentation support
For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
See {jsclient}/observability.html#_opentelemetry[the docs]
for more information.
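As a rough sketch only (assuming `@opentelemetry/sdk-node` is installed and an exporter is configured separately), no client-specific code is needed beyond starting an OpenTelemetry SDK before using the client:
[source,js]
----
// Sketch: once an OpenTelemetry SDK is running, each Elasticsearch request
// made by the client is recorded as a span automatically.
const { NodeSDK } = require('@opentelemetry/sdk-node')
const { Client } = require('@elastic/elasticsearch')

const sdk = new NodeSDK()
sdk.start()

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

client.search({ index: 'my-index', query: { match_all: {} } }).then(console.log, console.log)
----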
[discrete]
=== 8.14.1
[discrete]
==== Features
[discrete]
===== Improved support for Elasticsearch `8.14`
Updated types based on fixes and changes to the Elasticsearch specification.
[discrete]
=== 8.14.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.14.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
[discrete]
===== ES|QL object API helper
A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
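A hedged sketch of the helper's shape (the query and index are illustrative; see the linked pull request for the exact API):
[source,js]
----
// Sketch: helpers.esql and toRecords follow the linked PR; the query is illustrative.
const result = await client.helpers.esql({
  query: 'FROM my-index | LIMIT 10'
}).toRecords()

// Each record is a plain object keyed by column name.
console.log(result.records)
----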
[discrete]
===== `onSuccess` callback added to bulk helper
The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
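A minimal sketch (the datasource, index, and exact callback signature are assumptions based on the linked pull request):
[source,js]
----
// Sketch: onSuccess fires for each successful operation, onDrop for each failed one.
const result = await client.helpers.bulk({
  datasource: [{ id: 1 }, { id: 2 }],
  onDocument () {
    return { index: { _index: 'my-index' } }
  },
  onSuccess ({ result, document }) {
    console.log('indexed', document)
  },
  onDrop (doc) {
    console.error('dropped', doc)
  }
})

console.log(result)
----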
[discrete]
===== Request retries are more polite
https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
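Retry count and timeouts remain configurable on the client; a minimal sketch (the values shown are illustrative only):
[source,js]
----
// Sketch: maxRetries and requestTimeout are standard client options.
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  maxRetries: 3,
  requestTimeout: 30000
})
----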
[discrete]
=== 8.13.1
[discrete]
==== Fixes
[discrete]
===== Pin @elastic/transport to `~8.4.1`
Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
[discrete]
=== 8.13.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.13.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
[discrete]
==== Fixes
[discrete]
===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
[discrete]
=== 8.12.3
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.4.1`
Switching from `^8.4.1` to `~8.4.1` ensures 8.12 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.12.2
[discrete]
==== Fixes
[discrete]
===== Upgrade transport to 8.4.1 https://github.com/elastic/elasticsearch-js/pull/2137[#2137]
Upgrades `@elastic/transport` to 8.4.1 to resolve https://github.com/elastic/elastic-transport-js/pull/83[a bug] where arrays in error diagnostics were unintentionally transformed into objects.
[discrete]
=== 8.12.1
[discrete]
==== Fixes
[discrete]
===== Fix hang in bulk helper semaphore https://github.com/elastic/elasticsearch-js/pull/2027[#2027]
The failing state could be reached when a server's response times are slower than the bulk helper's `flushInterval`.
[discrete]
=== 8.12.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.12.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
[discrete]
=== 8.11.1
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.4.0`
Switching from `^8.4.0` to `~8.4.0` ensures 8.11 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.11.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.11.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.11/release-notes-8.11.0.html[here].
[discrete]
===== Enhanced support for redacting potentially sensitive data https://github.com/elastic/elasticsearch-js/pull/2095[#2095]
`@elastic/transport` https://github.com/elastic/elastic-transport-js/releases/tag/v8.4.0[version 8.4.0] introduces enhanced measures for ensuring that request metadata attached to some `Error` objects is redacted. This functionality is primarily to address custom logging solutions that don't use common serialization methods like `JSON.stringify`, `console.log`, or `util.inspect`, which were already accounted for.
See <<redaction>> for more information.
[discrete]
=== 8.10.1
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.3.4`
Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.10.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.10.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.10/release-notes-8.10.0.html[here].
[discrete]
=== 8.9.2
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.3.4`
Switching from `^8.3.4` to `~8.3.4` ensures 8.9 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.9.1
[discrete]
==== Fixes
[discrete]
===== Upgrade Transport https://github.com/elastic/elasticsearch-js/pull/1968[#1968]
Upgrades `@elastic/transport` to the latest patch release to fix https://github.com/elastic/elastic-transport-js/pull/69[a bug] that could cause the process to exit when handling malformed `HEAD` requests.
[discrete]
=== 8.9.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.9.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.html[here].
[discrete]
===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
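A minimal sketch of the new behavior (the datasource, index, and added field are illustrative assumptions; see the linked docs for the exact contract):
[source,js]
----
// Sketch: returning a tuple of [operation, document] from onDocument sends
// the modified document instead of the original.
const result = await client.helpers.bulk({
  datasource: [{ user: 'arya' }, { user: 'sansa' }],
  onDocument (doc) {
    return [
      { index: { _index: 'my-index' } },
      { ...doc, ingested_at: new Date().toISOString() }
    ]
  }
})

console.log(result)
----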
[discrete]
==== Fixes
[discrete]
===== Updated `user-agent` header https://github.com/elastic/elasticsearch-js/pull/1954[#1954]
The `user-agent` header the client used to connect to Elasticsearch was using a non-standard format that has been improved.
[discrete]
=== 8.8.2
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.3.2`
Switching from `^8.3.2` to `~8.3.2` ensures 8.8 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.8.1
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.8.1`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.1.html[here].
[discrete]
==== Fixes
[discrete]
===== Fix index drift bug in bulk helper https://github.com/elastic/elasticsearch-js/pull/1759[#1759]
Fixes a bug in the bulk helper that would cause `onDrop` to send back the wrong JSON document or error on a nonexistent document when an error occurred on a bulk HTTP request that contained a `delete` action.
[discrete]
===== Fix a memory leak caused by an outdated version of Undici https://github.com/elastic/elasticsearch-js/pull/1902[#1902]
Undici 5.5.1, used by https://github.com/elastic/elastic-transport-js[elastic-transport-js], could create a memory leak when a high volume of requests created too many HTTP `abort` listeners. Upgrading Undici to 5.22.1 removed the memory leak.
[discrete]
=== 8.8.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v8.8.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.0.html[here].
[discrete]
==== Fixes
[discrete]
===== Fix type declarations for legacy types with a body key https://github.com/elastic/elasticsearch-js/pull/1784[#1784]
Prior releases contained a bug where type declarations for legacy types that include a `body` key were not actually importing the type that includes the `body` key.
[discrete]
=== 8.7.3
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.3.1`
Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.7.0
[discrete]
===== Support for Elasticsearch `v8.7.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].
[discrete]
=== 8.6.1
[discrete]
==== Fixes
[discrete]
===== Bump @elastic/transport to `~8.3.1`
Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
[discrete]
=== 8.6.0
[discrete]
===== Bump @elastic/transport to 8.3.1+ https://github.com/elastic/elasticsearch-js/pull/1802[#1802]
The `@elastic/transport` dependency has been bumped to `~8.3.1` to ensure
fixes to the `maxResponseSize` option are available in the client.
[discrete]
===== Support for Elasticsearch `v8.6.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.6/release-notes-8.6.0.html[here].
[discrete]
=== 8.5.0
[discrete]
===== Support for Elasticsearch `v8.5.0`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.5/release-notes-8.5.0.html[here].
[discrete]
=== 8.4.0
@ -564,9 +216,6 @@ The client API leaks HTTP-related notions in many places, and removing them woul
This could be a rather big breaking change, so a double solution could be used during the 8.x lifecycle (accepting body keys without them being wrapped in the body, as well as the current solution).
To convert code from 7.x, you need to remove the `body` parameter from all endpoint requests.
For instance, this is an example for the `search` endpoint:
[source,js]
----
// from
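// Illustrative sketch completing the truncated example: the 7.x form is shown commented out.
// const response = await client.search({
//   index: 'my-index',
//   body: { query: { match: { foo: 'bar' } } }
// })
// to
const response = await client.search({
  index: 'my-index',
  query: { match: { foo: 'bar' } }
})
----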
@ -605,12 +254,6 @@ If you weren't extending the internals of the client, this won't be a breaking c
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
The client will expose a new request-specific option to still get the full response details.
The new behaviour returns the `body` value directly as the response.
If you want to have the 7.x response format, you need to add `meta: true` to the request.
This will return all the HTTP meta information, including the `body`.
For instance, this is an example for the `search` endpoint:
[source,js]
----
// from
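// Illustrative sketch completing the truncated example: the 7.x form is shown commented out.
// const { body } = await client.search({ index: 'my-index', body: { query: { match_all: {} } } })
// to
const response = await client.search({
  index: 'my-index',
  query: { match_all: {} }
})
// or, to keep the 7.x-style full response details:
const detailed = await client.search(
  { index: 'my-index', query: { match_all: {} } },
  { meta: true }
)
console.log(response.hits.hits, detailed.statusCode)
----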

View File

@ -11,8 +11,6 @@ This page contains the information you need to connect and use the Client with
* <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
* <<client-connect-proxy, Connecting through a proxy>>
* <<client-error-handling, Handling errors>>
* <<keep-alive, Keep-alive connections>>
* <<close-connections, Closing a client's connections>>
* <<product-check, Automatic product check>>
[[authentication]]
@ -541,17 +539,11 @@ If you need to pass through an http(s) proxy for connecting to {es}, the client
out of the box offers a handy configuration for helping you with it. Under the
hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.
IMPORTANT: In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations.
To use a proxy, you will need to use the `HttpConnection` class from `@elastic/transport` instead.
[source,js]
----
import { HttpConnection } from '@elastic/transport'
const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://localhost:8080',
  Connection: HttpConnection,
})
----
@ -561,12 +553,11 @@ Basic authentication is supported as well:
----
const client = new Client({
  node: 'http://localhost:9200',
  proxy: 'http://user:pwd@localhost:8080',
  Connection: HttpConnection,
})
----
If you are connecting through a non-http(s) proxy, such as a `socks5` or `pac`,
you can use the `agent` option to configure it.
[source,js]
@ -576,8 +567,7 @@ const client = new Client({
  node: 'http://localhost:9200',
  agent () {
    return new SocksProxyAgent('socks://127.0.0.1:1080')
  },
  Connection: HttpConnection,
})
----
@ -661,51 +651,6 @@ a|* `name` - `string`
* `headers` - `object`, the response headers
|===
[[keep-alive]]
[discrete]
=== Keep-alive connections
By default, the client uses persistent, keep-alive connections to reduce the overhead of creating a new HTTP connection for each Elasticsearch request.
If you are using the default `UndiciConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 10 minutes.
If you are using the legacy `HttpConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 1 minute.
If you need to disable keep-alive connections, you can override the HTTP agent with your preferred https://nodejs.org/api/http.html#http_new_agent_options[HTTP agent options]:
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // the function takes as parameter the option
  // object passed to the Connection constructor
  agent: (opts) => new CustomAgent()
})
----
Or you can disable the HTTP agent entirely:
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // Disable agent and keep-alive
  agent: false
})
----
[discrete]
[[close-connections]]
=== Closing a client's connections
If you would like to close all open connections being managed by an instance of the client, use the `close()` function:
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200'
});
client.close();
----
[discrete]
[[product-check]]
=== Automatic product check

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.clearCachedRealms({
realms: "default_file,ldap1",
});
console.log(response);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.forcemerge({
index: ".ds-my-data-stream-2099.03.07-000001",
max_num_segments: 1,
});
console.log(response);
----

View File

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
settings: {
analysis: {
analyzer: {
my_analyzer: {
tokenizer: "whitespace",
filter: ["stemmer"],
},
},
},
},
});
console.log(response);
----

View File

@ -1,40 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.watcher.putWatch({
id: "cluster_health_watch",
trigger: {
schedule: {
interval: "10s",
},
},
input: {
http: {
request: {
host: "localhost",
port: 9200,
path: "/_cluster/health",
},
},
},
condition: {
compare: {
"ctx.payload.status": {
eq: "red",
},
},
},
actions: {
send_email: {
email: {
to: "username@example.org",
subject: "Cluster Status Warning",
body: "Cluster status is RED",
},
},
},
});
console.log(response);
----

View File

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.migration.postFeatureUpgrade();
console.log(response);
----

View File

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.shardStores();
console.log(response);
----

View File

@ -1,15 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.renderSearchTemplate({
source: '{ "query": {{#toJson}}my_query{{/toJson}} }',
params: {
my_query: {
match_all: {},
},
},
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
tokenizer: "standard",
filter: ["asciifolding"],
text: "açaí à la carte",
});
console.log(response);
----

View File

@ -1,67 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "italian_example",
settings: {
analysis: {
filter: {
italian_elision: {
type: "elision",
articles: [
"c",
"l",
"all",
"dall",
"dell",
"nell",
"sull",
"coll",
"pell",
"gl",
"agl",
"dagl",
"degl",
"negl",
"sugl",
"un",
"m",
"t",
"s",
"v",
"d",
],
articles_case: true,
},
italian_stop: {
type: "stop",
stopwords: "_italian_",
},
italian_keywords: {
type: "keyword_marker",
keywords: ["esempio"],
},
italian_stemmer: {
type: "stemmer",
language: "light_italian",
},
},
analyzer: {
rebuilt_italian: {
tokenizer: "standard",
filter: [
"italian_elision",
"lowercase",
"italian_stop",
"italian_keywords",
"italian_stemmer",
],
},
},
},
},
});
console.log(response);
----

View File

@ -1,19 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.inference.put({
task_type: "text_embedding",
inference_id: "my-e5-model",
inference_config: {
service: "elasticsearch",
service_settings: {
num_allocations: 1,
num_threads: 1,
model_id: ".multilingual-e5-small",
},
},
});
console.log(response);
----

View File

@ -1,13 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.putIndexTemplate({
name: "my-data-stream-template",
index_patterns: ["my-data-stream*"],
data_stream: {},
priority: 500,
});
console.log(response);
----

View File

@ -4,9 +4,12 @@
[source, js]
----
const response = await client.cluster.putSettings({
persistent: {
"xpack.security.transport.filter.enabled": false,
},
});
console.log(response);
body: {
transient: {
'cluster.routing.use_adaptive_replica_selection': false
}
}
})
console.log(response)
----

View File

@ -4,12 +4,15 @@
[source, js]
----
const response = await client.update({
index: "test",
id: 1,
index: 'test',
id: '1',
body: {
doc: {
name: "new_name",
name: 'new_name'
},
doc_as_upsert: true,
});
console.log(response);
detect_noop: false
}
})
console.log(response)
----

View File

@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "sales",
size: 0,
filter_path: "aggregations",
query: {
term: {
type: "t-shirt",
},
},
aggs: {
avg_price: {
avg: {
field: "price",
},
},
},
});
console.log(response);
----

View File

@ -1,26 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "asciifold_example",
settings: {
analysis: {
analyzer: {
standard_asciifolding: {
tokenizer: "standard",
filter: ["my_ascii_folding"],
},
},
filter: {
my_ascii_folding: {
type: "asciifolding",
preserve_original: true,
},
},
},
},
});
console.log(response);
----

View File

@ -1,37 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.cluster.putComponentTemplate({
name: "component_template1",
template: {
mappings: {
properties: {
"@timestamp": {
type: "date",
},
},
},
},
});
console.log(response);
const response1 = await client.cluster.putComponentTemplate({
name: "runtime_component_template",
template: {
mappings: {
runtime: {
day_of_week: {
type: "keyword",
script: {
source:
"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
},
},
},
},
},
});
console.log(response1);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.transform.startTransform({
transform_id: "ecommerce_transform",
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.tasks.list({
human: "true",
detailed: "true",
actions: "indices:data/write/bulk",
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.watcher.executeWatch({
id: "my_watch",
});
console.log(response);
----

View File

@ -1,39 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "basque_example",
settings: {
analysis: {
filter: {
basque_stop: {
type: "stop",
stopwords: "_basque_",
},
basque_keywords: {
type: "keyword_marker",
keywords: ["Adibidez"],
},
basque_stemmer: {
type: "stemmer",
language: "basque",
},
},
analyzer: {
rebuilt_basque: {
tokenizer: "standard",
filter: [
"lowercase",
"basque_stop",
"basque_keywords",
"basque_stemmer",
],
},
},
},
},
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ccr.followStats({
index: "<index>",
});
console.log(response);
----

View File

@ -1,20 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
query: {
has_child: {
type: "child",
query: {
match_all: {},
},
max_children: 10,
min_children: 2,
score_mode: "min",
},
},
});
console.log(response);
----

View File

@ -1,28 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "sales",
mappings: {
properties: {
tags: {
type: "keyword",
},
comments: {
type: "nested",
properties: {
username: {
type: "keyword",
},
comment: {
type: "text",
},
},
},
},
},
});
console.log(response);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ml.flushJob({
job_id: "low_request_rate",
calc_interim: true,
});
console.log(response);
----

View File

@ -0,0 +1,14 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: 'twitter',
size: '0',
q: 'extra:test',
filter_path: 'hits.total'
})
console.log(response)
----

View File

@ -1,23 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
aggs: {
JapaneseCars: {
terms: {
field: "make",
include: ["mazda", "honda"],
},
},
ActiveCarManufacturers: {
terms: {
field: "make",
exclude: ["rover", "jensen"],
},
},
},
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.invalidateToken({
username: "myuser",
});
console.log(response);
----

View File

@ -1,28 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
tokenizer: "keyword",
char_filter: [
{
type: "mapping",
mappings: [
"٠ => 0",
"١ => 1",
"٢ => 2",
"٣ => 3",
"٤ => 4",
"٥ => 5",
"٦ => 6",
"٧ => 7",
"٨ => 8",
"٩ => 9",
],
},
],
text: "My license plate is ٢٥٠١٥",
});
console.log(response);
----

View File

@ -0,0 +1,43 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
body: {
aggs: {
countries: {
terms: {
field: 'artist.country',
order: [
{
'rock>playback_stats.avg': 'desc'
},
{
_count: 'desc'
}
]
},
aggs: {
rock: {
filter: {
term: {
genre: 'rock'
}
},
aggs: {
playback_stats: {
stats: {
field: 'play_count'
}
}
}
}
}
}
}
}
})
console.log(response)
----

View File

@ -1,21 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
mappings: {
properties: {
full_name: {
type: "text",
index_prefixes: {
min_chars: 1,
max_chars: 10,
},
},
},
},
});
console.log(response);
----

View File

@ -1,24 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.ingest.putPipeline({
id: "pipelineB",
description: "outer pipeline",
processors: [
{
pipeline: {
name: "pipelineA",
},
},
{
set: {
field: "outer_pipeline_set",
value: "outer",
},
},
],
});
console.log(response);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.getTemplate({
name: "template_1",
});
console.log(response);
----

View File

@ -1,38 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
mappings: {
properties: {
my_wildcard: {
type: "wildcard",
},
},
},
});
console.log(response);
const response1 = await client.index({
index: "my-index-000001",
id: 1,
document: {
my_wildcard: "This string can be quite lengthy",
},
});
console.log(response1);
const response2 = await client.search({
index: "my-index-000001",
query: {
wildcard: {
my_wildcard: {
value: "*quite*lengthy",
},
},
},
});
console.log(response2);
----

View File

@ -1,10 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.refresh({
index: "my-index-000001,my-index-000002",
});
console.log(response);
----

View File

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
body: {
aggs: {
genres: {
terms: {
script: {
source: "doc['genre'].value",
lang: 'painless'
}
}
}
}
}
})
console.log(response)
----

View File

@ -1,26 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.index({
index: "example",
document: {
location: {
type: "Polygon",
orientation: "LEFT",
coordinates: [
[
[-177, 10],
[176, 15],
[172, 0],
[176, -15],
[-177, -10],
[-177, 10],
],
],
},
},
});
console.log(response);
----

View File

@ -1,36 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.index({
index: "my-index-000001",
id: 5,
refresh: "true",
document: {
query: {
bool: {
should: [
{
match: {
message: {
query: "Japanese art",
_name: "query1",
},
},
},
{
match: {
message: {
query: "Holand culture",
_name: "query2",
},
},
},
],
},
},
},
});
console.log(response);
----

View File

@ -1,38 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.search({
index: "example-index",
retriever: {
rrf: {
retrievers: [
{
standard: {
query: {
term: {
text: "blue shoes sale",
},
},
},
},
{
standard: {
query: {
sparse_vector: {
field: "ml.tokens",
inference_id: "my_elser_model",
query: "What blue shoes are on sale?",
},
},
},
},
],
rank_window_size: 50,
rank_constant: 20,
},
},
});
console.log(response);
----

View File

@ -1,29 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000001",
settings: {
analysis: {
analyzer: {
my_custom_analyzer: {
type: "custom",
tokenizer: "standard",
char_filter: ["html_strip"],
filter: ["lowercase", "asciifolding"],
},
},
},
},
});
console.log(response);
const response1 = await client.indices.analyze({
index: "my-index-000001",
analyzer: "my_custom_analyzer",
text: "Is this déjà vu</b>?",
});
console.log(response1);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.analyze({
analyzer: "whitespace",
text: "The quick brown fox.",
});
console.log(response);
----

View File

@ -1,8 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.security.getServiceAccounts();
console.log(response);
----

View File

@ -1,35 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.indices.create({
index: "my-index-000003",
mappings: {
properties: {
metrics: {
subobjects: false,
properties: {
time: {
type: "object",
properties: {
min: {
type: "long",
},
max: {
type: "long",
},
},
},
},
},
},
},
});
console.log(response);
const response1 = await client.indices.getMapping({
index: "my-index-000003",
});
console.log(response1);
----

View File

@ -1,14 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.snapshot.restore({
repository: "my_repository",
snapshot: "my_snapshot_2099.05.06",
indices: "my-index,logs-my_app-default",
rename_pattern: "(.+)",
rename_replacement: "restored-$1",
});
console.log(response);
----

View File

@ -1,11 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.sql.query({
format: "txt",
query: "SELECT * FROM library ORDER BY page_count DESC LIMIT 5",
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.connector.updateApiKeyId({
connector_id: "my-connector",
api_key_id: "my-api-key-id",
api_key_secret_id: "my-connector-secret-id",
});
console.log(response);
----

View File

@ -1,12 +0,0 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
const response = await client.eql.search({
index: "my-data-stream",
query: '\n process where process.name == "regsvr32.exe"\n ',
size: 50,
});
console.log(response);
----

Some files were not shown because too many files have changed in this diff.