Compare commits
75 Commits
| SHA1 |
|---|
| f2d2cfc6dd |
| 5835505e21 |
| 919094a77b |
| 684bc29927 |
| 7f45fff571 |
| 36e4d8aa8d |
| 49159dba33 |
| d7157bfe87 |
| 10bdb39a68 |
| ab8ff69eac |
| fca53568d2 |
| d0f0b376c1 |
| 07b4c9966c |
| a2b68958c3 |
| b750c61686 |
| ccf98d126b |
| 49eaea0f69 |
| c562a730d0 |
| 56879a0642 |
| ab72356061 |
| 71f85b94cd |
| b34188ceac |
| db6d7bd2cc |
| f90f4306c2 |
| c5da6683f7 |
| 09d802a0ac |
| 096ef47d6a |
| 38e4b23831 |
| 04634af552 |
| f79f4e8f25 |
| 560dfd3763 |
| ebbc2961e0 |
| 6ccdab5280 |
| 8f9ed67fda |
| 4ebffbc0e8 |
| 282c76fb6a |
| 0a510b15f9 |
| 24890fd11d |
| be54dcd301 |
| 33e0873ecf |
| 27748779c6 |
| 697b594ea2 |
| 911af982b2 |
| 651165d842 |
| c58e93a77a |
| c4e793ca71 |
| 8604da555f |
| 57426c968b |
| 96b5b8eaba |
| eac00e1200 |
| 77c1ef36aa |
| 720b5b449b |
| a298517692 |
| 53ccd17dbf |
| f6c14b7268 |
| 4f1713c894 |
| af97ece807 |
| 46b2c99b7c |
| e0f54c789b |
| 6d2774d2a0 |
| 25d9afbc0b |
| d7e5ff5191 |
| 904c3bb28c |
| 33c4630a34 |
| 3e79c8e825 |
| d01582803c |
| e7c5b3dafa |
| 10277e217c |
| 03ecf707c9 |
| ca6948fb82 |
| 82cf15097d |
| 759138c375 |
| a0c5c98a99 |
| 451a805ecd |
| 071a6ba4bb |
.buildkite/Dockerfile (Normal file, 14 lines)
@@ -0,0 +1,14 @@
ARG NODE_VERSION=${NODE_VERSION:-18}
FROM node:$NODE_VERSION

# Install required tools
RUN apt-get clean -y && \
apt-get -qy update && \
apt-get -y install zip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

WORKDIR /usr/src/app

COPY . .
RUN npm install --production=false
.buildkite/certs/testnode.crt (Executable file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDYjCCAkqgAwIBAgIVAIClHav09e9XGWJrnshywAjUHTnXMA0GCSqGSIb3DQEB
|
||||
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
|
||||
ZXJhdGVkIENBMB4XDTIzMDMyODE3MDIzOVoXDTI2MDMyNzE3MDIzOVowEzERMA8G
|
||||
A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCV
|
||||
+t5/g6u2r3awCtzqp17KG0hRxzkVoJoF8DYzVh+Rv9ymxQW0C/U8dQihAjkZHaIA
|
||||
n49lSyNLkwWtmqQgPcimV4d6XuTYx2ahDixXYtjmoOSwH5dRtovKPCNKDPkUj9Vq
|
||||
NwMW0uB1VxniMKI4DnYFqBgHL9kQKhQqvas6Gx0X6ptGRCLYCtVxeFcau6nnkZJt
|
||||
urb+HNV5waOh0uTmsqnnslK3NjCQ/f030vPKxM5fOqOU5ajUHpZFJ6ZFmS32074H
|
||||
l+mZoRT/GtbnVtIg+CJXsWThF3/L4iBImv+rkY9MKX5fyMLJgmIJG68S90IQGR8c
|
||||
Z2lZYzC0J7zjMsYlODbDAgMBAAGjgYswgYgwHQYDVR0OBBYEFIDIcECn3AVHc3jk
|
||||
MpQ4r7Kc3WCsMB8GA1UdIwQYMBaAFJYCWKn16g+acbing4Vl45QGUBs0MDsGA1Ud
|
||||
EQQ0MDKCCWxvY2FsaG9zdIIIaW5zdGFuY2WHBH8AAAGHEAAAAAAAAAAAAAAAAAAA
|
||||
AAGCA2VzMTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBCwUAA4IBAQBtX3RQ5ATpfORM
|
||||
lrnhaUPGOWkjnb3p3BrdAWUaWoh136QhaXqxKiALQQhTtTerkXOcuquy9MmAyYvS
|
||||
9fDdGvLCAO8pPCXjnzonCHerCLGdS7f/eqvSFWCdy7LPHzTAFYfVWVvbZed+83TL
|
||||
bDY63AMwIexj34vJEStMapuFwWx05fstE8qZWIbYCL87sF5H/MRhzlz3ScAhQ1N7
|
||||
tODH7zvLzSxFGGEzCIKZ0iPFKbd3Y0wE6SptDSKhOqlnC8kkNeI2GjWsqVfHKsoF
|
||||
pDFmri7IfOucuvalXJ6xiHPr9RDbuxEXs0u8mteT5nFQo7EaEGdHpg1pNGbfBOzP
|
||||
lmj/dRS9
|
||||
-----END CERTIFICATE-----
|
||||
.buildkite/certs/testnode.key (Executable file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEowIBAAKCAQEAlfref4Ortq92sArc6qdeyhtIUcc5FaCaBfA2M1Yfkb/cpsUF
|
||||
tAv1PHUIoQI5GR2iAJ+PZUsjS5MFrZqkID3IpleHel7k2MdmoQ4sV2LY5qDksB+X
|
||||
UbaLyjwjSgz5FI/VajcDFtLgdVcZ4jCiOA52BagYBy/ZECoUKr2rOhsdF+qbRkQi
|
||||
2ArVcXhXGrup55GSbbq2/hzVecGjodLk5rKp57JStzYwkP39N9LzysTOXzqjlOWo
|
||||
1B6WRSemRZkt9tO+B5fpmaEU/xrW51bSIPgiV7Fk4Rd/y+IgSJr/q5GPTCl+X8jC
|
||||
yYJiCRuvEvdCEBkfHGdpWWMwtCe84zLGJTg2wwIDAQABAoIBAAEP7HYNNnDWdYMD
|
||||
+WAtYM12X/W5s/wUP94juaBI4u4iZH2EZodlixEdZUCTXgq43WsDUhxX05s7cE+p
|
||||
H5DuSCHtoo2WHvGKAposwRDm2f3YVWQ2Xyb2ahNt69LYHHWrO+XQ60YYTa3r8Gn3
|
||||
7dFR3I016/jyn5DeEVaglvS1dfj2UG4ybR4KkMfcKd94X0rKvz3wzAhHIh+hwMtv
|
||||
sVk7V4vSnKf2mJXwIVECTolnEJEkCjWjjymgUJYKT8yN7JnAsHRcvMa6kWwIGrLp
|
||||
oQCEaJwYM6ynCRS989pLt3vA2iu5VkYhiHXJ9Ds/5b5yzhzmj+ymzKbFKrrUUrmn
|
||||
+2Jp1K0CgYEAw8BchALsD/+JuoXjinA14MH7PZjIsXyhtPk+c4pk42iMNyg1J8XF
|
||||
Y/ITepLYsl2bZqQI1jOJdDqsTwIsva9r749lsmkYI3VOxhi7+qBK0sThR66C87lX
|
||||
iU2QpnZ9NloC6ort4a3MEvZ/gRQcXdBrNlNoza2p7PHAVDTnsdSrNKUCgYEAxCQV
|
||||
uo85oZyfnMufn/gcI9IeYOgiB0tO3a8cAFX2wQW1y935t6Z13ApUQc4EnCOH7ZBc
|
||||
td5kT+xGdRWnfPZ38FM1dd5MBdGE69s3q8pJDUExSgNLqaF6/5bD32qui66L3ugu
|
||||
eMjxrzqJsc2uQTPCs18SGsyRmf54DpY8HglOmUcCgYAGRDgx+a347SNJl1OrcOAo
|
||||
q80RMbzrAaRjmL8JD9se9I/YjC73cPtasbsx51WMkDaTWJj30nqJ//7YIKeyAtWf
|
||||
u6Vzyq19JRo6eTw7T7pVePwFQW7rwnks6hDBY3WqscL6IyxuVxP7X2zBgxVNY4ir
|
||||
Gox2WSLhdPPFPlRUewxoCQKBgAJvqE1u5fpZ5ame5dao0ECppXLyrymEB/C88g4X
|
||||
Az+WgJGNqkJbsO8QuccvdeMylcefmWcw4fIULzPZFwF4VjkH74wNPMh9t7buPBzI
|
||||
IGwnuSMAM3ph5RMzni8yNgTKIDaej6U0abwRcBBjS5zHtc1giusGS3CsNnWH7Cs7
|
||||
VlyVAoGBAK+prq9t9x3tC3NfCZH8/Wfs/X0T1qm11RiL5+tOhmbguWAqSSBy8OjX
|
||||
Yh8AOXrFuMGldcaTXxMeiKvI2cyybnls1MFsPoeV/fSMJbex7whdeJeTi66NOSKr
|
||||
oftUHvkHS0Vv/LicMEOufFGslb4T9aPJ7oyhoSlz9CfAutDWk/q/
|
||||
-----END RSA PRIVATE KEY-----
|
||||
.ci/functions/cleanup.sh → .buildkite/functions/cleanup.sh (Normal file → Executable file, 0 changes)
.ci/functions/imports.sh → .buildkite/functions/imports.sh (Normal file → Executable file, 0 changes)
.ci/functions/wait-for-container.sh → .buildkite/functions/wait-for-container.sh (Normal file → Executable file, 0 changes)
.buildkite/pipeline.yml (Normal file, 31 lines)
@@ -0,0 +1,31 @@
steps:
  - label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}"
    agents:
      provider: "gcp"
    env:
      NODE_VERSION: "{{ matrix.nodejs }}"
      TEST_SUITE: "{{ matrix.suite }}"
      STACK_VERSION: 8.8.0-SNAPSHOT
    matrix:
      setup:
        suite:
          - "free"
          - "platinum"
        nodejs:
          - "14"
          - "16"
          - "18"
          - "20"
    command: ./.buildkite/run-tests.sh
    artifact_paths: "./junit-output/junit-*.xml"
  - wait: ~
    continue_on_failure: true
  - label: ":junit: Test results"
    agents:
      provider: "gcp"
      image: family/core-ubuntu-2204
    plugins:
      - junit-annotate#v2.4.1:
          artifacts: "junit-output/junit-*.xml"
          job-uuid-file-pattern: 'junit-(.*).xml'
          fail-build-on-error: true
.buildkite/run-client.sh (Executable file, 31 lines)
@@ -0,0 +1,31 @@
#!/usr/bin/env bash
#
# Once called Elasticsearch should be up and running
#
script_path=$(dirname "$(realpath -s "$0")")
set -euo pipefail
repo=$(pwd)

export NODE_VERSION=${NODE_VERSION:-18}

echo "--- :javascript: Building Docker image"
docker build \
--file "$script_path/Dockerfile" \
--tag elastic/elasticsearch-js \
--build-arg NODE_VERSION="$NODE_VERSION" \
.

echo "--- :javascript: Running $TEST_SUITE tests"
mkdir -p "$repo/junit-output"
docker run \
--network="${network_name}" \
--env "TEST_ES_SERVER=${elasticsearch_url}" \
--env "ELASTIC_PASSWORD=${elastic_password}" \
--env "TEST_SUITE=${TEST_SUITE}" \
--env "ELASTIC_USER=elastic" \
--env "BUILDKITE=true" \
--volume "$repo/junit-output:/junit-output" \
--name elasticsearch-js \
--rm \
elastic/elasticsearch-js \
bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"
@@ -26,12 +26,15 @@ script_path=$(dirname $(realpath -s $0))
source $script_path/functions/imports.sh
set -euo pipefail

echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on seperate terminals \033[0m"
echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on separate terminals \033[0m"
cleanup_node $es_node_name

master_node_name=${es_node_name}
cluster_name=${moniker}${suffix}

# Set vm.max_map_count kernel setting to 262144
sudo sysctl -w vm.max_map_count=262144

declare -a volumes
environment=($(cat <<-END
--env ELASTIC_PASSWORD=$elastic_password
@@ -73,6 +76,7 @@ END
))
else
environment+=($(cat <<-END
--env node.roles=data,data_cold,data_content,data_frozen,data_hot,data_warm,ingest,master,ml,remote_cluster_client,transform
--env xpack.security.enabled=false
--env xpack.security.http.ssl.enabled=false
END
@@ -84,6 +88,13 @@ if [[ "$TEST_SUITE" == "platinum" ]]; then
cert_validation_flags="--insecure --cacert /usr/share/elasticsearch/config/certs/ca.crt --resolve ${es_node_name}:443:127.0.0.1"
fi

echo "--- :elasticsearch: Environment setup"
echo "TEST_SUITE: $TEST_SUITE"
echo "Elasticsearch URL: $elasticsearch_url"
echo "Elasticsearch External URL: $external_elasticsearch_url"


echo "--- :elasticsearch: Running container"
# Pull the container, retry on failures up to 5 times with
# short delays between each attempt. Fixes most transient network errors.
docker_pull_attempts=0
@@ -138,6 +149,4 @@ END
if wait_for_container "$es_node_name" "$network_name"; then
echo -e "\033[32;1mSUCCESS:\033[0m Running on: $node_url\033[0m"
fi

done

.buildkite/run-tests.sh (Executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
#
# Script to run Elasticsearch container and Elasticsearch client integration tests on Buildkite
#
# Version 0.1
#
script_path=$(dirname "$(realpath -s "$0")")
source "$script_path/functions/imports.sh"

set -euo pipefail

echo "--- :elasticsearch: Starting Elasticsearch"
DETACH=true bash "$script_path/run-elasticsearch.sh"

echo "+++ :javascript: Run Client"
bash "$script_path/run-client.sh"
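As a usage note, the Buildkite pipeline above drives this script through its matrix environment variables; below is a minimal local sketch only, assuming Docker is installed and the defaults from `functions/imports.sh` apply. The exported values are illustrative, not taken from this changeset.

```sh
# Illustrative local invocation of the Buildkite entry point (not part of this diff).
# Assumes Docker is available and the repository root is the working directory.
export NODE_VERSION=18
export TEST_SUITE=free                 # or "platinum"
export STACK_VERSION=8.8.0-SNAPSHOT
./.buildkite/run-tests.sh              # starts Elasticsearch, then runs run-client.sh
```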
@@ -1,4 +1,4 @@
ARG NODE_JS_VERSION=10
ARG NODE_JS_VERSION=18
FROM node:${NODE_JS_VERSION}

# Create app directory
@@ -1,19 +0,0 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDIzCCAgugAwIBAgIVAMTO6uVx9dLox2t0lY4IcBKZXb5WMA0GCSqGSIb3DQEB
|
||||
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
|
||||
ZXJhdGVkIENBMB4XDTIwMDIyNjA1NTA1OVoXDTIzMDIyNTA1NTA1OVowEzERMA8G
|
||||
A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDK
|
||||
YLTOikVENiN/qYupOsoXd7VYYnryyfCC/dK4FC2aozkbqjFzBdvPGAasoc4yEiH5
|
||||
CGeXMgJuOjk1maqetmdIsw00j4oHJviYsnGXzxxS5swhD7spcW4Uk4V4tAUzrbfT
|
||||
vW/2WW/yYCLe5phVb2chz0jL+WYb4bBmdfs/t6RtP9RqsplYAmVp3gZ6lt2YNtvE
|
||||
k9gz0TVk3DuO1TquIClfRYUjuywS6xDSvxJ8Jl91EfDWM8QU+9F+YAtiv74xl2U3
|
||||
P0wwMqNvMxf9/3ak3lTQGsgO4L6cwbKpVLMMzxSVunZz/sgl19xy3qHHz1Qr2MjJ
|
||||
/2c2J7vahUL4NPRkjJClAgMBAAGjTTBLMB0GA1UdDgQWBBS2Wn8E2VZv4oenY+pR
|
||||
O8G3zfQXhzAfBgNVHSMEGDAWgBSWAlip9eoPmnG4p4OFZeOUBlAbNDAJBgNVHRME
|
||||
AjAAMA0GCSqGSIb3DQEBCwUAA4IBAQAvwPvCiJJ6v9jYcyvYY8I3gP0oCwrylpRL
|
||||
n91UlgRSHUmuAObyOoVN5518gSV/bTU2SDrstcLkLFxHvnfpoGJoxsQEHuGxwDRI
|
||||
nhYNd62EKLerehNM/F9ILKmvTh8f6QPCzjUuExTXv+63l2Sr6dBS7FHsGs6UKUYO
|
||||
llM/y9wMZ1LCuZuBg9RhtgpFXRSgDM9Z7Begu0d/BPX9od/qAeZg9Arz4rwUiCN4
|
||||
IJOMEBEPi5q1tgeS0Fb1Grpqd0Uz5tZKtEHNKzLG+zSMmkneL62Nk2HsmEFZKwzg
|
||||
u2pU42UaUE596G6o78s1aLn9ICcElPHTjiuZNSiyuu9IzvFDjGQw
|
||||
-----END CERTIFICATE-----
|
||||
@@ -1,27 +0,0 @@
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEogIBAAKCAQEAymC0zopFRDYjf6mLqTrKF3e1WGJ68snwgv3SuBQtmqM5G6ox
|
||||
cwXbzxgGrKHOMhIh+QhnlzICbjo5NZmqnrZnSLMNNI+KByb4mLJxl88cUubMIQ+7
|
||||
KXFuFJOFeLQFM623071v9llv8mAi3uaYVW9nIc9Iy/lmG+GwZnX7P7ekbT/UarKZ
|
||||
WAJlad4GepbdmDbbxJPYM9E1ZNw7jtU6riApX0WFI7ssEusQ0r8SfCZfdRHw1jPE
|
||||
FPvRfmALYr++MZdlNz9MMDKjbzMX/f92pN5U0BrIDuC+nMGyqVSzDM8Ulbp2c/7I
|
||||
Jdfcct6hx89UK9jIyf9nNie72oVC+DT0ZIyQpQIDAQABAoIBADAh7f7NjgnaInlD
|
||||
ds8KB3SraPsbeQhzlPtiqRJU4j/MIFH/GYG03AGWQkget67a9y+GmzSvlTpoKKEh
|
||||
6h2TXl9BDpv4o6ht0WRn1HJ5tM/Wyqf2WNpTew3zxCPgFPikkXsPrChYPzLTQJfp
|
||||
GkP/mfTFmxfAOlPZSp4j41zVLYs53eDkAegFPVfKSr1XNNJ3QODLPcIBfxBYsiC9
|
||||
oU+jRW8xYuj31cEl5k5UqrChJ1rm3mt6cguqXKbISuoSvi13gXI6DccqhuLAU+Kr
|
||||
ib2XYrRP+pWocZo/pM9WUVoNGtFxfY88sAQtvG6gDKo2AURtFyq84Ow0h9mdixV/
|
||||
gRIDPcECgYEA5nEqE3OKuG9WuUFGXvjtn4C0F6JjflYWh7AbX51S4F6LKrW6/XHL
|
||||
Rg4BtF+XReT7OQ6llsV8kZeUxsUckkgDLzSaA8lysNDV5KkhAWHfRqH//QKFbqZi
|
||||
JL9t3x63Qt81US8s2hQk3khPYTRM8ZB3xHiXvZYSGC/0x/DxfEO3QJECgYEA4NK5
|
||||
sxtrat8sFz6SK9nWEKimPjDVzxJ0hxdX4tRq/JdOO5RncawVqt6TNP9gTuxfBvhW
|
||||
MhJYEsQj8iUoL1dxo9d1eP8HEANNV0iX5OBvJNmgBp+2OyRSyr+PA55+wAxYuAE7
|
||||
QKaitOjW57fpArNRt2hQyiSzTuqUFRWTWJHCWNUCgYAEurPTXF6vdFGCUc2g61jt
|
||||
GhYYGhQSpq+lrz6Qksj9o9MVWE9zHh++21C7o+6V16I0RJGva3QoBMVf4vG4KtQt
|
||||
5tV2WG8LI+4P2Ey+G4UajP6U8bVNVQrUmD0oBBhcvfn5JY+1Fg6/pRpD82/U0VMz
|
||||
7AmpMWhDqNBMPiymkTk0kQKBgCuWb05cSI0ly4SOKwS5bRk5uVFhYnKNH255hh6C
|
||||
FGP4acB/WzbcqC7CjEPAJ0nl5d6SExQOHmk1AcsWjR3wlCWxxiK5PwNJwJrlhh1n
|
||||
reS1FKN0H36D4lFQpkeLWQOe4Sx7gKNeKzlr0w6Fx3Uwku0+Gju2tdTdAey8jB6l
|
||||
08opAoGAEe1AuR/OFp2xw6V8TH9UHkkpGxy+OrXI6PX6tgk29PgB+uiMu4RwbjVz
|
||||
1di1KKq2XecAilVbnyqY+edADxYGbSnci9x5wQRIebfMi3VXKtV8NQBv2as6qwtW
|
||||
JDcQUWotOHjpdvmfJWWkcBhbAKrgX8ukww00ZI/lC3/rmkGnBBg=
|
||||
-----END RSA PRIVATE KEY-----
|
||||
@@ -1,7 +0,0 @@
|
||||
ARG NODE_JS_VERSION=10
|
||||
FROM node:${NODE_JS_VERSION}-alpine
|
||||
|
||||
RUN apk --no-cache add git
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
@@ -1,81 +0,0 @@
|
||||
---
|
||||
|
||||
##### GLOBAL METADATA
|
||||
|
||||
- meta:
|
||||
cluster: clients-ci
|
||||
|
||||
##### JOB DEFAULTS
|
||||
|
||||
- job:
|
||||
project-type: matrix
|
||||
logrotate:
|
||||
daysToKeep: 30
|
||||
numToKeep: 100
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/main
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
properties:
|
||||
- github:
|
||||
url: https://github.com/elastic/elasticsearch-js/
|
||||
- inject:
|
||||
properties-content: HOME=$JENKINS_HOME
|
||||
concurrent: true
|
||||
node: flyweight
|
||||
scm:
|
||||
- git:
|
||||
name: origin
|
||||
credentials-id: f6c7695a-671e-4f4f-a331-acdce44ff9ba
|
||||
reference-repo: /var/lib/jenkins/.git-references/elasticsearch-js.git
|
||||
branches:
|
||||
- ${branch_specifier}
|
||||
url: https://github.com/elastic/elasticsearch-js.git
|
||||
basedir: ''
|
||||
wipe-workspace: 'True'
|
||||
triggers:
|
||||
- github
|
||||
vault:
|
||||
# vault read auth/approle/role/clients-ci/role-id
|
||||
role_id: ddbd0d44-0e51-105b-177a-c8fdfd445126
|
||||
axes:
|
||||
- axis:
|
||||
type: slave
|
||||
name: label
|
||||
values:
|
||||
- linux
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: STACK_VERSION
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: NODE_JS_VERSION
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: TEST_SUITE
|
||||
yaml-strategy:
|
||||
exclude-key: exclude
|
||||
filename: .ci/test-matrix.yml
|
||||
wrappers:
|
||||
- ansicolor
|
||||
- timeout:
|
||||
type: absolute
|
||||
timeout: 120
|
||||
fail: true
|
||||
- timestamps
|
||||
- workspace-cleanup
|
||||
builders:
|
||||
- shell: |-
|
||||
#!/usr/local/bin/runbld
|
||||
.ci/run-tests
|
||||
publishers:
|
||||
- email:
|
||||
recipients: build-lang-clients@elastic.co
|
||||
- junit:
|
||||
results: "**/*-junit.xml"
|
||||
allow-empty-results: true
|
||||
@@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+5.x
|
||||
display-name: 'elastic / elasticsearch-js # 5.x'
|
||||
description: Testing the elasticsearch-js 5.x branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/5.x
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: '@weekly'
|
||||
@@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+6.x
|
||||
display-name: 'elastic / elasticsearch-js # 6.x'
|
||||
description: Testing the elasticsearch-js 6.x branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/6.x
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+7.16
|
||||
display-name: 'elastic / elasticsearch-js # 7.16'
|
||||
description: Testing the elasticsearch-js 7.16 branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/7.16
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+7.17
|
||||
display-name: 'elastic / elasticsearch-js # 7.17'
|
||||
description: Testing the elasticsearch-js 7.17 branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/7.17
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+8.0
|
||||
display-name: 'elastic / elasticsearch-js # 8.0'
|
||||
description: Testing the elasticsearch-js 8.0 branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads8.0
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+main
|
||||
display-name: 'elastic / elasticsearch-js # main'
|
||||
description: Testing the elasticsearch-js main branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/main
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@@ -1,19 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+pull-request
|
||||
display-name: 'elastic / elasticsearch-js # pull-request'
|
||||
description: Testing of elasticsearch-js pull requests.
|
||||
junit_results: "*-junit.xml"
|
||||
scm:
|
||||
- git:
|
||||
branches:
|
||||
- ${ghprbActualCommit}
|
||||
refspec: +refs/pull/*:refs/remotes/origin/pr/*
|
||||
triggers:
|
||||
- github-pull-request:
|
||||
org-list:
|
||||
- elastic
|
||||
allow-whitelist-orgs-as-admins: true
|
||||
github-hooks: true
|
||||
status-context: clients-ci
|
||||
cancel-builds-on-update: true
|
||||
.ci/make.mjs (Normal file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* global $ argv */
|
||||
|
||||
'use strict'
|
||||
|
||||
import 'zx/globals'
|
||||
|
||||
import { readFile, writeFile } from 'fs/promises'
|
||||
import assert from 'assert'
|
||||
import { join } from 'desm'
|
||||
import semver from 'semver'
|
||||
|
||||
assert(typeof argv.task === 'string', 'Missing task parameter')
|
||||
|
||||
switch (argv.task) {
|
||||
case 'release':
|
||||
release(argv._).catch(onError)
|
||||
break
|
||||
case 'bump':
|
||||
bump(argv._).catch(onError)
|
||||
break
|
||||
case 'codegen':
|
||||
codegen(argv._).catch(onError)
|
||||
break
|
||||
default:
|
||||
console.log(`Unknown task: ${argv.task}`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
async function release (args) {
|
||||
assert(args.length === 2, 'Release task expects two parameters')
|
||||
let [version, outputFolder] = args
|
||||
|
||||
if (process.env.WORKFLOW === 'snapshot' && !version.endsWith('SNAPSHOT')) {
|
||||
version = `${version}-SNAPSHOT`
|
||||
}
|
||||
|
||||
await bump([version])
|
||||
|
||||
const packageJson = JSON.parse(await readFile(
|
||||
join(import.meta.url, '..', 'package.json'),
|
||||
'utf8'
|
||||
))
|
||||
|
||||
await $`npm run build`
|
||||
await $`npm pack`
|
||||
await $`zip elasticsearch-js-${version}.zip elastic-elasticsearch-${packageJson.version}.tgz`
|
||||
await $`rm elastic-elasticsearch-${packageJson.version}.tgz`
|
||||
await $`mv ${join(import.meta.url, '..', `elasticsearch-js-${version}.zip`)} ${join(import.meta.url, '..', outputFolder, `elasticsearch-js-${version}.zip`)}`
|
||||
}
|
||||
|
||||
async function bump (args) {
|
||||
assert(args.length === 1, 'Bump task expects one parameter')
|
||||
const [version] = args
|
||||
const packageJson = JSON.parse(await readFile(
|
||||
join(import.meta.url, '..', 'package.json'),
|
||||
'utf8'
|
||||
))
|
||||
|
||||
const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
|
||||
assert(semver.valid(cleanVersion))
|
||||
packageJson.version = cleanVersion
|
||||
packageJson.versionCanary = `${cleanVersion}-canary.0`
|
||||
|
||||
await writeFile(
|
||||
join(import.meta.url, '..', 'package.json'),
|
||||
JSON.stringify(packageJson, null, 2),
|
||||
'utf8'
|
||||
)
|
||||
|
||||
const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'))
|
||||
await writeFile(
|
||||
join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
|
||||
pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: - ${cleanVersion}-SNAPSHOT`), // eslint-disable-line
|
||||
'utf8'
|
||||
)
|
||||
}
|
||||
|
||||
// this command can only be executed locally for now
|
||||
async function codegen (args) {
|
||||
assert(args.length === 1, 'Codegen task expects one parameter')
|
||||
const [version] = args
|
||||
|
||||
const clientGeneratorPath = join(import.meta.url, '..', '..', 'elastic-client-generator-js')
|
||||
const isGeneratorCloned = await $`[[ -d ${clientGeneratorPath} ]]`.exitCode === 0
|
||||
assert(isGeneratorCloned, 'You must clone the elastic-client-generator-js first')
|
||||
|
||||
await $`npm install --prefix ${clientGeneratorPath}`
|
||||
|
||||
// generate elasticsearch client. this command will take a while!
|
||||
if (version === 'main') {
|
||||
await $`npm run elasticsearch --prefix ${clientGeneratorPath} -- --version main`
|
||||
} else {
|
||||
await $`npm run elasticsearch --prefix ${clientGeneratorPath} -- --version ${version.split('.').slice(0, 2).join('.')}`
|
||||
}
|
||||
// clean up fixable linter issues
|
||||
await $`npm run fix --prefix ${clientGeneratorPath}`
|
||||
|
||||
await $`rm -rf ${join(import.meta.url, '..', 'src', 'api')}`
|
||||
await $`mkdir ${join(import.meta.url, '..', 'src', 'api')}`
|
||||
await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
|
||||
await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
|
||||
await $`npm run build`
|
||||
}
|
||||
|
||||
function onError (err) {
|
||||
console.log(err)
|
||||
process.exit(1)
|
||||
}
|
||||
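The task switch in `make.mjs` is normally driven by `.ci/make.sh` (shown next), which runs it inside a container. Below is only a sketch of calling the runner directly, assuming the dev dependencies (`zx`, `desm`, `semver`) are installed locally; the version numbers are placeholders.

```sh
# Sketch: invoking the zx task runner directly instead of through .ci/make.sh.
# Version numbers are placeholders, not values taken from this changeset.
node .ci/make.mjs --task bump 8.8.0                # rewrites package.json and .buildkite/pipeline.yml
node .ci/make.mjs --task release 8.8.0 .ci/output  # builds, packs and zips the client artifact
```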
.ci/make.sh (Executable file, 187 lines)
@@ -0,0 +1,187 @@
|
||||
#!/usr/bin/env bash
|
||||
# ------------------------------------------------------- #
|
||||
#
|
||||
# Build entry script for elasticsearch-js
|
||||
#
|
||||
# Must be called: ./.ci/make.sh <target> <params>
|
||||
#
|
||||
# Version: 1.1.0
|
||||
#
|
||||
# Targets:
|
||||
# ---------------------------
|
||||
# assemble <VERSION> : build client artifacts with version
|
||||
# bump <VERSION> : bump client internals to version
|
||||
# bumpmatrix <VERSION> : bump stack version in test matrix to version
|
||||
# codegen : generate endpoints
|
||||
# docsgen <VERSION> : generate documentation
|
||||
# examplegen : generate the doc examples
|
||||
# clean : clean workspace
|
||||
#
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Bootstrap
|
||||
# ------------------------------------------------------- #
|
||||
script_path=$(dirname "$(realpath -s "$0")")
|
||||
repo=$(realpath "$script_path/../")
|
||||
generator=$(realpath "$script_path/../../elastic-client-generator-js")
|
||||
|
||||
# shellcheck disable=SC1090
|
||||
CMD=$1
|
||||
TASK=$1
|
||||
TASK_ARGS=()
|
||||
VERSION=$2
|
||||
STACK_VERSION=$VERSION
|
||||
set -euo pipefail
|
||||
|
||||
product="elastic/elasticsearch-js"
|
||||
output_folder=".ci/output"
|
||||
codegen_folder=".ci/output"
|
||||
OUTPUT_DIR="$repo/${output_folder}"
|
||||
# REPO_BINDING="${OUTPUT_DIR}:/sln/${output_folder}"
|
||||
NODE_JS_VERSION=18
|
||||
WORKFLOW=${WORKFLOW-staging}
|
||||
mkdir -p "$OUTPUT_DIR"
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
|
||||
|
||||
case $CMD in
|
||||
clean)
|
||||
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
|
||||
rm -rf "$output_folder"
|
||||
echo -e "\033[32;1mdone.\033[0m"
|
||||
exit 0
|
||||
;;
|
||||
assemble)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: assemble artefact $VERSION\033[0m"
|
||||
TASK=release
|
||||
TASK_ARGS=("$VERSION" "$output_folder")
|
||||
;;
|
||||
codegen)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: codegen -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
|
||||
TASK=codegen
|
||||
# VERSION is BRANCH here for now
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
docsgen)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
|
||||
TASK=codegen
|
||||
# VERSION is BRANCH here for now
|
||||
TASK_ARGS=("$VERSION" "$codegen_folder")
|
||||
;;
|
||||
examplesgen)
|
||||
echo -e "\033[36;1mTARGET: generate examples\033[0m"
|
||||
TASK=codegen
|
||||
# VERSION is BRANCH here for now
|
||||
TASK_ARGS=("$VERSION" "$codegen_folder")
|
||||
;;
|
||||
bump)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
|
||||
TASK=bump
|
||||
# VERSION is BRANCH here for now
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
bumpmatrix)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
|
||||
TASK=bumpmatrix
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
*)
|
||||
echo -e "\n'$CMD' is not supported right now\n"
|
||||
echo -e "\nUsage:"
|
||||
echo -e "\t $0 release \$VERSION\n"
|
||||
echo -e "\t $0 bump \$VERSION"
|
||||
echo -e "\t $0 codegen \$VERSION"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Build Container
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
echo -e "\033[34;1mINFO: building $product container\033[0m"
|
||||
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--tag "$product" \
|
||||
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
|
||||
.
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Run the Container
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
echo -e "\033[34;1mINFO: running $product container\033[0m"
|
||||
|
||||
docker run \
|
||||
--volume "$repo:/usr/src/app" \
|
||||
--volume "$generator:/usr/src/elastic-client-generator-js" \
|
||||
--volume /usr/src/app/node_modules \
|
||||
-u "$(id -u):$(id -g)" \
|
||||
--env "WORKFLOW=$WORKFLOW" \
|
||||
--name make-elasticsearch-js \
|
||||
--rm \
|
||||
$product \
|
||||
node .ci/make.mjs --task $TASK "${TASK_ARGS[@]}"
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Post Command tasks & checks
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
if [[ "$CMD" == "assemble" ]]; then
|
||||
if compgen -G ".ci/output/*" > /dev/null; then
|
||||
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "bump" ]]; then
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "codegen" ]]; then
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "docsgen" ]]; then
|
||||
echo "TODO"
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "examplesgen" ]]; then
|
||||
echo "TODO"
|
||||
fi
|
||||
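For reference, the header comment of the script above documents the calling convention (`./.ci/make.sh <target> <params>`); the following is a hedged sketch of the documented targets, with placeholder version numbers.

```sh
# Example invocations of the documented targets; versions are placeholders.
./.ci/make.sh assemble 8.8.0    # build client artifacts into .ci/output
./.ci/make.sh bump 8.8.0        # bump client internals to the given version
./.ci/make.sh codegen 8.8       # regenerate endpoints from the matching generator branch
```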
@@ -1,14 +0,0 @@
#!/usr/bin/env bash

source /usr/local/bin/bash_standard_lib.sh

DOCKER_IMAGES="node:16-alpine
node:14-alpine
node:12-alpine
"

for di in ${DOCKER_IMAGES}
do
(retry 2 docker pull "${di}") || echo "Error pulling ${di} Docker image, we continue"
done
@@ -1,43 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# parameters are available to this script
|
||||
|
||||
# STACK_VERSION -- version e.g Major.Minor.Patch(-Prelease)
|
||||
# TEST_SUITE -- which test suite to run: free or platinum
|
||||
# ELASTICSEARCH_URL -- The url at which elasticsearch is reachable, a default is composed based on STACK_VERSION and TEST_SUITE
|
||||
# NODE_JS_VERSION -- node js version (defined in test-matrix.yml, a default is hardcoded here)
|
||||
script_path=$(dirname $(realpath -s $0))
|
||||
source $script_path/functions/imports.sh
|
||||
set -euo pipefail
|
||||
|
||||
NODE_JS_VERSION=${NODE_JS_VERSION-12}
|
||||
ELASTICSEARCH_URL=${ELASTICSEARCH_URL-"$elasticsearch_url"}
|
||||
elasticsearch_container=${elasticsearch_container-}
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m TEST_SUITE ${TEST_SUITE}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m URL ${ELASTICSEARCH_URL}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m CONTAINER ${elasticsearch_container}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m NODE_JS_VERSION ${NODE_JS_VERSION}\033[0m"
|
||||
|
||||
echo -e "\033[1m>>>>> Build docker container >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--tag elastic/elasticsearch-js \
|
||||
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
|
||||
.
|
||||
|
||||
echo -e "\033[1m>>>>> NPM run test:integration >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
repo=$(realpath $(dirname $(realpath -s $0))/../)
|
||||
|
||||
docker run \
|
||||
--network=${network_name} \
|
||||
--env "TEST_ES_SERVER=${ELASTICSEARCH_URL}" \
|
||||
--env "TEST_SUITE=${TEST_SUITE}" \
|
||||
--volume $repo:/usr/src/app \
|
||||
--volume /usr/src/app/node_modules \
|
||||
--name elasticsearch-js \
|
||||
--rm \
|
||||
elastic/elasticsearch-js \
|
||||
npm run test:integration
|
||||
@@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Version 1.1
|
||||
# - Moved to .ci folder and seperated out `run-repository.sh`
|
||||
# - Add `$RUNSCRIPTS` env var for running Elasticsearch dependent products
|
||||
script_path=$(dirname $(realpath -s $0))
|
||||
source $script_path/functions/imports.sh
|
||||
set -euo pipefail
|
||||
|
||||
echo -e "\033[1m>>>>> Start [$STACK_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
DETACH=true bash .ci/run-elasticsearch.sh
|
||||
|
||||
if [[ -n "$RUNSCRIPTS" ]]; then
|
||||
for RUNSCRIPT in ${RUNSCRIPTS//,/ } ; do
|
||||
echo -e "\033[1m>>>>> Running run-$RUNSCRIPT.sh >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
CONTAINER_NAME=${RUNSCRIPT} \
|
||||
DETACH=true \
|
||||
bash .ci/run-${RUNSCRIPT}.sh
|
||||
done
|
||||
fi
|
||||
|
||||
echo -e "\033[1m>>>>> Repository specific tests >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
bash .ci/run-repository.sh
|
||||
@@ -1,14 +0,0 @@
|
||||
---
|
||||
STACK_VERSION:
|
||||
- 8.1.0-SNAPSHOT
|
||||
|
||||
NODE_JS_VERSION:
|
||||
- 16
|
||||
- 14
|
||||
- 12
|
||||
|
||||
TEST_SUITE:
|
||||
- free
|
||||
- platinum
|
||||
|
||||
exclude: ~
|
||||
.github/ISSUE_TEMPLATE/regression.md (vendored, 1 change)
@@ -51,5 +51,6 @@ Paste the results here:

- *node version*: 6,8,10
- `@elastic/elasticsearch` *version*: >=7.0.0
- *typescript version*: 4.x (if applicable)
- *os*: Mac, Windows, Linux
- *any other relevant information*
.github/workflows/backport.yml (vendored, 2 changes)
@@ -11,6 +11,6 @@ jobs:
name: Backport
steps:
- name: Backport
uses: tibdex/backport@v1
uses: tibdex/backport@v2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/nodejs.yml (vendored, 142 changes)
@@ -9,7 +9,7 @@ jobs:
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [12.x, 14.x, 16.x]
|
||||
node-version: [14.x, 16.x, 18.x, 20.x]
|
||||
os: [ubuntu-latest, windows-latest, macOS-latest]
|
||||
|
||||
steps:
|
||||
@@ -32,151 +32,13 @@ jobs:
|
||||
run: |
|
||||
npm run test:unit
|
||||
|
||||
# - name: Acceptance test
|
||||
# run: |
|
||||
# npm run test:acceptance
|
||||
|
||||
# helpers-integration-test:
|
||||
# name: Helpers integration test
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# strategy:
|
||||
# matrix:
|
||||
# node-version: [12.x, 14.x, 16.x]
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Configure sysctl limits
|
||||
# run: |
|
||||
# sudo swapoff -a
|
||||
# sudo sysctl -w vm.swappiness=1
|
||||
# sudo sysctl -w fs.file-max=262144
|
||||
# sudo sysctl -w vm.max_map_count=262144
|
||||
|
||||
# - name: Runs Elasticsearch
|
||||
# uses: elastic/elastic-github-actions/elasticsearch@master
|
||||
# with:
|
||||
# stack-version: 8.0.0-SNAPSHOT
|
||||
|
||||
# - name: Use Node.js ${{ matrix.node-version }}
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: ${{ matrix.node-version }}
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
|
||||
# - name: Integration test
|
||||
# run: |
|
||||
# npm run test:integration:helpers
|
||||
|
||||
# bundler-support:
|
||||
# name: Bundler support
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Configure sysctl limits
|
||||
# run: |
|
||||
# sudo swapoff -a
|
||||
# sudo sysctl -w vm.swappiness=1
|
||||
# sudo sysctl -w fs.file-max=262144
|
||||
# sudo sysctl -w vm.max_map_count=262144
|
||||
|
||||
# - name: Runs Elasticsearch
|
||||
# uses: elastic/elastic-github-actions/elasticsearch@master
|
||||
# with:
|
||||
# stack-version: 8.0.0-SNAPSHOT
|
||||
|
||||
# - name: Use Node.js 14.x
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: 14.x
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
# npm install --prefix test/bundlers/parcel-test
|
||||
# npm install --prefix test/bundlers/rollup-test
|
||||
# npm install --prefix test/bundlers/webpack-test
|
||||
|
||||
# - name: Build
|
||||
# run: |
|
||||
# npm run build --prefix test/bundlers/parcel-test
|
||||
# npm run build --prefix test/bundlers/rollup-test
|
||||
# npm run build --prefix test/bundlers/webpack-test
|
||||
|
||||
# - name: Run bundle
|
||||
# run: |
|
||||
# npm start --prefix test/bundlers/parcel-test
|
||||
# npm start --prefix test/bundlers/rollup-test
|
||||
# npm start --prefix test/bundlers/webpack-test
|
||||
|
||||
# mock-support:
|
||||
# name: Mock support
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Use Node.js 14.x
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: 14.x
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
# npm install --prefix test/mock
|
||||
|
||||
# - name: Run test
|
||||
# run: |
|
||||
# npm test --prefix test/mock
|
||||
|
||||
# code-coverage:
|
||||
# name: Code coverage
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# strategy:
|
||||
# matrix:
|
||||
# node-version: [14.x]
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Use Node.js ${{ matrix.node-version }}
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: ${{ matrix.node-version }}
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
|
||||
# - name: Code coverage report
|
||||
# run: |
|
||||
# npm run test:coverage-report
|
||||
|
||||
# - name: Upload coverage to Codecov
|
||||
# uses: codecov/codecov-action@v1
|
||||
# with:
|
||||
# file: ./coverage.lcov
|
||||
# fail_ci_if_error: true
|
||||
|
||||
# - name: Code coverage 100%
|
||||
# run: |
|
||||
# npm run test:coverage-100
|
||||
|
||||
license:
|
||||
name: License check
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [14.x]
|
||||
node-version: [20.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
@@ -72,7 +72,3 @@ CODE_OF_CONDUCT.md
|
||||
CONTRIBUTING.md
|
||||
|
||||
src
|
||||
|
||||
# CANARY-PACKAGE
|
||||
lib/api/kibana.*
|
||||
# /CANARY-PACKAGE
|
||||
|
||||
Makefile (Normal file, 11 lines)
@@ -0,0 +1,11 @@
.PHONY: integration-setup
integration-setup: integration-cleanup
	DETACH=true .ci/run-elasticsearch.sh

.PHONY: integration-cleanup
integration-cleanup:
	docker container rm --force --volumes instance || true

.PHONY: integration
integration: integration-setup
	npm run test:integration
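A short usage sketch of the new Makefile targets, assuming Docker and the `.ci/run-elasticsearch.sh` script are available on the machine:

```sh
# `integration` depends on `integration-setup`, which depends on `integration-cleanup`,
# so one command removes any stale container, starts Elasticsearch and runs the tests.
make integration
# tear the test container down again when finished
make integration-cleanup
```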
README.md (99 changes)
@@ -2,7 +2,7 @@
|
||||
|
||||
# Elasticsearch Node.js client
|
||||
|
||||
[](http://standardjs.com/) [](https://clients-ci.elastic.co/view/Javascript/job/elastic+elasticsearch-js+main/) [](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml) [](https://codecov.io/gh/elastic/elasticsearch-js) [](https://www.npmjs.com/package/@elastic/elasticsearch)
|
||||
[](http://standardjs.com/) [](https://clients-ci.elastic.co/view/JavaScript/job/elastic+elasticsearch-js+main/) [](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml) [](https://codecov.io/gh/elastic/elasticsearch-js) [](https://www.npmjs.com/package/@elastic/elasticsearch)
|
||||
|
||||
The official Node.js client for Elasticsearch.
|
||||
|
||||
@@ -22,7 +22,7 @@ npm install @elastic/elasticsearch
|
||||
|
||||
### Node.js support
|
||||
|
||||
NOTE: The minimum supported version of Node.js is `v12`.
|
||||
NOTE: The minimum supported version of Node.js is `v14`.
|
||||
|
||||
The client versioning follows the Elastic Stack versioning, this means that
|
||||
major, minor, and patch releases are done following a precise schedule that
|
||||
@@ -44,7 +44,9 @@ of `^7.10.0`).
|
||||
| Node.js Version | Node.js EOL date | End of support |
|
||||
| --------------- |------------------| ---------------------- |
|
||||
| `8.x` | `December 2019` | `7.11` (early 2021) |
|
||||
| `10.x` | `April 2021` | `7.12` (mid 2021) |
|
||||
| `10.x` | `April 2021` | `7.12` (mid 2021) |
|
||||
| `12.x` | `April 2022` | `8.2` (early 2022) |
|
||||
| `14.x` | `April 2023` | `8.8` (early 2023) |
|
||||
|
||||
### Compatibility
|
||||
|
||||
@@ -53,7 +55,7 @@ Elasticsearch language clients are only backwards compatible with default distri
|
||||
|
||||
| Elasticsearch Version | Client Version |
|
||||
| --------------------- |----------------|
|
||||
| `main` | `main` |
|
||||
| `8.x` | `8.x` |
|
||||
| `7.x` | `7.x` |
|
||||
| `6.x` | `6.x` |
|
||||
| `5.x` | `5.x` |
|
||||
@@ -74,11 +76,9 @@ We recommend that you write a lightweight proxy that uses this client instead, y
|
||||
- [Usage](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#client-usage)
|
||||
- [Client configuration](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-configuration.html)
|
||||
- [API reference](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/api-reference.html)
|
||||
- [Breaking changes coming from the old client](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/breaking-changes.html)
|
||||
- [Authentication](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html#authentication)
|
||||
- [Observability](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html)
|
||||
- [Creating a child client](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/child.html)
|
||||
- [Extend the client](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/extend.html)
|
||||
- [Client helpers](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html)
|
||||
- [Typescript support](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/typescript.html)
|
||||
- [Testing](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-testing.html)
|
||||
@@ -86,60 +86,20 @@ We recommend that you write a lightweight proxy that uses this client instead, y
|
||||
|
||||
## Quick start
|
||||
|
||||
First of all, require the client and initialize it:
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
```
|
||||
|
||||
You can use both the callback-style API and the promise-style API, both behave the same way.
|
||||
```js
|
||||
// promise API
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
```
|
||||
The returned value of **every** API call is formed as follows:
|
||||
```ts
|
||||
{
|
||||
body: object | boolean
|
||||
statusCode: number
|
||||
headers: object
|
||||
warnings: [string]
|
||||
meta: object
|
||||
}
|
||||
```
|
||||
|
||||
Let's see a complete example!
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
// Let's start by indexing some data
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
@@ -147,8 +107,7 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
body: {
|
||||
document: {
|
||||
character: 'Daenerys Targaryen',
|
||||
quote: 'I am the blood of the dragon.'
|
||||
}
|
||||
@@ -156,8 +115,7 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
body: {
|
||||
document: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A mind needs books like a sword needs a whetstone.'
|
||||
}
|
||||
@@ -168,17 +126,14 @@ async function run () {
|
||||
await client.indices.refresh({ index: 'game-of-thrones' })
|
||||
|
||||
// Let's search!
|
||||
const { body } = await client.search({
|
||||
const result= await client.search({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
body: {
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
}
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
console.log(result.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
@@ -208,18 +163,24 @@ You will require the packages from your code by using the alias you have defined
|
||||
const { Client: Client6 } = require('es6')
|
||||
const { Client: Client7 } = require('es7')
|
||||
|
||||
const client6 = new Client6({ node: 'http://localhost:9200' })
|
||||
const client7 = new Client7({ node: 'http://localhost:9201' })
|
||||
const client6 = new Client6({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const client7 = new Client7({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
client6.info(console.log)
|
||||
client7.info(console.log)
|
||||
client6.info().then(console.log, console.log)
|
||||
client7.info().then(console.log, console.log)
|
||||
```
|
||||
|
||||
Finally, if you want to install the client for the next version of Elasticsearch *(the one that lives in Elasticsearch’s master branch)*, you can use the following command:
|
||||
Finally, if you want to install the client for the next version of Elasticsearch *(the one that lives in Elasticsearch’s main branch)*, you can use the following command:
|
||||
```sh
|
||||
npm install esmaster@github:elastic/elasticsearch-js
|
||||
npm install esmain@github:elastic/elasticsearch-js
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This software is licensed under the [Apache 2 license](./LICENSE).
|
||||
This software is licensed under the [Apache License 2.0](./LICENSE).
|
||||
|
||||
catalog-info.yaml (Normal file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
---
|
||||
# yaml-language-server: $schema=https://json.schemastore.org/catalog-info.json
|
||||
apiVersion: backstage.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: elasticsearch-js
|
||||
spec:
|
||||
type: library
|
||||
owner: group:clients-team
|
||||
lifecycle: production
|
||||
|
||||
---
|
||||
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
|
||||
apiVersion: backstage.io/v1alpha1
|
||||
kind: Resource
|
||||
metadata:
|
||||
name: elasticsearch-js-integration-tests
|
||||
description: Elasticsearch JavaScript client integration tests
|
||||
spec:
|
||||
type: buildkite-pipeline
|
||||
owner: group:clients-team
|
||||
system: buildkite
|
||||
implementation:
|
||||
apiVersion: buildkite.elastic.dev/v1
|
||||
kind: Pipeline
|
||||
metadata:
|
||||
name: Elasticsearch JavaScript client integration tests
|
||||
spec:
|
||||
repository: elastic/elasticsearch-js
|
||||
pipeline_file: .buildkite/pipeline.yml
|
||||
teams:
|
||||
clients-team:
|
||||
access_level: MANAGE_BUILD_AND_READ
|
||||
everyone:
|
||||
access_level: READ_ONLY
|
||||
cancel_intermediate_builds: true
|
||||
cancel_intermediate_builds_branch_filter: '!main'
|
||||
schedules:
|
||||
main_semi_daily:
|
||||
branch: 'main'
|
||||
cronline: '*/12 * * *'
|
||||
8_8_semi_daily:
|
||||
branch: '8.8'
|
||||
cronline: '*/12 * * *'
|
||||
8_7_daily:
|
||||
branch: '8.7'
|
||||
cronline: '@daily'
|
||||
8_6_daily:
|
||||
branch: '8.6'
|
||||
cronline: '@daily'
|
||||
7_17_daily:
|
||||
branch: '7.17'
|
||||
cronline: '@daily'
|
||||
@@ -30,7 +30,9 @@ class MyConnectionPool extends ConnectionPool {
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
ConnectionPool: MyConnectionPool
|
||||
ConnectionPool: MyConnectionPool,
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
----
|
||||
|
||||
@@ -45,16 +47,18 @@ is performed here, this means that if you want to swap the default HTTP client
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client, Connection } = require('@elastic/elasticsearch')
|
||||
const { Client, BaseConnection } = require('@elastic/elasticsearch')
|
||||
|
||||
class MyConnection extends Connection {
|
||||
class MyConnection extends BaseConnection {
|
||||
request (params, callback) {
|
||||
// your code
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
Connection: MyConnection
|
||||
Connection: MyConnection,
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
----
|
||||
|
||||
@@ -81,7 +85,9 @@ class MySerializer extends Serializer {
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
Serializer: MySerializer
|
||||
Serializer: MySerializer,
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
----
|
||||
|
||||
@@ -89,12 +95,12 @@ const client = new Client({
|
||||
==== Migrate to v8
|
||||
|
||||
The Node.js client can be configured to emit an HTTP header
|
||||
``Accept: application/vnd.elasticsearch+json; compatible-with=7``
|
||||
`Accept: application/vnd.elasticsearch+json; compatible-with=7`
|
||||
which signals to Elasticsearch that the client is requesting
|
||||
``7.x`` version of request and response bodies. This allows for
|
||||
`7.x` version of request and response bodies. This allows for
|
||||
upgrading from 7.x to 8.x version of Elasticsearch without upgrading
|
||||
everything at once. Elasticsearch should be upgraded first after
|
||||
the compatibility header is configured and clients should be upgraded
|
||||
second.
|
||||
To enable to setting, configure the environment variable
|
||||
``ELASTIC_CLIENT_APIVERSIONING`` to ``true``.
|
||||
`ELASTIC_CLIENT_APIVERSIONING` to `true`.
|
||||
|
||||
@@ -10,7 +10,8 @@ offers.
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
maxRetries: 5,
|
||||
requestTimeout: 60000,
|
||||
sniffOnStart: true
|
||||
@@ -32,7 +33,7 @@ Or it can be an object (or an array of objects) that represents the node:
|
||||
----
|
||||
node: {
|
||||
url: new URL('http://localhost:9200'),
|
||||
ssl: 'ssl options',
|
||||
tls: 'tls options',
|
||||
agent: 'http agent options',
|
||||
id: 'custom node id',
|
||||
headers: { 'custom': 'headers' }
|
||||
@@ -48,8 +49,7 @@ node: {
|
||||
|`auth`
|
||||
a|Your authentication data. You can use both basic authentication and
|
||||
{ref}/security-api-create-api-key.html[ApiKey]. +
|
||||
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
for more details. +
|
||||
See <<authentication,Authentication>> for more details. +
|
||||
_Default:_ `null`
|
||||
|
||||
Basic authentication:
|
||||
@@ -118,8 +118,8 @@ _Default:_ `false`
|
||||
_Options:_ `'gzip'`, `false` +
|
||||
_Default:_ `false`
|
||||
|
||||
|`ssl`
|
||||
|`http.SecureContextOptions` - ssl https://nodejs.org/api/tls.html[configuraton]. +
|
||||
|`tls`
|
||||
|`http.SecureContextOptions` - tls https://nodejs.org/api/tls.html[configuraton]. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`proxy`
|
||||
@@ -242,7 +242,7 @@ _Cloud configuration example:_
|
||||
----
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA=='
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
@@ -267,24 +267,4 @@ _Default:_ `null`
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENTGH +
|
||||
_Default:_ `null`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
==== Performances considerations
|
||||
|
||||
By default, the client will protection you against prototype poisoning attacks.
|
||||
Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more.
|
||||
If needed you can disable prototype poisoning protection entirely or one of the two checks.
|
||||
Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more.
|
||||
|
||||
While it's good to be safe, you should know that security always comes with a cost.
|
||||
With big enough payloads, this security check could causea drop in the overall performances,
|
||||
which might be a problem for your application.
|
||||
If you know you can trust the data stored in Elasticsearch, you can safely disable this check.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
disablePrototypePoisoningProtection: true
|
||||
})
|
||||
----
|
||||
|===
|
||||
@@ -1,334 +0,0 @@
|
||||
[[breaking-changes]]
|
||||
=== Breaking changes coming from the old client
|
||||
|
||||
If you were already using the previous version of this client – the one you used
|
||||
to install with `npm install elasticsearch` – you will encounter some breaking
|
||||
changes.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Don’t panic!
|
||||
|
||||
Every breaking change was carefully weighed, and each is justified. Furthermore,
|
||||
the new codebase has been rewritten with modern JavaScript and has been
|
||||
carefully designed to be easy to maintain.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Breaking changes
|
||||
|
||||
* Minimum supported version of Node.js is `v8`.
|
||||
|
||||
* Everything has been rewritten using ES6 classes to help users extend the
|
||||
defaults more easily.
|
||||
|
||||
* There is no longer an integrated logger. The client now is an event emitter
|
||||
that emits the following events: `request`, `response`, and `error`.
|
||||
|
||||
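For example, a minimal sketch of subscribing to these events could look like the
following (the payload shapes are simplified here for illustration):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// 'request' fires before each request is sent,
// 'response' fires once the response has been received and parsed.
client.on('request', (err, result) => {
  if (err) console.log(err)
})

client.on('response', (err, result) => {
  if (err) console.log(err)
})
----
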
* The code is no longer shipped with all the versions of the API, but only that
|
||||
of the package’s major version. This means that if you are using {es} `v6`, you
|
||||
are required to install `@elastic/elasticsearch@6`, and so on.
|
||||
|
||||
* The internals are completely different, so if you used to tweak them a lot,
|
||||
you will need to refactor your code. The public API should be almost the same.
|
||||
|
||||
* There is no longer browser support; that will be distributed via a separate
|
||||
module: `@elastic/elasticsearch-browser`. This module is intended for Node.js
|
||||
only.
|
||||
|
||||
* The returned value of an API call will no longer be the `body`, `statusCode`,
|
||||
and `headers` for callbacks, and only the `body` for promises. The new returned
|
||||
value will be a unique object containing the `body`, `statusCode`, `headers`,
|
||||
`warnings`, and `meta`, for both callback and promises.
|
||||
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const body = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, (err, body, statusCode, headers) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
|
||||
// after
|
||||
const { body, statusCode, headers, warnings } = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, (err, { body, statusCode, headers, warnings }) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
* Errors: there is no longer a custom error class for every HTTP status code
|
||||
(such as `BadRequest` or `NotFound`). There is instead a single `ResponseError`.
|
||||
Every error class has been renamed, and now each is suffixed with `Error` at the
|
||||
end.
|
||||
|
||||
* Removed errors: `RequestTypeError`, `Generic`, and all the status code
|
||||
specific errors (such as `BadRequest` or `NotFound`).
|
||||
|
||||
* Added errors: `ConfigurationError` (in case of bad configurations) and
|
||||
`ResponseError` that contains all the data you may need to handle the specific
|
||||
error, such as `statusCode`, `headers`, `body`, and `message`.
|
||||
|
||||
|
||||
* Renamed errors:
|
||||
|
||||
** `RequestTimeout` (408 statusCode) => `TimeoutError`
|
||||
** `ConnectionFault` => `ConnectionError`
|
||||
** `NoConnections` => `NoLivingConnectionsError`
|
||||
** `Serialization` => `SerializationError`
|
||||
** `Serialization` => `DeserializationError`
|
||||
|
||||
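For example, code that previously checked for `RequestTimeout` or
`ConnectionFault` now checks the renamed classes; a minimal sketch:

[source,js]
----
const { Client, errors } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

client.search({ index: 'my-index', body: { query: { match_all: {} } } })
  .then(({ body }) => console.log(body))
  .catch(err => {
    if (err instanceof errors.TimeoutError) {
      // formerly RequestTimeout
      console.log('The request timed out')
    } else if (err instanceof errors.ConnectionError) {
      // formerly ConnectionFault
      console.log('Connection problem')
    } else {
      console.log(err)
    }
  })
----
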
* You must specify the port number in the configuration. In the previous
|
||||
version, you could specify the host and port in a variety of ways. With the new
|
||||
client, there is only one way to do it, via the `node` parameter.
|
||||
|
||||
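For example, a configuration that previously used one of the legacy host forms
now takes a single `node` URL that includes the port:

[source,js]
----
// before (legacy client, one of the several accepted forms)
const { Client: LegacyClient } = require('elasticsearch')
const legacyClient = new LegacyClient({ host: 'localhost:9200' })

// after (new client)
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })
----
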
* Certificates are verified by default; if you want to disable certificate verification, you should set the `rejectUnauthorized` option to `false` inside the `ssl` configuration:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
ssl: { rejectUnauthorized: false }
|
||||
})
|
||||
----
|
||||
|
||||
* The `plugins` option has been removed. If you want to extend the client now,
|
||||
you should use the `client.extend` API.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const { Client } = require('elasticsearch')
|
||||
const client = new Client({ plugins: [...] })
|
||||
|
||||
// after
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ ... })
|
||||
client.extend(...)
|
||||
----
|
||||
|
||||
* There is a clear distinction between the API related parameters and the client
|
||||
related configurations. The parameters `ignore`, `headers`, `requestTimeout` and
|
||||
`maxRetries` are no longer part of the API object and you need to specify them
|
||||
in a second option object.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const body = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' },
|
||||
ignore: [404]
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' },
|
||||
ignore: [404]
|
||||
}, (err, body, statusCode, headers) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
|
||||
// after
|
||||
const { body, statusCode, headers, warnings } = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, {
|
||||
ignore: [404]
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, {
|
||||
ignore: [404]
|
||||
}, (err, { body, statusCode, headers, warnings }) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
* The `transport.request` method no longer accepts the `query` key. Use the
|
||||
`querystring` key instead (which can be a string or an object). If you
|
||||
need to send a bulk-like request, use the `bulkBody` key instead of the `body`
|
||||
key. In this method, the client-specific parameters should be passed as a second
|
||||
object.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const body = await client.transport.request({
|
||||
method: 'GET',
|
||||
path: '/my-index/_search',
|
||||
body: { foo: 'bar' },
|
||||
query: { bar: 'baz' }
|
||||
ignore: [404]
|
||||
})
|
||||
|
||||
client.transport.request({
|
||||
method: 'GET',
|
||||
path: '/my-index/_search',
|
||||
body: { foo: 'bar' },
|
||||
query: { bar: 'baz' }
|
||||
ignore: [404]
|
||||
}, (err, body, statusCode, headers) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
|
||||
// after
|
||||
const { body, statusCode, headers, warnings } = await client.transport.request({
|
||||
method: 'GET',
|
||||
path: '/my-index/_search',
|
||||
body: { foo: 'bar' },
|
||||
querystring: { bar: 'baz' }
|
||||
}, {
|
||||
ignore: [404]
|
||||
})
|
||||
|
||||
client.transport.request({
|
||||
method: 'GET',
|
||||
path: '/my-index/_search',
|
||||
body: { foo: 'bar' },
|
||||
querystring: { bar: 'baz' }
|
||||
}, {
|
||||
ignore: [404]
|
||||
}, (err, { body, statusCode, headers, warnings }) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Talk is cheap. Show me the code.
|
||||
|
||||
You can find a code snippet with the old client below followed by the same code
|
||||
logic but with the new client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client, errors } = require('elasticsearch')
|
||||
const client = new Client({
|
||||
host: 'http://localhost:9200',
|
||||
plugins: [utility]
|
||||
})
|
||||
|
||||
async function run () {
|
||||
try {
|
||||
const body = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
}
|
||||
},
|
||||
ignore: [404]
|
||||
})
|
||||
console.log(body)
|
||||
} catch (err) {
|
||||
if (err instanceof errors.BadRequest) {
|
||||
console.log('Bad request')
|
||||
} else {
|
||||
console.log(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function utility (Client, config, components) {
|
||||
const ca = components.clientAction.factory
|
||||
Client.prototype.utility = components.clientAction.namespaceFactory()
|
||||
const utility = Client.prototype.utility.prototype
|
||||
|
||||
utility.index = ca({
|
||||
params: {
|
||||
refresh: {
|
||||
type: 'enum',
|
||||
options: [
|
||||
'true',
|
||||
'false',
|
||||
'wait_for',
|
||||
''
|
||||
]
|
||||
},
|
||||
},
|
||||
urls: [
|
||||
{
|
||||
fmt: '/<%=index%>/_doc',
|
||||
req: {
|
||||
index: {
|
||||
type: 'string',
|
||||
required: true
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
needBody: true,
|
||||
method: 'POST'
|
||||
})
|
||||
}
|
||||
----
|
||||
|
||||
And now with the new client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client, errors } = require('@elastic/elasticsearch')
|
||||
// NOTE: `host` has been renamed to `node`,
|
||||
// and `plugins` is no longer supported
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
try {
|
||||
// NOTE: we are using the destructuring assignment
|
||||
const { body } = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
}
|
||||
}
|
||||
// NOTE: `ignore` now is in a separated object
|
||||
}, {
|
||||
ignore: [404]
|
||||
})
|
||||
console.log(body)
|
||||
} catch (err) {
|
||||
// NOTE: we are checking the `statusCode` property
|
||||
if (err.statusCode === 400) {
|
||||
console.log('Bad request')
|
||||
} else {
|
||||
console.log(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: we can still extend the client, but with a different API.
|
||||
// This new API is a little bit more verbose, since you must write
|
||||
// your own validations, but it's way more flexible.
|
||||
client.extend('utility.index', ({ makeRequest, ConfigurationError }) => {
|
||||
return function utilityIndex (params, options) {
|
||||
const { body, index, ...querystring } = params
|
||||
if (body == null) throw new ConfigurationError('Missing body')
|
||||
if (index == null) throw new ConfigurationError('Missing index')
|
||||
const requestParams = {
|
||||
method: 'POST',
|
||||
path: `/${index}/_doc`,
|
||||
body: body,
|
||||
querystring
|
||||
}
|
||||
return makeRequest(requestParams, options)
|
||||
}
|
||||
})
|
||||
----
|
||||
494
docs/changelog.asciidoc
Normal file
@ -0,0 +1,494 @@
|
||||
[[changelog-client]]
|
||||
== Release notes
|
||||
|
||||
[discrete]
|
||||
=== 8.7.3
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.7.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.7.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.6.1
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.6.0
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to 8.3.1+ https://github.com/elastic/elasticsearch-js/pull/1802[#1802]
|
||||
|
||||
The `@elastic/transport` dependency has been bumped to `~8.3.1` to ensure
|
||||
fixes to the `maxResponseSize` option are available in the client.
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.6.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.6/release-notes-8.6.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.5.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.5.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.5/release-notes-8.5.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.4.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.4.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.4/release-notes-8.4.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.2.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.2.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.1.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Fix ndjson APIs https://github.com/elastic/elasticsearch-js/pull/1688[#1688]
|
||||
|
||||
The previous release contained a bug that broke the ndjson APIs.
|
||||
We have released `v8.2.0-patch.1` to address this.
|
||||
This fix is the same as the one released in `v8.2.0-patch.1`, and we strongly recommend upgrading to this version.
|
||||
|
||||
[discrete]
|
||||
===== Fix node shutdown apis https://github.com/elastic/elasticsearch-js/pull/1697[#1697]
|
||||
|
||||
The shutdown APIs weren't complete; this fix completes them.
|
||||
|
||||
[discrete]
|
||||
==== Types: move query keys to body https://github.com/elastic/elasticsearch-js/pull/1693[#1693]
|
||||
|
||||
The type definitions were wrongly representing the types of fields present in both query and body.
|
||||
|
||||
[discrete]
|
||||
=== 8.2.0
|
||||
|
||||
[discrete]
|
||||
==== Breaking changes
|
||||
|
||||
[discrete]
|
||||
===== Drop Node.js v12 https://github.com/elastic/elasticsearch-js/pull/1670[#1670]
|
||||
|
||||
Node.js v12 is no longer supported, according to our https://github.com/elastic/elasticsearch-js#nodejs-support[Node.js support matrix].
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.2`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== More lenient parameter checks https://github.com/elastic/elasticsearch-js/pull/1662[#1662]
|
||||
|
||||
When creating a new client, an `undefined` `caFingerprint` no longer triggers an error for an http connection.
|
||||
|
||||
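For example, a configuration like the following, which previously triggered an
error even though `caFingerprint` is not actually set, is now accepted
(minimal sketch for illustration):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'http://localhost:9200',
  caFingerprint: undefined
})
----
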
[discrete]
|
||||
===== Update TypeScript docs and export estypes https://github.com/elastic/elasticsearch-js/pull/1675[#1675]
|
||||
|
||||
You can import the full TypeScript request & response definitions as follows:
|
||||
[source,ts]
|
||||
----
|
||||
import { estypes } from '@elastic/elasticsearch'
|
||||
----
|
||||
|
||||
If you need the legacy definitions with the body, you can do the following:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
import { estypesWithBody } from '@elastic/elasticsearch'
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Updated hpagent to the latest version https://github.com/elastic/elastic-transport-js/pull/49[transport/#49]
|
||||
|
||||
You can find the related changes https://github.com/delvedor/hpagent/releases/tag/v1.0.0[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.1.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.1/release-notes-8.1.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Export SniffingTransport https://github.com/elastic/elasticsearch-js/pull/1653[#1653]
|
||||
|
||||
Now the client exports the SniffingTransport class.
|
||||
|
||||
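A minimal sketch of how it might be wired in; note that passing a custom
transport class through the `Transport` option is an assumption based on the
client's support for custom transport classes, not something this release
changes:

[source,js]
----
const { Client, SniffingTransport } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  // assumption: custom transport classes are passed via the Transport option
  Transport: SniffingTransport,
  sniffOnStart: true
})
----
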
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix onFlushTimeout timer not being cleared when upstream errors https://github.com/elastic/elasticsearch-js/pull/1616[#1616]
|
||||
|
||||
Fixes a memory leak caused by an error in the upstream dataset of the bulk helper.
|
||||
|
||||
[discrete]
|
||||
===== Cleanup abort listener https://github.com/elastic/elastic-transport-js/pull/42[transport/#42]
|
||||
|
||||
The legacy http client was not cleaning up the abort listener, which could cause a memory leak.
|
||||
|
||||
[discrete]
|
||||
===== Improve undici performances https://github.com/elastic/elastic-transport-js/pull/41[transport/#41]
|
||||
|
||||
Improve the stream body collection and keep alive timeout.
|
||||
|
||||
[discrete]
|
||||
=== 8.0.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.0/release-notes-8.0.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Drop old typescript definitions
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Medium*
|
||||
|
||||
The current TypeScript definitions will be removed from the client, and the new definitions, which contain request and response definitions as well, will be shipped by default.
|
||||
|
||||
[discrete]
|
||||
===== Drop callback-style API
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Large*
|
||||
|
||||
Maintaining both API styles is not a problem per se, but it makes error handling more convoluted due to async stack traces.
|
||||
Moving to a full-promise API will solve this issue.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// callback-style api
|
||||
client.search({ params }, { options }, (err, result) => {
|
||||
console.log(err || result)
|
||||
})
|
||||
|
||||
// promise-style api
|
||||
client.search({ params }, { options })
|
||||
.then(console.log)
|
||||
.catch(console.log)
|
||||
|
||||
// async-style (sugar syntax on top of promises)
|
||||
const response = await client.search({ params }, { options })
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
If you are already using the promise-style API, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Remove the current abort API and use the new AbortController standard
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The old abort API makes sense for callbacks, but it's annoying to use with promises.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// callback-style api
|
||||
const request = client.search({ params }, { options }, (err, result) => {
|
||||
console.log(err) // RequestAbortedError
|
||||
})
|
||||
|
||||
request.abort()
|
||||
|
||||
// promise-style api
|
||||
const promise = client.search({ params }, { options })
|
||||
|
||||
promise
|
||||
.then(console.log)
|
||||
.catch(console.log) // RequestAbortedError
|
||||
|
||||
promise.abort()
|
||||
----
|
||||
|
||||
Node.js has since added the standard https://nodejs.org/api/globals.html#globals_class_abortcontroller[`AbortController`] API (available from v14.17.0), which is designed to work well with both callbacks and promises.
|
||||
[source,js]
|
||||
----
|
||||
const ac = new AbortController()
|
||||
client.search({ params }, { signal: ac.signal })
|
||||
.then(console.log)
|
||||
.catch(console.log) // RequestAbortedError
|
||||
|
||||
ac.abort()
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove the body key from the request
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Thanks to the new types we are developing, we now know exactly where a parameter should go.
|
||||
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
|
||||
|
||||
This could be a rather big breaking change, so a dual solution could be used during the 8.x lifecycle (accepting body keys without them being wrapped in the body, as well as the current solution).
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// to
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Migrate to new separate transport
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
The separated transport has been rewritten in TypeScript and has already dropped the callback style API.
|
||||
Given that it is now separated, most of the Elasticsearch-specific concepts have been removed, and the client will likely need to extend parts of it to reintroduce them.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
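If you were, a hedged sketch of extending the new separated transport might
look like this (the `Transport` class comes from `@elastic/transport`; wiring
it in through the `Transport` client option is assumed here, based on the
client's extension points):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const { Transport } = require('@elastic/transport')

class MyTransport extends Transport {
  request (params, options) {
    // place custom logic around each request here
    return super.request(params, options)
  }
}

const client = new Client({
  node: 'http://localhost:9200',
  Transport: MyTransport
})
----
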
[discrete]
|
||||
===== The returned value of API calls is the body and not the HTTP related keys
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
|
||||
The client will expose a new request-specific option to still get the full response details.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }
|
||||
|
||||
// to
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
})
|
||||
console.log(response) // SearchResponse
|
||||
|
||||
// with a bit of TypeScript and JavaScript magic...
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}, {
|
||||
meta: true
|
||||
})
|
||||
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Use a weighted connection pool
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
Move from the current cluster connection pool to a weight-based implementation.
|
||||
This new implementation offers better performance and runs less code in the background; the old connection pool can still be used.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Migrate to the "undici" http client
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
The default HTTP client will no longer be the Node.js core HTTP client, but https://github.com/nodejs/undici[undici] instead.
|
||||
Undici is a brand new HTTP client written from scratch; it offers vastly improved performance and has better support for promises.
|
||||
Furthermore, it offers comprehensive and predictable error handling. The old HTTP client can still be used.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
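If you do want to keep using the Node.js core HTTP client, a sketch of opting
out of undici could look like this (it assumes the `HttpConnection` export and
the `Connection` option are available, as documented for the 8.x client):

[source,js]
----
const { Client, HttpConnection } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  // use the Node.js core HTTP client instead of undici
  Connection: HttpConnection
})
----
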
[discrete]
|
||||
===== Drop support for old camelCased keys
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Medium*
|
||||
|
||||
Currently, every path or query parameter can be expressed in both `snake_case` and `camelCase`. Internally, the client converts everything to `snake_case`.
|
||||
This was done in an effort to reduce the friction of migrating from the legacy to the new client, but now it no longer makes sense.
|
||||
If you are already using `snake_case` keys, this won't be a breaking change for you.
|
||||
|
||||
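If you are not, a call that previously used a camelCased key must now use the
snake_cased form, for example:

[source,js]
----
// before (v7, camelCase accepted and converted internally)
await client.indices.create({
  index: 'my-index',
  waitForActiveShards: '1'
})

// after (v8, snake_case only)
await client.indices.create({
  index: 'my-index',
  wait_for_active_shards: '1'
})
----
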
[discrete]
|
||||
===== Rename `ssl` option to `tls`
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
People usually refer to this as `tls`; furthermore, internally we use the tls API and Node.js refers to it as tls everywhere.
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
ssl: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
|
||||
// after
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
tls: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove prototype poisoning protection
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Prototype poisoning protection is very useful, but it can cause performance issues with big payloads.
|
||||
In v8 it will be removed, and the documentation will show how to add it back with a custom serializer.
|
||||
|
||||
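A hedged sketch of what adding the protection back might look like: the
`Serializer` base class comes from `@elastic/transport`, the checks use
https://github.com/fastify/secure-json-parse[secure-json-parse], and passing a
custom serializer class to the client is an assumption here, not a confirmed
v8 API:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const { Serializer } = require('@elastic/transport')
const sjson = require('secure-json-parse')

class SecureSerializer extends Serializer {
  deserialize (json) {
    // reject payloads containing __proto__ or constructor.prototype keys
    return sjson.parse(json, null, { protoAction: 'error', constructorAction: 'error' })
  }
}

const client = new Client({
  node: 'http://localhost:9200',
  // assumption: a custom serializer class can be passed as in the v7 client
  Serializer: SecureSerializer
})
----
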
[discrete]
|
||||
===== Remove client extensions API
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Large*
|
||||
|
||||
Nowadays the client supports the entire Elasticsearch API, and the `transport.request` method can be used if necessary. The client extensions API has no reason to exist.
|
||||
[source,js]
|
||||
----
|
||||
client.extend('utility.index', ({ makeRequest }) => {
|
||||
return function _index (params, options) {
|
||||
// your code
|
||||
}
|
||||
})
|
||||
|
||||
client.utility.index(...)
|
||||
----
|
||||
|
||||
If you weren't using client extensions, this won't be a breaking change for you.
|
||||
|
||||
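If you were, the same behavior can usually be reproduced with a plain helper
built on top of `client.transport.request`; a minimal sketch:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function utilityIndex (index, document, options) {
  // equivalent of the old utility.index extension
  return client.transport.request({
    method: 'POST',
    path: `/${encodeURIComponent(index)}/_doc`,
    body: document
  }, options)
}

// usage: await utilityIndex('my-index', { hello: 'world' })
----
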
[discrete]
|
||||
===== Move to TypeScript
|
||||
|
||||
*Breaking: No* | *Migration effort: None*
|
||||
|
||||
The new separated transport is already written in TypeScript, and it makes sense that the client v8 will be fully written in TypeScript as well.
|
||||
|
||||
[discrete]
|
||||
===== Move from emitter-like interface to a diagnostic method
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Currently, the client offers a subset of the methods of the `EventEmitter` class; v8 will ship with a `diagnostic` property, which will be a proper event emitter.
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
client.on('request', console.log)
|
||||
|
||||
// to
|
||||
client.diagnostic.on('request', console.log)
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove username & password properties from Cloud configuration
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The Cloud configuration does not support ApiKey and Bearer auth, while the `auth` option does.
|
||||
There is no need to keep the legacy basic auth support in the cloud configuration.
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>',
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
|
||||
// after
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
If you are already passing the basic auth options in the `auth` configuration, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Calling `client.close` will reject new requests
|
||||
|
||||
Once you call `client.close`, every new request after that will be rejected with a `NoLivingConnectionsError`. In-flight requests will be executed normally, unless an in-flight request requires a retry, in which case it will be rejected.
|
||||
|
||||
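A minimal sketch of this behavior:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

await client.close()

try {
  await client.info()
} catch (err) {
  console.log(err.name) // NoLivingConnectionsError
}
----
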
[discrete]
|
||||
===== Parameters rename
|
||||
|
||||
- `ilm.delete_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `ilm.get_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `ilm.put_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `snapshot.cleanup_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.create_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.delete_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.get_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.verify_repository`: `repository` parameter has been renamed to `name`
|
||||
|
||||
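For example, fetching an ILM policy now uses `name` instead of `policy`:

[source,js]
----
// before (v7)
await client.ilm.getLifecycle({ policy: 'my-policy' })

// after (v8)
await client.ilm.getLifecycle({ name: 'my-policy' })
----
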
[discrete]
|
||||
===== Removal of snake_cased methods
|
||||
|
||||
The v7 client provided snake_cased methods, such as `client.delete_by_query`. This is no longer supported; now only camelCased methods are present.
|
||||
So `client.delete_by_query` is now accessed as `client.deleteByQuery`.
|
||||
|
||||
@ -22,12 +22,15 @@ will be closed.
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const child = client.child({
|
||||
headers: { 'x-foo': 'bar' },
|
||||
requestTimeout: 1000
|
||||
})
|
||||
|
||||
client.info(console.log)
|
||||
child.info(console.log)
|
||||
client.info().then(console.log, console.log)
|
||||
child.info().then(console.log, console.log)
|
||||
----
|
||||
@ -8,5 +8,4 @@ section, you can see the possible options that you can use to configure it.
|
||||
* <<basic-config>>
|
||||
* <<advanced-config>>
|
||||
* <<child>>
|
||||
* <<extend>>
|
||||
* <<client-testing>>
|
||||
|
||||
@ -6,14 +6,14 @@ This page contains the information you need to connect and use the Client with
|
||||
|
||||
**On this page**
|
||||
|
||||
* <<auth-reference, Authentication options>>
|
||||
* <<authentication, Authentication options>>
|
||||
* <<client-usage, Using the client>>
|
||||
* <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
|
||||
* <<client-connect-proxy, Connecting through a proxy>>
|
||||
* <<client-error-handling, Handling errors>>
|
||||
* <<product-check, Automatic product check>>
|
||||
|
||||
[[auth-reference]]
|
||||
[[authentication]]
|
||||
[discrete]
|
||||
=== Authentication
|
||||
|
||||
@ -32,7 +32,7 @@ the `auth` option.
|
||||
|
||||
NOTE: When connecting to Elastic Cloud, the client will automatically enable
|
||||
both request and response compression by default, since it yields significant
|
||||
throughput improvements. Moreover, the client will also set the ssl option
|
||||
throughput improvements. Moreover, the client will also set the tls option
|
||||
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
|
||||
override this option by configuring it.
|
||||
|
||||
@ -46,7 +46,7 @@ to know more.
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==',
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
@ -55,6 +55,152 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[connect-self-managed-new]]
|
||||
=== Connecting to a self-managed cluster
|
||||
|
||||
By default {es} will start with security features like authentication and TLS
|
||||
enabled. To connect to the {es} cluster you'll need to configure the Node.js {es}
|
||||
client to use HTTPS with the generated CA certificate in order to make requests
|
||||
successfully.
|
||||
|
||||
If you're just getting started with {es} we recommend reading the documentation
|
||||
on https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html[configuring]
|
||||
and
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html[starting {es}]
|
||||
to ensure your cluster is running as expected.
|
||||
|
||||
When you start {es} for the first time you'll see a distinct block like the one
|
||||
below in the output from {es} (you may have to scroll up if it's been a while):
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
|
||||
-> Elasticsearch security features have been automatically configured!
|
||||
-> Authentication is enabled and cluster connections are encrypted.
|
||||
|
||||
-> Password for the elastic user (reset with `bin/elasticsearch-reset-password -u elastic`):
|
||||
lhQpLELkjkrawaBoaz0Q
|
||||
|
||||
-> HTTP CA certificate SHA-256 fingerprint:
|
||||
a52dd93511e8c6045e21f16654b77c9ee0f34aea26d9f40320b531c474676228
|
||||
...
|
||||
|
||||
----
|
||||
|
||||
Depending on the circumstances there are two options for verifying the HTTPS
|
||||
connection, either verifying with the CA certificate itself or via the HTTP CA
|
||||
certificate fingerprint.
|
||||
|
||||
[discrete]
|
||||
[[auth-tls]]
|
||||
==== TLS configuration
|
||||
|
||||
The generated root CA certificate can be found in the `certs` directory in your
|
||||
{es} config location (`$ES_CONF_PATH/certs/http_ca.crt`). If you're running {es}
|
||||
in Docker there is
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html[additional documentation for retrieving the CA certificate].
|
||||
|
||||
Without any additional configuration you can specify `https://` node urls, and
|
||||
the certificates used to sign these requests will be verified. To turn off
|
||||
certificate verification, you must specify a `tls` object in the top level
|
||||
config and set `rejectUnauthorized: false`. The default `tls` values are the
|
||||
same that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
|
||||
uses.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
},
|
||||
tls: {
|
||||
ca: fs.readFileSync('./http_ca.crt'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-ca-fingerprint]]
|
||||
==== CA fingerprint
|
||||
|
||||
You can configure the client to only trust certificates that are signed by a specific CA certificate
|
||||
(CA certificate pinning) by providing a `caFingerprint` option.
|
||||
This will verify that the fingerprint of the CA certificate that has signed
|
||||
the certificate of the server matches the supplied value.
|
||||
You must configure a SHA256 digest.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://example.com'
|
||||
auth: { ... },
|
||||
// the fingerprint (SHA256) of the CA certificate that is used to sign
|
||||
// the certificate that the Elasticsearch node presents for TLS.
|
||||
caFingerprint: '20:0D:CA:FA:76:...',
|
||||
tls: {
|
||||
// might be required if it's a self-signed certificate
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
The certificate fingerprint can be calculated using `openssl x509` with the
|
||||
certificate file:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
openssl x509 -fingerprint -sha256 -noout -in /path/to/http_ca.crt
|
||||
----
|
||||
|
||||
If you don't have access to the generated CA file from {es} you can use the
|
||||
following script to output the root CA fingerprint of the {es} instance with
|
||||
`openssl s_client`:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
# Replace the values of 'localhost' and '9200' with the
|
||||
# corresponding host and port values for the cluster.
|
||||
openssl s_client -connect localhost:9200 -servername localhost -showcerts </dev/null 2>/dev/null \
|
||||
| openssl x509 -fingerprint -sha256 -noout -in /dev/stdin
|
||||
----
|
||||
|
||||
The output of `openssl x509` will look something like this:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
SHA256 Fingerprint=A5:2D:D9:35:11:E8:C6:04:5E:21:F1:66:54:B7:7C:9E:E0:F3:4A:EA:26:D9:F4:03:20:B5:31:C4:74:67:62:28
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[connect-no-security]]
|
||||
=== Connecting without security enabled
|
||||
|
||||
WARNING: Running {es} without security enabled is not recommended.
|
||||
|
||||
If your cluster is configured with
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html[security explicitly disabled]
|
||||
then you can connect via HTTP:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://example.com'
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-strategies]]
|
||||
=== Authentication strategies
|
||||
|
||||
Below you can find all the supported authentication strategies.
|
||||
|
||||
[discrete]
|
||||
[[auth-apikey]]
|
||||
@ -150,57 +296,6 @@ const client = new Client({
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[auth-ssl]]
|
||||
==== SSL configuration
|
||||
|
||||
Without any additional configuration you can specify `https://` node urls, and
|
||||
the certificates used to sign these requests will be verified. To turn off
|
||||
certificate verification, you must specify an `ssl` object in the top level
|
||||
config and set `rejectUnauthorized: false`. The default `ssl` values are the
|
||||
same that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
|
||||
uses.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
},
|
||||
ssl: {
|
||||
ca: fs.readFileSync('./cacert.pem'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-ca-fingerprint]]
|
||||
==== CA fingerprint
|
||||
|
||||
You can configure the client to only trust certificates that are signed by a specific CA certificate ( CA certificate pinning ) by providing a `caFingerprint` option. This will verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied value.
|
||||
a `caFingerprint` option, which will verify the supplied certificate authority fingerprint.
|
||||
You must configure a SHA256 digest.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://example.com'
|
||||
auth: { ... },
|
||||
// the fingerprint (SHA256) of the CA certificate that is used to sign the certificate that the Elasticsearch node presents for TLS.
|
||||
caFingerprint: '20:0D:CA:FA:76:...',
|
||||
ssl: {
|
||||
// might be required if it's a self-signed certificate
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[client-usage]]
|
||||
=== Usage
|
||||
@ -212,33 +307,40 @@ and every method exposes the same signature.
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// promise API
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
The returned value of every API call is designed as follows:
|
||||
The returned value of every API call is the response body from {es}.
|
||||
If you need to access additional metadata, such as the status code or headers,
|
||||
you must specify `meta: true` in the request options:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}, { meta: true })
|
||||
----
|
||||
|
||||
In this case, the result will be:
|
||||
[source,ts]
|
||||
----
|
||||
{
|
||||
@ -252,44 +354,10 @@ The returned value of every API call is designed as follows:
|
||||
|
||||
NOTE: The body is a boolean value when you use `HEAD` APIs.
|
||||
|
||||
The above value is returned even if there is an error during the execution of
|
||||
the request, this means that you can safely use the
|
||||
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Destructuring_assignment[destructuring assignment].
|
||||
|
||||
The `meta` key contains all the information about the request, such as attempt,
|
||||
options, and the connection that has been used.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// promise API
|
||||
const { body } = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, (err, { body }) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Aborting a request
|
||||
|
||||
If needed, you can abort a running request by calling the `request.abort()`
|
||||
method returned by the API.
|
||||
If needed, you can abort a running request by using the `AbortController` standard.
|
||||
|
||||
CAUTION: If you abort a request, the request will fail with a
|
||||
`RequestAbortedError`.
|
||||
@ -297,51 +365,24 @@ CAUTION: If you abort a request, the request will fail with a
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const request = client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
}, (err, result) => {
|
||||
if (err) {
|
||||
console.log(err) // RequestAbortedError
|
||||
} else {
|
||||
console.log(result)
|
||||
}
|
||||
const AbortController = require('node-abort-controller')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
request.abort()
|
||||
----
|
||||
const abortController = new AbortController()
|
||||
setImmediate(() => abortController.abort())
|
||||
|
||||
The same behavior is valid for the promise style API as well.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const request = client.search({
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
})
|
||||
|
||||
request
|
||||
.then(result => console.log(result))
|
||||
.catch(err => console.log(err)) // RequestAbortedError
|
||||
|
||||
request.abort()
|
||||
}, { signal: abortController.signal })
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Request specific options
|
||||
|
||||
@ -349,7 +390,6 @@ If needed you can pass request specific options in a second object:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// promise API
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
@ -361,21 +401,6 @@ const result = await client.search({
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
}, (err, { body }) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
@ -427,6 +452,10 @@ _Default:_ `null`
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it is higher, the request is aborted. It cannot be higher than `buffer.constants.MAX_LENGTH`. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`signal`
|
||||
|`AbortSignal` - The AbortSignal instance to allow request abortion. +
|
||||
_Default:_ `null`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
|
||||
@ -9,12 +9,15 @@ data.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
const { body: bulkResponse } = await client.bulk({
|
||||
const bulkResponse = await client.bulk({
|
||||
refresh: true,
|
||||
body: [
|
||||
operations: [
|
||||
// operation to perform
|
||||
{ index: { _index: 'game-of-thrones' } },
|
||||
// the document to index
|
||||
@ -43,13 +46,11 @@ async function run () {
|
||||
}
|
||||
|
||||
// Let's search!
|
||||
const { body } = await client.search({
|
||||
const result = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
quote: 'winter'
|
||||
}
|
||||
query: {
|
||||
match: {
|
||||
quote: 'winter'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
@ -59,17 +60,17 @@ async function run () {
|
||||
// stream async iteration, available in Node.js ≥ 10
|
||||
let payload = ''
|
||||
body.setEncoding('utf8')
|
||||
for await (const chunk of body) {
|
||||
for await (const chunk of result) {
|
||||
payload += chunk
|
||||
}
|
||||
console.log(JSON.parse(payload))
|
||||
|
||||
// classic stream callback style
|
||||
let payload = ''
|
||||
body.setEncoding('utf8')
|
||||
body.on('data', chunk => { payload += chunk })
|
||||
body.on('error', console.log)
|
||||
body.on('end', () => {
|
||||
result.setEncoding('utf8')
|
||||
result.on('data', chunk => { payload += chunk })
|
||||
result.on('error', console.log)
|
||||
result.on('end', () => {
|
||||
console.log(JSON.parse(payload))
|
||||
})
|
||||
}
|
||||
@ -85,15 +86,19 @@ send it directly to another source.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const fastify = require('fastify')()
|
||||
|
||||
fastify.post('/search/:index', async (req, reply) => {
|
||||
const { body, statusCode, headers } = await client.search({
|
||||
index: req.params.index,
|
||||
body: req.body
|
||||
...req.body
|
||||
}, {
|
||||
asStream: true
|
||||
asStream: true,
|
||||
meta: true
|
||||
})
|
||||
|
||||
reply.code(statusCode).headers(headers)
|
||||
|
||||
@ -13,13 +13,14 @@ NOTE: Did you know that we provide an helper for sending bulk request? You can f
|
||||
require('array.prototype.flatmap').shim()
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.indices.create({
|
||||
index: 'tweets',
|
||||
body: {
|
||||
operations: {
|
||||
mappings: {
|
||||
properties: {
|
||||
id: { type: 'integer' },
|
||||
@ -58,9 +59,9 @@ async function run () {
|
||||
date: new Date()
|
||||
}]
|
||||
|
||||
const body = dataset.flatMap(doc => [{ index: { _index: 'tweets' } }, doc])
|
||||
const operations = dataset.flatMap(doc => [{ index: { _index: 'tweets' } }, doc])
|
||||
|
||||
const { body: bulkResponse } = await client.bulk({ refresh: true, body })
|
||||
const bulkResponse = await client.bulk({ refresh: true, operations })
|
||||
|
||||
if (bulkResponse.errors) {
|
||||
const erroredDocuments = []
|
||||
@ -76,15 +77,15 @@ async function run () {
|
||||
// fix the document before to try it again.
|
||||
status: action[operation].status,
|
||||
error: action[operation].error,
|
||||
operation: body[i * 2],
|
||||
document: body[i * 2 + 1]
|
||||
operation: operations[i * 2],
|
||||
document: operations[i * 2 + 1]
|
||||
})
|
||||
}
|
||||
})
|
||||
console.log(erroredDocuments)
|
||||
}
|
||||
|
||||
const { body: count } = await client.count({ index: 'tweets' })
|
||||
const count = await client.count({ index: 'tweets' })
|
||||
console.log(count)
|
||||
}
|
||||
|
||||
|
||||
@ -10,24 +10,27 @@ NOTE: Since this API uses the `HEAD` method, the body value will be boolean.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.exists({
|
||||
const exists = await client.exists({
|
||||
index: 'game-of-thrones',
|
||||
id: 1
|
||||
})
|
||||
|
||||
console.log(body) // true
|
||||
console.log(exists) // true
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -10,24 +10,27 @@ The following example gets a JSON document from an index called
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.get({
|
||||
const document = await client.get({
|
||||
index: 'game-of-thrones',
|
||||
id: '1'
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
console.log(document)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -8,12 +8,15 @@ HTTP status codes which should not be considered errors for this request.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
const { body: bulkResponse } = await client.bulk({
|
||||
const bulkResponse = await client.bulk({
|
||||
refresh: true,
|
||||
body: [
|
||||
operations: [
|
||||
// operation to perform
|
||||
{ index: { _index: 'game-of-thrones' } },
|
||||
// the document to index
|
||||
@ -42,7 +45,7 @@ async function run () {
|
||||
}
|
||||
|
||||
// Let's search!
|
||||
const { body } = await client.search({
|
||||
const result = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: {
|
||||
@ -55,7 +58,7 @@ async function run () {
|
||||
ignore: [404]
|
||||
})
|
||||
|
||||
console.log(body) // ResponseError
|
||||
console.log(result) // ResponseError
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -17,7 +17,6 @@ Following you can find some examples on how to use the client.
|
||||
* Executing a <<search_examples,search>> request;
|
||||
* I need <<suggest_examples,suggestions>>;
|
||||
* How to use the <<transport_request_examples,transport.request>> method;
|
||||
* How to use <<typescript_examples,TypeScript>>;
|
||||
|
||||
include::asStream.asciidoc[]
|
||||
include::bulk.asciidoc[]
|
||||
@ -29,7 +28,6 @@ include::scroll.asciidoc[]
|
||||
include::search.asciidoc[]
|
||||
include::suggest.asciidoc[]
|
||||
include::transport.request.asciidoc[]
|
||||
include::typescript.asciidoc[]
|
||||
include::sql.query.asciidoc[]
|
||||
include::update.asciidoc[]
|
||||
include::update_by_query.asciidoc[]
|
||||
|
||||
@ -9,12 +9,15 @@ API.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
const { body: bulkResponse } = await client.bulk({
|
||||
const bulkResponse = await client.bulk({
|
||||
refresh: true,
|
||||
body: [
|
||||
operations: [
|
||||
{ index: { _index: 'game-of-thrones' } },
|
||||
{
|
||||
character: 'Ned Stark',
|
||||
@ -40,8 +43,8 @@ async function run () {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const { body } = await client.msearch({
|
||||
body: [
|
||||
const result = await client.msearch({
|
||||
searches: [
|
||||
{ index: 'game-of-thrones' },
|
||||
{ query: { match: { character: 'Daenerys' } } },
|
||||
|
||||
@ -50,7 +53,7 @@ async function run () {
|
||||
]
|
||||
})
|
||||
|
||||
console.log(body.responses)
|
||||
console.log(result.responses)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -70,17 +70,15 @@ module.exports = async (req, res) => {
|
||||
// expose you to the risk that a malicious user
|
||||
// could overload your cluster by crafting
|
||||
// expensive queries.
|
||||
body: {
|
||||
_source: ['id', 'url', 'name'], // the fields you want to show in the autocompletion
|
||||
size: 0,
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-completion.html
|
||||
suggest: {
|
||||
suggestions: {
|
||||
prefix: req.query.q,
|
||||
completion: {
|
||||
field: 'suggest',
|
||||
size: 5
|
||||
}
|
||||
_source: ['id', 'url', 'name'], // the fields you want to show in the autocompletion
|
||||
size: 0,
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-completion.html
|
||||
suggest: {
|
||||
suggestions: {
|
||||
prefix: req.query.q,
|
||||
completion: {
|
||||
field: 'suggest',
|
||||
size: 5
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -93,7 +91,7 @@ module.exports = async (req, res) => {
|
||||
// It might be useful to configure http control caching headers
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
|
||||
// res.setHeader('stale-while-revalidate', '30')
|
||||
res.json(response.body)
|
||||
res.json(response)
|
||||
} catch (err) {
|
||||
res.status(err.statusCode || 500)
|
||||
res.json({
|
||||
|
||||
@ -62,7 +62,7 @@ module.exports = async (req, res) => {
|
||||
}
|
||||
})
|
||||
|
||||
res.json(response.body)
|
||||
res.json(response)
|
||||
} catch (err) {
|
||||
res.status(err.statusCode || 500)
|
||||
res.json({
|
||||
|
||||
@ -56,11 +56,12 @@ module.exports = async (req, res) => {
|
||||
const response = await client.index({
|
||||
index: INDEX,
|
||||
id: req.query.id,
|
||||
body: req.body
|
||||
document: req.body
|
||||
}, {
|
||||
headers: {
|
||||
Authorization: `ApiKey ${token}`
|
||||
}
|
||||
},
|
||||
meta: true
|
||||
})
|
||||
|
||||
res.status(response.statusCode)
|
||||
|
||||
@ -60,10 +60,8 @@ module.exports = async (req, res) => {
|
||||
// expose you to the risk that a malicious user
|
||||
// could overload your cluster by crafting
|
||||
// expensive queries.
|
||||
body: {
|
||||
query: {
|
||||
match: { field: req.body.text }
|
||||
}
|
||||
query: {
|
||||
match: { field: req.body.text }
|
||||
}
|
||||
}, {
|
||||
headers: {
|
||||
@ -74,7 +72,7 @@ module.exports = async (req, res) => {
|
||||
// It might be useful to configure http control caching headers
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
|
||||
// res.setHeader('stale-while-revalidate', '30')
|
||||
res.json(response.body)
|
||||
res.json(response)
|
||||
} catch (err) {
|
||||
res.status(err.statusCode || 500)
|
||||
res.json({
|
||||
|
||||
@ -11,7 +11,7 @@
|
||||
"author": "Tomas Della Vedova",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@elastic/elasticsearch": "^7.10.0"
|
||||
"@elastic/elasticsearch": "^8.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"standard": "^16.0.3"
|
||||
|
||||
@ -43,21 +43,19 @@ async function generateApiKeys (opts) {
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.security.createApiKey({
|
||||
body: {
|
||||
name: 'elasticsearch-proxy',
|
||||
role_descriptors: {
|
||||
'elasticsearch-proxy-users': {
|
||||
index: [{
|
||||
names: indexNames,
|
||||
privileges
|
||||
}]
|
||||
}
|
||||
const result = await client.security.createApiKey({
|
||||
name: 'elasticsearch-proxy',
|
||||
role_descriptors: {
|
||||
'elasticsearch-proxy-users': {
|
||||
index: [{
|
||||
names: indexNames,
|
||||
privileges
|
||||
}]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return Buffer.from(`${body.id}:${body.api_key}`).toString('base64')
|
||||
return Buffer.from(`${result.id}:${result.api_key}`).toString('base64')
|
||||
}
|
||||
|
||||
generateApiKeys()
|
||||
|
||||
@ -15,12 +15,15 @@ the house Stark and remove the `house` field from the document source.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
house: 'stark'
|
||||
@ -29,7 +32,7 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.',
|
||||
house: 'stark'
|
||||
@ -39,7 +42,7 @@ async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
document: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A Lannister always pays his debts.',
|
||||
house: 'lannister'
|
||||
@ -47,33 +50,29 @@ async function run () {
|
||||
})
|
||||
|
||||
await client.reindex({
|
||||
waitForCompletion: true,
|
||||
wait_for_completion: true,
|
||||
refresh: true,
|
||||
body: {
|
||||
source: {
|
||||
index: 'game-of-thrones',
|
||||
query: {
|
||||
match: { character: 'stark' }
|
||||
}
|
||||
},
|
||||
dest: {
|
||||
index: 'stark-index'
|
||||
},
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source.remove("house")'
|
||||
source: {
|
||||
index: 'game-of-thrones',
|
||||
query: {
|
||||
match: { character: 'stark' }
|
||||
}
|
||||
},
|
||||
dest: {
|
||||
index: 'stark-index'
|
||||
},
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source.remove("house")'
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.search({
|
||||
const result = await client.search({
|
||||
index: 'stark-index',
|
||||
body: {
|
||||
query: { match_all: {} }
|
||||
}
|
||||
query: { match_all: {} }
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
console.log(result.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -26,19 +26,22 @@ NOTE: Did you know that we provide an helper for sending scroll requests? You ca
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
const allQuotes = []
|
||||
const responseQueue = []
|
||||
|
||||
// Let's index some data!
|
||||
const { body: bulkResponse } = await client.bulk({
|
||||
const bulkResponse = await client.bulk({
|
||||
// here we are forcing an index refresh,
|
||||
// otherwise we will not get any result
|
||||
// in the consequent search
|
||||
refresh: true,
|
||||
body: [
|
||||
operations: [
|
||||
// operation to perform
|
||||
{ index: { _index: 'game-of-thrones' } },
|
||||
// the document to index
|
||||
@ -76,17 +79,15 @@ async function run () {
|
||||
size: 1,
|
||||
// filter the source to only include the quote field
|
||||
_source: ['quote'],
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
})
|
||||
|
||||
responseQueue.push(response)
|
||||
|
||||
while (responseQueue.length) {
|
||||
const { body } = responseQueue.shift()
|
||||
const body = responseQueue.shift()
|
||||
|
||||
// collect the titles from this response
|
||||
body.hits.hits.forEach(function (hit) {
|
||||
@ -102,7 +103,7 @@ async function run () {
|
||||
// get the next response if there are more quotes to fetch
|
||||
responseQueue.push(
|
||||
await client.scroll({
|
||||
scrollId: body._scroll_id,
|
||||
scroll_id: body._scroll_id,
|
||||
scroll: '30s'
|
||||
})
|
||||
)
|
||||
@ -120,14 +121,17 @@ async iteration!
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
// Scroll utility
|
||||
async function * scrollSearch (params) {
|
||||
let response = await client.search(params)
|
||||
|
||||
while (true) {
|
||||
const sourceHits = response.body.hits.hits
|
||||
const sourceHits = response.hits.hits
|
||||
|
||||
if (sourceHits.length === 0) {
|
||||
break
|
||||
@ -137,12 +141,12 @@ async function * scrollSearch (params) {
|
||||
yield hit
|
||||
}
|
||||
|
||||
if (!response.body._scroll_id) {
|
||||
if (!response._scroll_id) {
|
||||
break
|
||||
}
|
||||
|
||||
response = await client.scroll({
|
||||
scrollId: response.body._scroll_id,
|
||||
scroll_id: response._scroll_id,
|
||||
scroll: params.scroll
|
||||
})
|
||||
}
|
||||
@ -151,7 +155,7 @@ async function * scrollSearch (params) {
|
||||
async function run () {
|
||||
await client.bulk({
|
||||
refresh: true,
|
||||
body: [
|
||||
operations: [
|
||||
{ index: { _index: 'game-of-thrones' } },
|
||||
{
|
||||
character: 'Ned Stark',
|
||||
@ -177,10 +181,8 @@ async function run () {
|
||||
scroll: '30s',
|
||||
size: 1,
|
||||
_source: ['quote'],
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -12,13 +12,16 @@ https://www.elastic.co/guide/en/elasticsearch/reference/6.6/search-request-body.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
// Let's start by indexing some data
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
@ -26,7 +29,7 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Daenerys Targaryen',
|
||||
quote: 'I am the blood of the dragon.'
|
||||
}
|
||||
@ -38,25 +41,23 @@ async function run () {
|
||||
// otherwise we will not get any result
|
||||
// in the consequent search
|
||||
refresh: true,
|
||||
body: {
|
||||
document: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A mind needs books like a sword needs a whetstone.'
|
||||
}
|
||||
})
|
||||
|
||||
// Let's search!
|
||||
const { body } = await client.search({
|
||||
const result = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
quote: 'winter'
|
||||
}
|
||||
query: {
|
||||
match: {
|
||||
quote: 'winter'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
console.log(result.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -17,12 +17,15 @@ manipulate the result to obtain an object easy to navigate.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
house: 'stark'
|
||||
@ -31,7 +34,7 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.',
|
||||
house: 'stark'
|
||||
@ -41,25 +44,23 @@ async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
document: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A Lannister always pays his debts.',
|
||||
house: 'lannister'
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.sql.query({
|
||||
body: {
|
||||
query: "SELECT * FROM \"game-of-thrones\" WHERE house='stark'"
|
||||
}
|
||||
const result = await client.sql.query({
|
||||
query: "SELECT * FROM \"game-of-thrones\" WHERE house='stark'"
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
console.log(result)
|
||||
|
||||
const data = body.rows.map(row => {
|
||||
const data = result.rows.map(row => {
|
||||
const obj = {}
|
||||
for (let i = 0; i < row.length; i++) {
|
||||
obj[body.columns[i].name] = row[i]
|
||||
obj[result.columns[i].name] = row[i]
|
||||
}
|
||||
return obj
|
||||
})
|
||||
|
||||
@ -12,12 +12,15 @@ request. If the query part is left out, only suggestions are returned.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
const { body: bulkResponse } = await client.bulk({
|
||||
const bulkResponse = await client.bulk({
|
||||
refresh: true,
|
||||
body: [
|
||||
operations: [
|
||||
{ index: { _index: 'game-of-thrones' } },
|
||||
{
|
||||
character: 'Ned Stark',
|
||||
@ -43,22 +46,20 @@ async function run () {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const { body } = await client.search({
|
||||
const result = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: {
|
||||
match: { quote: 'witner' }
|
||||
},
|
||||
suggest: {
|
||||
gotsuggest: {
|
||||
text: 'witner',
|
||||
term: { field: 'quote' }
|
||||
}
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
},
|
||||
suggest: {
|
||||
gotsuggest: {
|
||||
text: 'winter',
|
||||
term: { field: 'quote' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
console.log(result)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -20,12 +20,15 @@ maintain.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
const { body: bulkResponse } = await client.bulk({
|
||||
const bulkResponse = await client.bulk({
|
||||
refresh: true,
|
||||
body: [
|
||||
operations: [
|
||||
{ index: { _index: 'game-of-thrones' } },
|
||||
{
|
||||
character: 'Ned Stark',
|
||||
@ -51,7 +54,7 @@ async function run () {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const { body } = await client.transport.request({
|
||||
const response = await client.transport.request({
|
||||
method: 'POST',
|
||||
path: '/game-of-thrones/_search',
|
||||
body: {
|
||||
@ -64,7 +67,7 @@ async function run () {
|
||||
querystring: {}
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
console.log(response)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -1,72 +0,0 @@
|
||||
[[typescript_examples]]
|
||||
=== Typescript
|
||||
|
||||
The client offers a first-class support for TypeScript, since it ships the type
|
||||
definitions for every exposed API.
|
||||
|
||||
NOTE: If you are using TypeScript you will be required to use _snake_case_ style
|
||||
to define the API parameters instead of _camelCase_.
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
import { Client, ApiResponse, RequestParams } from '@elastic/elasticsearch'
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run (): void {
|
||||
// Let's start by indexing some data
|
||||
const doc1: RequestParams.Index = {
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
}
|
||||
await client.index(doc1)
|
||||
|
||||
const doc2: RequestParams.Index = {
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Daenerys Targaryen',
|
||||
quote: 'I am the blood of the dragon.'
|
||||
}
|
||||
}
|
||||
await client.index(doc2)
|
||||
|
||||
const doc3: RequestParams.Index = {
|
||||
index: 'game-of-thrones',
|
||||
// here we are forcing an index refresh,
|
||||
// otherwise we will not get any result
|
||||
// in the consequent search
|
||||
refresh: true,
|
||||
body: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A mind needs books like a sword needs a whetstone.'
|
||||
}
|
||||
}
|
||||
await client.index(doc3)
|
||||
|
||||
// Let's search!
|
||||
const params: RequestParams.Search = {
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
quote: 'winter'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
client
|
||||
.search(params)
|
||||
.then((result: ApiResponse) => {
|
||||
console.log(result.body.hits.hits)
|
||||
})
|
||||
.catch((err: Error) => {
|
||||
console.log(err)
|
||||
})
|
||||
}
|
||||
|
||||
run()
|
||||
----
|
||||
@ -10,13 +10,16 @@ a character has said the given quote, and then we will update the `times` field.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
times: 0
|
||||
@ -26,23 +29,21 @@ async function run () {
|
||||
await client.update({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source.times++'
|
||||
// you can also use parameters
|
||||
// source: 'ctx._source.times += params.count',
|
||||
// params: { count: 1 }
|
||||
}
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source.times++'
|
||||
// you can also use parameters
|
||||
// source: 'ctx._source.times += params.count',
|
||||
// params: { count: 1 }
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.get({
|
||||
const document = await client.get({
|
||||
index: 'game-of-thrones',
|
||||
id: '1'
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
console.log(document)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
@ -56,13 +57,16 @@ With the update API, you can also run a partial update of a document.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
isAlive: true
|
||||
@ -72,19 +76,17 @@ async function run () {
|
||||
await client.update({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
doc: {
|
||||
isAlive: false
|
||||
}
|
||||
doc: {
|
||||
isAlive: false
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.get({
|
||||
const document = await client.get({
|
||||
index: 'game-of-thrones',
|
||||
id: '1'
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
console.log(document)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -10,12 +10,15 @@ property or some other online mapping change.
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
@ -24,7 +27,7 @@ async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
document: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.'
|
||||
}
|
||||
@ -33,27 +36,23 @@ async function run () {
|
||||
await client.updateByQuery({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source["house"] = "stark"'
|
||||
},
|
||||
query: {
|
||||
match: {
|
||||
character: 'stark'
|
||||
}
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source["house"] = "stark"'
|
||||
},
|
||||
query: {
|
||||
match: {
|
||||
character: 'stark'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.search({
|
||||
const result = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: { match_all: {} }
|
||||
}
|
||||
query: { match_all: {} }
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
console.log(result.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
@ -1,72 +0,0 @@
|
||||
[[extend]]
|
||||
=== Extend the client
|
||||
|
||||
Sometimes you need to reuse the same logic, or you want to build a custom API to
|
||||
allow you to simplify your code. The easiest way to achieve that is by extending
|
||||
the client.
|
||||
|
||||
NOTE: If you want to override existing methods, you should specify the
|
||||
`{ force: true }` option.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
client.extend('supersearch', ({ makeRequest, ConfigurationError }) => {
|
||||
return function supersearch (params, options) {
|
||||
const {
|
||||
body,
|
||||
index,
|
||||
method,
|
||||
...querystring
|
||||
} = params
|
||||
|
||||
// params validation
|
||||
if (body == null) {
|
||||
throw new ConfigurationError('Missing required parameter: body')
|
||||
}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method: method || 'POST',
|
||||
path: `/${encodeURIComponent(index)}/_search_`,
|
||||
body,
|
||||
querystring
|
||||
}
|
||||
|
||||
// build request options object
|
||||
const requestOptions = {
|
||||
ignore: options.ignore || null,
|
||||
requestTimeout: options.requestTimeout || null,
|
||||
maxRetries: options.maxRetries || null,
|
||||
asStream: options.asStream || false,
|
||||
headers: options.headers || null
|
||||
}
|
||||
|
||||
return makeRequest(request, requestOptions)
|
||||
}
|
||||
})
|
||||
|
||||
client.extend('utility.index', ({ makeRequest }) => {
|
||||
return function _index (params, options) {
|
||||
// your code
|
||||
}
|
||||
})
|
||||
|
||||
client.extend('utility.delete', ({ makeRequest }) => {
|
||||
return function _delete (params, options) {
|
||||
// your code
|
||||
}
|
||||
})
|
||||
|
||||
client.extend('indices.delete', { force: true }, ({ makeRequest }) => {
|
||||
return function _delete (params, options) {
|
||||
// your code
|
||||
}
|
||||
})
|
||||
|
||||
client.supersearch(...)
|
||||
client.utility.index(...)
|
||||
client.utility.delete(...)
|
||||
----
|
||||
@ -27,7 +27,10 @@ const { createReadStream } = require('fs')
|
||||
const split = require('split2')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split()),
|
||||
onDocument (doc) {
|
||||
@ -248,7 +251,10 @@ const { createReadStream } = require('fs')
|
||||
const split = require('split2')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const b = client.helpers.bulk({
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split()),
|
||||
onDocument (doc) {
|
||||
@ -304,7 +310,10 @@ async function * generator () {
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: generator(),
|
||||
onDocument (doc) {
|
||||
@ -338,26 +347,18 @@ sources.
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
// promise style API
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body)) // or result.documents
|
||||
.catch(err => console.error(err))
|
||||
|
||||
// callback style API
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'ruby' } } },
|
||||
(err, result) => {
|
||||
if (err) console.error(err)
|
||||
console.log(result.body) // or result.documents
|
||||
}
|
||||
)
|
||||
----
|
||||
|
||||
To create a new instance of the multi search (msearch) helper, you should access
|
||||
@ -438,7 +439,10 @@ running will not be stopped.
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
m.search(
|
||||
@ -474,11 +478,9 @@ the query string.
|
||||
----
|
||||
const documents = await client.helpers.search({
|
||||
index: 'stackoverflow',
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
@ -505,11 +507,9 @@ the `429` error and uses the `maxRetries` option of the client.
|
||||
----
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: 'stackoverflow',
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
@ -564,11 +564,9 @@ automatically adds `filter_path=hits.hits._source` to the query string.
|
||||
----
|
||||
const scrollSearch = client.helpers.scrollDocuments({
|
||||
index: 'stackoverflow',
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
185
docs/index-custom-title-page.html
Normal file
@ -0,0 +1,185 @@
|
||||
<style>
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.card {
|
||||
cursor: pointer;
|
||||
padding: 16px;
|
||||
text-align: left;
|
||||
color: #000;
|
||||
}
|
||||
|
||||
.card:hover {
|
||||
box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2);
|
||||
padding: 16px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
#guide a.no-text-decoration:hover {
|
||||
text-decoration: none!important;
|
||||
}
|
||||
|
||||
.icon {
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
background-position: bottom;
|
||||
background-size: contain;
|
||||
background-repeat: no-repeat;
|
||||
}
|
||||
|
||||
.ul-col-1 {
|
||||
columns: 1;
|
||||
-webkit-columns: 1;
|
||||
-moz-columns: 1;
|
||||
}
|
||||
|
||||
@media (min-width:769px) {
|
||||
.ul-col-md-2 {
|
||||
columns: 2;
|
||||
-webkit-columns: 2;
|
||||
-moz-columns: 2;
|
||||
}
|
||||
}
|
||||
|
||||
#guide h3.gtk {
|
||||
margin-top: 16px;
|
||||
}
|
||||
|
||||
.mb-4, .my-4 {
|
||||
margin-bottom: 0!important;
|
||||
}
|
||||
</style>
|
||||
|
||||
<div class="legalnotice"></div>
|
||||
|
||||
<div class="row my-4">
|
||||
<div class="col-md-6 col-12">
|
||||
<p></p>
|
||||
<p>
|
||||
<h2>Documentation</h2>
|
||||
</p>
|
||||
<p>
|
||||
The official Node.js client provides one-to-one mapping with Elasticsearch REST APIs.
|
||||
</p>
|
||||
<p>
|
||||
<a href="https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/installation.html">
|
||||
<button class="btn btn-primary">Get started</button>
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
<div class="col-md-6 col-12">
|
||||
<img class="w-100" src="https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blt3826dd36882ab258/641c4e9d542f593a7e8ba656/js-es-lp-hero.png" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h3 class="gtk">Get to know the JavaScript client</h3>
|
||||
|
||||
<div class="my-5">
|
||||
<div class="d-flex align-items-center mb-3">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/bltfd59779217093221/641ae0c8db18f61d68e9c377/64x64_Color_icon-connected-circles64-color.png');"></span>
|
||||
Connecting
|
||||
</h4>
|
||||
</div>
|
||||
<ul class="ul-col-md-2 ul-col-1">
|
||||
<li>
|
||||
<a href="introduction.html">Introduction to the client</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="installation.html">Installing the client</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="client-connecting.html">Connecting to Elasticsearch</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="client-configuration.html">Configuration options</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="my-5">
|
||||
<div class="d-flex align-items-center mb-3">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/bltca09fd8c807816ce/641ae17733e7f95594918557/icon-monitor-cog-64-color.png');"></span>
|
||||
Using the JS client
|
||||
</h4>
|
||||
</div>
|
||||
<ul class="ul-col-md-2 ul-col-1">
|
||||
<li>
|
||||
<a href="bulk_examples.html">Bulk indexing</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="get_examples.html">Getting documents</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="search_examples.html">Searching</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="examples.html">More examples</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="my-5">
|
||||
<div class="d-flex align-items-center mb-3">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blteacd058910f155d8/636925a6e0ff7c532db636d7/64x64_Color_icon-dev-tools-64-color.png');"></span>
|
||||
API and developer docs
|
||||
</h4>
|
||||
</div>
|
||||
<ul class="ul-col-md-2 ul-col-1">
|
||||
<li>
|
||||
<a href="api-reference.html">API reference</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="client-helpers.html">Client helpers</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="integrations.html">Integrations</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="changelog-client.html">Release notes</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<h3 class="explore">Explore by use case</h3>
|
||||
|
||||
<div class="row my-4">
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<a class="no-text-decoration" href="https://www.elastic.co/guide/en/enterprise-search/current/start.html">
|
||||
<div class="card h-100">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blt11200907c1c033aa/634d9da119d8652169cf9b2b/enterprise-search-logo-color-32px.png');"></span>
|
||||
Search my data
|
||||
</h4>
|
||||
<p>Create search experiences for your content, wherever it lives.</p>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<a class="no-text-decoration" href="https://www.elastic.co/guide/en/starting-with-the-elasticsearch-platform-and-its-solutions/current/getting-started-observability.html">
|
||||
<div class="card h-100">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/bltaa08b370a00bbecc/634d9da14e565f1cdce27f7c/observability-logo-color-32px.png');"></span>
|
||||
Observe my data
|
||||
</h4>
|
||||
<p>Follow our guides to monitor logs, metrics, and traces.</p>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<a class="no-text-decoration" href="https://www.elastic.co/guide/en/security/current/es-overview.html">
|
||||
<div class="card h-100">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blt5e0e0ad9a13e6b8c/634d9da18473831f96bbdf1e/security-logo-color-32px.png');"></span>
|
||||
Protect my environment
|
||||
</h4>
|
||||
<p>Learn how to defend against threats across your environment.</p>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p class="my-4"><a href="https://www.elastic.co/guide/index.html">View all Elastic docs</a></p>
|
||||
@ -1,16 +1,16 @@
|
||||
= Elasticsearch JavaScript Client
|
||||
|
||||
:branch: master
|
||||
include::{asciidoc-dir}/../../shared/versions/stack/{source_branch}.asciidoc[]
|
||||
include::{asciidoc-dir}/../../shared/attributes.asciidoc[]
|
||||
|
||||
include::introduction.asciidoc[]
|
||||
include::changelog.asciidoc[]
|
||||
include::installation.asciidoc[]
|
||||
include::connecting.asciidoc[]
|
||||
include::configuration.asciidoc[]
|
||||
include::basic-config.asciidoc[]
|
||||
include::advanced-config.asciidoc[]
|
||||
include::child.asciidoc[]
|
||||
include::extend.asciidoc[]
|
||||
include::testing.asciidoc[]
|
||||
include::integrations.asciidoc[]
|
||||
include::observability.asciidoc[]
|
||||
@ -19,3 +19,4 @@ include::typescript.asciidoc[]
|
||||
include::reference.asciidoc[]
|
||||
include::examples/index.asciidoc[]
|
||||
include::helpers.asciidoc[]
|
||||
include::redirects.asciidoc[]
|
||||
|
||||
@ -24,7 +24,7 @@ To learn more about the supported major versions, please refer to the
|
||||
[[nodejs-support]]
|
||||
=== Node.js support
|
||||
|
||||
NOTE: The minimum supported version of Node.js is `v12`.
|
||||
NOTE: The minimum supported version of Node.js is `v14`.
|
||||
|
||||
The client versioning follows the {stack} versioning; this means that
|
||||
major, minor, and patch releases are done following a precise schedule that
|
||||
@ -56,6 +56,14 @@ of `^7.10.0`).
|
||||
|`10.x`
|
||||
|April 2021
|
||||
|`7.12` (mid 2021)
|
||||
|
||||
|`12.x`
|
||||
|April 2022
|
||||
|`8.2` (early 2022)
|
||||
|
||||
|`14.x`
|
||||
|April 2023
|
||||
|`8.8` (early 2023)
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
@ -70,8 +78,8 @@ Elasticsearch language clients are only backwards compatible with default distri
|
||||
|{es} Version
|
||||
|Client Version
|
||||
|
||||
|`main`
|
||||
|`main`
|
||||
|`8.x`
|
||||
|`8.x`
|
||||
|
||||
|`7.x`
|
||||
|`7.x`
|
||||
|
||||
@ -4,9 +4,6 @@
|
||||
This is the official Node.js client for {es}. This page gives a quick overview
|
||||
about the features of the client.
|
||||
|
||||
Refer to <<breaking-changes,this page>> for breaking changes coming from the old
|
||||
client.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== Features
|
||||
@ -23,72 +20,21 @@ client.
|
||||
[discrete]
|
||||
=== Quick start
|
||||
|
||||
First of all, require, then initialize the client:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
----
|
||||
|
||||
|
||||
You can use both the callback API and the promise API, both behave the same way.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// promise API
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
The returned value of **every** API call is formed as follows:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
{
|
||||
body: object | boolean
|
||||
statusCode: number
|
||||
headers: object
|
||||
warnings: [string]
|
||||
meta: object
|
||||
}
|
||||
----
|
||||
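For instance, a minimal sketch of reading that envelope with the 7.x client (index name and query are placeholders; in 8.x the promise resolves directly to the body instead):

[source,js]
----
const response = await client.search({
  index: 'my-index',
  body: { query: { match_all: {} } }
})

console.log(response.statusCode) // e.g. 200
console.log(response.body.hits.hits) // the search hits
----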
|
||||
|
||||
Let's see a complete example!
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
async function run () {
|
||||
// Let's start by indexing some data
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
document: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
@ -96,8 +42,7 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
document: {
|
||||
character: 'Daenerys Targaryen',
|
||||
quote: 'I am the blood of the dragon.'
|
||||
}
|
||||
@ -105,29 +50,25 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
document: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A mind needs books like a sword needs a whetstone.'
|
||||
}
|
||||
})
|
||||
|
||||
// We need to force an index refresh at this point, otherwise we will not
|
||||
// here we are forcing an index refresh, otherwise we will not
|
||||
// get any result in the consequent search
|
||||
await client.indices.refresh({ index: 'game-of-thrones' })
|
||||
|
||||
// Let's search!
|
||||
const { body } = await client.search({
|
||||
const result = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
}
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
console.log(result.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
@ -178,23 +119,26 @@ Require the packages from your code by using the alias you have defined.
|
||||
const { Client: Client6 } = require('es6')
|
||||
const { Client: Client7 } = require('es7')
|
||||
|
||||
const client6 = new Client6({ node: 'http://localhost:9200' })
|
||||
const client7 = new Client7({ node: 'http://localhost:9201' })
|
||||
const client6 = new Client6({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const client7 = new Client7({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
client6.info(console.log)
|
||||
client7.info(console.log)
|
||||
client6.info().then(console.log, console.log)
|
||||
client7.info().then(console.log, console.log)
|
||||
----
|
||||
|
||||
|
||||
Finally, if you want to install the client for the next version of {es} (the one
|
||||
that lives in the {es} master branch), use the following command:
|
||||
that lives in the {es} main branch), use the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install esmaster@github:elastic/elasticsearch-js
|
||||
npm install esmain@github:elastic/elasticsearch-js
|
||||
----
|
||||
WARNING: This command installs the master branch of the client which is not
|
||||
considered stable.
|
||||
|
||||
|
||||
include::breaking-changes.asciidoc[]
|
||||
WARNING: This command installs the main branch of the client which is not
|
||||
considered stable.
|
||||
@ -2,7 +2,7 @@
|
||||
=== Observability
|
||||
|
||||
The client does not provide a default logger, but instead it offers an event
|
||||
emitter interfaces to hook into internal events, such as `request` and
|
||||
emitter interface to hook into internal events, such as `request` and
|
||||
`response`.
|
||||
|
||||
Correlating those events can be hard, especially if your applications have a
|
||||
@ -34,9 +34,12 @@ response and error that is happening during the use of the client.
|
||||
----
|
||||
const logger = require('my-logger')()
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
client.on('response', (err, result) => {
|
||||
client.diagnostic.on('response', (err, result) => {
|
||||
if (err) {
|
||||
logger.error(err)
|
||||
} else {
|
||||
@ -53,7 +56,7 @@ The client emits the following events:
|
||||
a|Emitted before starting serialization and compression. If you want to measure this phase duration, you should measure the time elapsed between this event and `request`.
|
||||
[source,js]
|
||||
----
|
||||
client.on('serialization', (err, result) => {
|
||||
client.diagnostic.on('serialization', (err, result) => {
|
||||
console.log(err, result)
|
||||
})
|
||||
----
|
||||
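For example, a hypothetical sketch (not from the docs) of timing that phase by pairing the two events:

[source,js]
----
let serializationStart = 0n

client.diagnostic.on('serialization', (err, result) => {
  if (err == null) serializationStart = process.hrtime.bigint()
})

client.diagnostic.on('request', (err, result) => {
  if (err == null) {
    // with concurrent requests you would correlate the two events
    // via result.meta.request.id instead of a single shared variable
    const elapsedNs = process.hrtime.bigint() - serializationStart
    console.log(`serialization + compression took ${elapsedNs} ns`)
  }
})
----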
@ -62,7 +65,7 @@ client.on('serialization', (err, result) => {
|
||||
a|Emitted before sending the actual request to {es} _(emitted multiple times in case of retries)_.
|
||||
[source,js]
|
||||
----
|
||||
client.on('request', (err, result) => {
|
||||
client.diagnostic.on('request', (err, result) => {
|
||||
console.log(err, result)
|
||||
})
|
||||
----
|
||||
@ -71,7 +74,7 @@ client.on('request', (err, result) => {
|
||||
a|Emitted before starting deserialization and decompression. If you want to measure this phase duration, you should measure the time elapsed between this event and `response`. _(This event might not be emitted in certain situations)_.
|
||||
[source,js]
|
||||
----
|
||||
client.on('deserialization', (err, result) => {
|
||||
client.diagnostic.on('deserialization', (err, result) => {
|
||||
console.log(err, result)
|
||||
})
|
||||
----
|
||||
@ -80,7 +83,7 @@ client.on('deserialization', (err, result) => {
|
||||
a|Emitted once {es} response has been received and parsed.
|
||||
[source,js]
|
||||
----
|
||||
client.on('response', (err, result) => {
|
||||
client.diagnostic.on('response', (err, result) => {
|
||||
console.log(err, result)
|
||||
})
|
||||
----
|
||||
@ -89,7 +92,7 @@ client.on('response', (err, result) => {
|
||||
a|Emitted when the client ends a sniffing request.
|
||||
[source,js]
|
||||
----
|
||||
client.on('sniff', (err, result) => {
|
||||
client.diagnostic.on('sniff', (err, result) => {
|
||||
console.log(err, result)
|
||||
})
|
||||
----
|
||||
@ -98,7 +101,7 @@ client.on('sniff', (err, result) => {
|
||||
a|Emitted if the client is able to resurrect a dead node.
|
||||
[source,js]
|
||||
----
|
||||
client.on('resurrect', (err, result) => {
|
||||
client.diagnostic.on('resurrect', (err, result) => {
|
||||
console.log(err, result)
|
||||
})
|
||||
----
|
||||
@ -151,9 +154,6 @@ request: {
|
||||
|
||||
The event order is described in the following graph; in some edge cases, the
|
||||
order is not guaranteed.
|
||||
You can find in
|
||||
https://github.com/elastic/elasticsearch-js/blob/master/test/acceptance/events-order.test.js[`test/acceptance/events-order.test.js`]
|
||||
how the order changes based on the situation.
|
||||
|
||||
[source]
|
||||
----
|
||||
@ -183,16 +183,19 @@ handle this problem.
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
client.on('request', (err, result) => {
|
||||
client.diagnostic.on('request', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
if (err) {
|
||||
console.log({ error: err, reqId: id })
|
||||
}
|
||||
})
|
||||
|
||||
client.on('response', (err, result) => {
|
||||
client.diagnostic.on('response', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
if (err) {
|
||||
console.log({ error: err, reqId: id })
|
||||
@ -201,10 +204,8 @@ client.on('response', (err, result) => {
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
query: { match_all: {} }
|
||||
}).then(console.log, console.log)
|
||||
----
|
||||
|
||||
|
||||
@ -215,7 +216,8 @@ By default the id is an incremental integer, but you can configure it with the
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
// it takes two parameters, the request parameters and options
|
||||
generateRequestId: function (params, options) {
|
||||
// your id generation logic
|
||||
@ -232,12 +234,10 @@ You can also specify a custom id per request:
|
||||
----
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
query: { match_all: {} }
|
||||
}, {
|
||||
id: 'custom-id'
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
}).then(console.log, console.log)
|
||||
----
|
||||
|
||||
|
||||
@ -250,9 +250,12 @@ can do that via the `context` option of a request:
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
client.on('request', (err, result) => {
|
||||
client.diagnostic.on('request', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { context } = result.meta
|
||||
if (err) {
|
||||
@ -260,7 +263,7 @@ client.on('request', (err, result) => {
|
||||
}
|
||||
})
|
||||
|
||||
client.on('response', (err, result) => {
|
||||
client.diagnostic.on('response', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { winter } = result.meta.context
|
||||
if (err) {
|
||||
@ -270,12 +273,10 @@ client.on('response', (err, result) => {
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
query: { match_all: {} }
|
||||
}, {
|
||||
context: { winter: 'is coming' }
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
}).then(console.log, console.log)
|
||||
----
|
||||
|
||||
The context object can also be configured as a global option in the client
|
||||
@ -286,11 +287,12 @@ merged, and the API level object will take precedence.
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
context: { winter: 'is coming' }
|
||||
})
|
||||
|
||||
client.on('request', (err, result) => {
|
||||
client.diagnostic.on('request', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { context } = result.meta
|
||||
if (err) {
|
||||
@ -298,7 +300,7 @@ client.on('request', (err, result) => {
|
||||
}
|
||||
})
|
||||
|
||||
client.on('response', (err, result) => {
|
||||
client.diagnostic.on('response', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { winter } = result.meta.context
|
||||
if (err) {
|
||||
@ -308,12 +310,10 @@ client.on('response', (err, result) => {
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
query: { match_all: {} }
|
||||
}, {
|
||||
context: { winter: 'has come' }
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
}).then(console.log, console.log)
|
||||
----
|
||||
|
||||
|
||||
@ -329,7 +329,8 @@ options help you in this regard.
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
name: 'parent-client' // default to 'elasticsearch-js'
|
||||
})
|
||||
|
||||
@ -339,7 +340,7 @@ const child = client.child({
|
||||
|
||||
console.log(client.name, child.name)
|
||||
|
||||
client.on('request', (err, result) => {
|
||||
client.diagnostic.on('request', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { name } = result.meta
|
||||
if (err) {
|
||||
@ -347,7 +348,7 @@ client.on('request', (err, result) => {
|
||||
}
|
||||
})
|
||||
|
||||
client.on('response', (err, result) => {
|
||||
client.diagnostic.on('response', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { name } = result.meta
|
||||
if (err) {
|
||||
@ -357,17 +358,13 @@ client.on('response', (err, result) => {
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
query: { match_all: {} }
|
||||
}).then(console.log, console.log)
|
||||
|
||||
child.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
query: { match_all: {} }
|
||||
}).then(console.log, console.log)
|
||||
----
|
||||
|
||||
|
||||
@ -377,9 +374,9 @@ child.search({
|
||||
To improve observability, the client offers an easy way to configure the
|
||||
`X-Opaque-Id` header. If you set the `X-Opaque-Id` in a specific request, this
|
||||
allows you to discover this identifier in the
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/master/logging.html#deprecation-logging[deprecation logs],
|
||||
helps you with https://www.elastic.co/guide/en/elasticsearch/reference/master/index-modules-slowlog.html#_identifying_search_slow_log_origin[identifying search slow log origin]
|
||||
as well as https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html#_identifying_running_tasks[identifying running tasks].
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/logging.html#deprecation-logging[deprecation logs],
|
||||
helps you with https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules-slowlog.html#_identifying_search_slow_log_origin[identifying search slow log origin]
|
||||
as well as https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html#_identifying_running_tasks[identifying running tasks].
|
||||
|
||||
The `X-Opaque-Id` should be configured in each request; to do that you can
|
||||
use the `opaqueId` option, as you can see in the following example. The
|
||||
@ -389,7 +386,8 @@ resulting header will be `{ 'X-Opaque-Id': 'my-search' }`.
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
client.search({
|
||||
@ -397,9 +395,7 @@ client.search({
|
||||
body: { foo: 'bar' }
|
||||
}, {
|
||||
opaqueId: 'my-search'
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
}).then(console.log, console.log)
|
||||
----
|
||||
|
||||
Sometimes it may be useful to prefix all the `X-Opaque-Id` headers with a
|
||||
@ -412,7 +408,8 @@ doing this, the client offers a top-level configuration option:
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
opaqueIdPrefix: 'proxy-client::'
|
||||
})
|
||||
|
||||
@ -421,8 +418,6 @@ client.search({
|
||||
body: { foo: 'bar' }
|
||||
}, {
|
||||
opaqueId: 'my-search'
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
}).then(console.log, console.log)
|
||||
----
|
||||
|
||||
|
||||
17
docs/redirects.asciidoc
Normal file
@ -0,0 +1,17 @@
|
||||
["appendix",role="exclude",id="redirects"]
|
||||
= Deleted pages
|
||||
|
||||
The following pages have moved or been deleted.
|
||||
|
||||
[role="exclude",id="auth-reference"]
|
||||
== Authentication
|
||||
|
||||
This page has moved. See <<client-connecting>>.
|
||||
|
||||
[role="exclude",id="breaking-changes"]
|
||||
== Breaking changes
|
||||
|
||||
For information about migrating from the legacy elasticsearch.js client to the
|
||||
new Elasticsearch JavaScript client, refer to the
|
||||
https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/7.17/breaking-changes.html[7.17
|
||||
JavaScript client migration guide].
|
||||
15120
docs/reference.asciidoc
File diff suppressed because it is too large
@ -61,7 +61,8 @@ const Mock = require('@elastic/elasticsearch-mock')
|
||||
|
||||
const mock = new Mock()
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
Connection: mock.getConnection()
|
||||
})
|
||||
|
||||
@ -72,7 +73,7 @@ mock.add({
|
||||
return { status: 'ok' }
|
||||
})
|
||||
|
||||
client.info(console.log)
|
||||
client.info().then(console.log, console.log)
|
||||
----
|
||||
|
||||
As you can see, it works closely with the client itself, once you have created a
|
||||
@ -129,8 +130,8 @@ mock.add({
|
||||
return { count: 42 }
|
||||
})
|
||||
|
||||
client.count({ index: 'foo' }, console.log) // => { count: 42 }
|
||||
client.count({ index: 'bar' }, console.log) // => { count: 42 }
|
||||
client.count({ index: 'foo' }).then(console.log, console.log) // => { count: 42 }
|
||||
client.count({ index: 'bar' }).then(console.log, console.log) // => { count: 42 }
|
||||
----
|
||||
|
||||
And wildcards are supported as well.
|
||||
|
||||
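A sketch of what that could look like, assuming the mock library accepts `*` as a catch-all path (the syntax is an assumption, not taken from this diff):

[source,js]
----
mock.add({
  method: 'GET',
  path: '*'
}, () => {
  return { status: 'ok' }
})

// every GET request now resolves to the mocked payload
client.info().then(console.log, console.log) // => { status: 'ok' }
----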
@ -6,7 +6,8 @@ errors, it also handles sniffing.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client, Transport } = require('@elastic/elasticsearch')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const { Transport } = require('@elastic/transport')
|
||||
|
||||
class MyTransport extends Transport {
|
||||
request (params, options, callback) {
|
||||
|
||||
@ -4,274 +4,87 @@
|
||||
The client offers a first-class support for TypeScript, shipping a complete set
|
||||
of type definitions of Elasticsearch's API surface.
|
||||
|
||||
|
||||
NOTE: If you are using TypeScript you need to use _snake_case_ style to define
|
||||
the API parameters instead of _camelCase_.
|
||||
|
||||
Currently the client exposes two type definitions: the legacy one, which is the default,
|
||||
and the new one, which will be the default in the next major.
|
||||
We strongly recommend migrating to the new one as soon as possible, as the new types
|
||||
are offering a vastly improved developer experience and guarantee you that your code
|
||||
will always be in sync with the latest Elasticsearch features.
|
||||
|
||||
[discrete]
|
||||
==== New type definitions
|
||||
|
||||
The new type definition is more advanced compared to the legacy one. In the legacy
|
||||
type definitions you were expected to configure via generics both request and response
|
||||
bodies. The new type definitions come with a complete type definition for every
|
||||
Elasticsearch endpoint.
|
||||
|
||||
For example:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
// legacy definitions
|
||||
const response = await client.search<SearchResponse<Source>, SearchBody>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// new definitions
|
||||
const response = await client.search<Source>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
The types are not 100% complete yet. Some APIs are missing (the newest ones, e.g. EQL),
|
||||
and others may contain some errors, but we are continuously pushing fixes & improvements.
|
||||
|
||||
[discrete]
|
||||
==== Request & Response types
|
||||
|
||||
Once you migrate to the new types, they are automatically integrated into the Elasticsearch client, and you get them out of the box.
|
||||
If everything works, meaning that you won’t get compiler errors, you are good to go!
|
||||
The types are already correct, and there is nothing more to do.
|
||||
|
||||
If a type is incorrect, you should add a comment `// @ts-expect-error @elastic/elasticsearch`
|
||||
telling TypeScript that you are aware of the warning and you would like to temporarily suppress it.
|
||||
In this way, your code will compile until the type is fixed, and when it happens, you’ll only need to remove the
|
||||
`// @ts-expect-error @elastic/elasticsearch` comment (TypeScript will let you know when it is time).
|
||||
Finally, if the type you need is missing, you’ll see that the client method returns (or defines as a parameter)
|
||||
a `TODO` type, which accepts any object.
|
||||
|
||||
Open an issue in the client repository letting us know if you encounter any problem!
|
||||
|
||||
If needed you can import the request and response types.
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
import { Client, estypes } from '@elastic/elasticsearch'
|
||||
import type { Client as NewTypes } from '@elastic/elasticsearch/api/new'
|
||||
|
||||
// @ts-expect-error @elastic/elasticsearch
|
||||
const client: NewTypes = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
})
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
|
||||
const request: estypes.IndexRequest<Source> = {
|
||||
index: 'test',
|
||||
body: { foo: 'bar' }
|
||||
}
|
||||
|
||||
await client.index(request)
|
||||
----
|
||||
NOTE: The client is developed against the https://www.npmjs.com/package/typescript?activeTab=versions[latest]
|
||||
version of TypeScript. Furthermore, unless you have set `skipLibCheck` to `true`,
|
||||
you should configure `esModuleInterop` to `true`.
|
||||
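For reference, a minimal `tsconfig.json` sketch matching that note (only the relevant option is shown):

[source,json]
----
{
  "compilerOptions": {
    "esModuleInterop": true
  }
}
----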
|
||||
[discrete]
|
||||
===== How to migrate to the new type definitions
|
||||
|
||||
Since the new type definitions can be considered a breaking change, we couldn't add them directly to the client.
|
||||
Below you will find a snippet that shows how to override the default types with the new ones.
|
||||
==== Example
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
import { Client } from '@elastic/elasticsearch'
|
||||
import type { Client as NewTypes } from '@elastic/elasticsearch/api/new'
|
||||
|
||||
// @ts-expect-error @elastic/elasticsearch
|
||||
const client: NewTypes = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
|
||||
// try the new code completion when building a query!
|
||||
const response = await client.search<Source>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// try the new code completion when traversing a response!
|
||||
const results = response.body.hits.hits.map(hit => hit._source)
|
||||
// results type will be `Source[]`
|
||||
console.log(results)
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Legacy type definitions
|
||||
|
||||
By default every API uses
|
||||
https://www.typescriptlang.org/docs/handbook/generics.html[generics] to specify
|
||||
the requests and response bodies and the `meta.context`. Currently, we can't
|
||||
provide those definitions, but we are working to improve this situation.
|
||||
|
||||
You can find a partial definition of the request types by importing
|
||||
`RequestParams`, which is used by default in the client and accepts a body (when
|
||||
needed) as a generic to provide a better specification.
|
||||
|
||||
The body defaults to `RequestBody` and `RequestNDBody`, which are defined as
|
||||
follows:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
type RequestBody<T = Record<string, any>> = T | string | Buffer | ReadableStream
|
||||
type RequestNDBody<T = Record<string, any>[]> = T | string | string[] | Buffer | ReadableStream
|
||||
----

You can specify the response and request body in each API as follows:

[source,ts]
----
const response = await client.search<ResponseBody, RequestBody, Context>({
  index: 'test',
  body: {
    query: {
      match: { foo: 'bar' }
    }
  }
})

console.log(response.body)
----

You don't have to specify all the generics, but the order must be respected.
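For example, specifying only the first generic types the response body while the request body and context keep their defaults; a minimal sketch (the `MySearchResponse` shape below is made up and only approximates a real search response):

[source,ts]
----
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Illustrative response shape, kept intentionally small
interface MySearchResponse {
  hits: {
    hits: Array<{ _source: { foo: string } }>
  }
}

// Only the response body generic is provided; the request body
// falls back to `RequestBody` and the context to its default.
const response = await client.search<MySearchResponse>({
  index: 'test',
  body: {
    query: {
      match: { foo: 'bar' }
    }
  }
})

console.log(response.body.hits.hits)
----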

[discrete]
===== A complete example

[source,ts]
----
import {
  Client,
  // Object that contains the type definitions of every API method
  RequestParams,
  // Interface of the generic API response
  ApiResponse,
} from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Define the type of the body for the Search request
interface SearchBody {
  query: {
    match: { foo: string }
  }
}

// Complete definition of the Search response
interface ShardsResponse {
  total: number;
  successful: number;
  failed: number;
  skipped: number;
}

interface Explanation {
  value: number;
  description: string;
  details: Explanation[];
}

interface SearchResponse<T> {
  took: number;
  timed_out: boolean;
  _scroll_id?: string;
  _shards: ShardsResponse;
  hits: {
    total: number;
    max_score: number;
    hits: Array<{
      _index: string;
      _type: string;
      _id: string;
      _score: number;
      _source: T;
      _version?: number;
      _explanation?: Explanation;
      fields?: any;
      highlight?: any;
      inner_hits?: any;
      matched_queries?: string[];
      sort?: string[];
    }>;
  };
  aggregations?: any;
}

// Define the interface of the source object
interface Source {
  foo: string
}

async function run () {
  // All of the examples below are valid code, by default,
  // the request body will be `RequestBody` and the response will be `Record<string, any>`.
  let response = await client.search({
    index: 'test',
    body: {
      query: {
        match: { foo: 'bar' }
      }
    }
  })
  // body here is `ResponseBody`
  console.log(response.body)

  // The first generic is the response body
  response = await client.search<SearchResponse<Source>>({
    index: 'test',
    // Here the body must follow the `RequestBody` interface
    body: {
      query: {
        match: { foo: 'bar' }
      }
    }
  })
  // body here is `SearchResponse<Source>`
  console.log(response.body)

  response = await client.search<SearchResponse<Source>, SearchBody>({
    index: 'test',
    // Here the body must follow the `SearchBody` interface
    body: {
      query: {
        match: { foo: 'bar' }
      }
    }
  })
  // body here is `SearchResponse<Source>`
  console.log(response.body)
}

run().catch(console.log)
----

[discrete]
==== Request & Response types

You can import the full TypeScript request and response definitions as follows:

[source,ts]
----
import { estypes } from '@elastic/elasticsearch'
----

If you need the legacy definitions with the body, you can do the following:

[source,ts]
----
import { estypesWithBody } from '@elastic/elasticsearch'
----
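As a sketch of the difference between the two (the request literals below are assumptions for illustration): `estypes` models the 8.x API, where the search options sit at the top level of the request, while `estypesWithBody` keeps the legacy `body` key.

[source,ts]
----
import { estypes, estypesWithBody } from '@elastic/elasticsearch'

// 8.x style: the query lives at the top level of the request
const request: estypes.SearchRequest = {
  index: 'test',
  query: { match_all: {} }
}

// Legacy style: the same search expressed with an explicit `body` key
const legacyRequest: estypesWithBody.SearchRequest = {
  index: 'test',
  body: {
    query: { match_all: {} }
  }
}
----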

5 index.d.ts vendored

@@ -18,7 +18,10 @@
*/

import Client from './lib/client'
import SniffingTransport from './lib/sniffingTransport'

export * from '@elastic/transport'
export { Client }
export * as estypes from './lib/api/types'
export * as estypesWithBody from './lib/api/typesWithBodyKey'
export { Client, SniffingTransport }
export type { ClientOptions, NodeOptions } from './lib/client'

2 index.js

@@ -35,9 +35,11 @@ const {
} = require('@elastic/transport')

const { default: Client } = require('./lib/client')
const { default: SniffingTransport } = require('./lib/sniffingTransport')

module.exports = {
Client,
SniffingTransport,
Diagnostic,
Transport,
WeightedConnectionPool,

66 package.json

@@ -1,17 +1,16 @@
{
"name": "@elastic/elasticsearch",
"version": "8.1.0-beta.1",
"versionCanary": "8.1.0-canary.2",
"version": "8.7.3",
"versionCanary": "8.7.3-canary.0",
"description": "The official Elasticsearch client for Node.js",
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"test": "npm run build && npm run lint && tap test/{unit,acceptance}/{*,**/*}.test.ts",
"test": "npm run build && npm run lint && tap test/unit/{*,**/*}.test.ts",
"test:unit": "npm run build && tap test/unit/{*,**/*}.test.ts",
"test:acceptance": "npm run build && tap test/acceptance/*.test.ts",
"test:coverage-100": "npm run build && tap test/{unit,acceptance}/{*,**/*}.test.ts --coverage --100",
"test:coverage-report": "npm run build && tap test/{unit,acceptance}/{*,**/*}.test.ts --coverage && nyc report --reporter=text-lcov > coverage.lcov",
"test:coverage-ui": "npm run build && tap test/{unit,acceptance}/{*,**/*}.test.ts --coverage --coverage-report=html",
"test:coverage-100": "npm run build && tap test/unit/{*,**/*}.test.ts --coverage --100",
"test:coverage-report": "npm run build && tap test/unit/{*,**/*}.test.ts --coverage && nyc report --reporter=text-lcov > coverage.lcov",
"test:coverage-ui": "npm run build && tap test/unit/{*,**/*}.test.ts --coverage --coverage-report=html",
"test:integration": "tsc && node test/integration/index.js",
"lint": "ts-standard src",
"lint:fix": "ts-standard --fix src",
@@ -31,10 +30,16 @@
"client",
"index"
],
"author": {
"name": "Tomas Della Vedova",
"company": "Elastic BV"
},
"contributors": [
{
"name": "Tomas Della Vedova",
"company": "Elastic BV"
},
{
"name": "Josh Mock",
"company": "Elastic BV"
}
],
"license": "Apache-2.0",
"repository": {
"type": "git",
@@ -45,43 +50,44 @@
},
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
"engines": {
"node": ">=12"
"node": ">=14"
},
"devDependencies": {
"@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1",
"@types/debug": "^4.1.6",
"@types/debug": "^4.1.7",
"@types/ms": "^0.7.31",
"@types/node": "^16.4.1",
"@types/sinonjs__fake-timers": "^6.0.3",
"@types/node": "^17.0.31",
"@types/sinonjs__fake-timers": "^8.1.2",
"@types/split2": "^3.2.1",
"@types/stoppable": "^1.1.1",
"@types/tap": "^15.0.5",
"@types/tap": "^15.0.7",
"cross-zip": "^4.0.0",
"desm": "^1.2.0",
"fast-deep-equal": "^3.1.3",
"into-stream": "^6.0.0",
"into-stream": "^7.0.0",
"js-yaml": "^4.1.0",
"license-checker": "^25.0.1",
"minimist": "^1.2.5",
"minimist": "^1.2.6",
"ms": "^2.1.3",
"node-abort-controller": "^2.0.0",
"node-fetch": "^2.6.2",
"node-abort-controller": "^3.0.1",
"node-fetch": "^2.6.7",
"ora": "^5.4.1",
"proxy": "^1.0.2",
"rimraf": "^3.0.2",
"semver": "^7.3.5",
"split2": "^3.2.2",
"standard": "^16.0.3",
"semver": "^7.3.7",
"split2": "^4.1.0",
"stoppable": "^1.1.0",
"tap": "^15.0.9",
"ts-node": "^10.1.0",
"ts-standard": "^10.0.0",
"typescript": "^4.3.5",
"tap": "^16.1.0",
"ts-node": "^10.7.0",
"ts-standard": "^11.0.0",
"typescript": "^4.6.4",
"workq": "^3.0.0",
"xmlbuilder2": "^3.0.2"
"xmlbuilder2": "^3.0.2",
"zx": "^6.1.0"
},
"dependencies": {
"@elastic/transport": "^8.1.0-beta.1",
"tslib": "^2.3.0"
"@elastic/transport": "~8.3.1",
"tslib": "^2.4.0"
},
"tap": {
"ts": true,

@@ -31,7 +31,6 @@ async function release (opts) {
const originalVersion = packageJson.version
const currentCanaryVersion = packageJson.versionCanary
const originalTypes = packageJson.types
const originalNpmIgnore = await readFile(join(__dirname, '..', '.npmignore'), 'utf8')

const newCanaryInteger = opts.reset ? 1 : (Number(currentCanaryVersion.split('-')[1].split('.')[1]) + 1)
const newCanaryVersion = `${originalVersion.split('-')[0]}-canary.${newCanaryInteger}`
@@ -49,15 +48,6 @@ async function release (opts) {
'utf8'
)

// update the npmignore to publish the kibana types as well
const newNpmIgnore = originalNpmIgnore.slice(0, originalNpmIgnore.indexOf('# CANARY-PACKAGE')) +
originalNpmIgnore.slice(originalNpmIgnore.indexOf('# /CANARY-PACKAGE') + 17)
await writeFile(
join(__dirname, '..', '.npmignore'),
newNpmIgnore,
'utf8'
)

// confirm the package.json changes with the user
const diff = execSync('git diff').toString().split('\n').map(colorDiff).join('\n')
console.log(diff)
@@ -81,12 +71,6 @@ async function release (opts) {
JSON.stringify(packageJson, null, 2) + '\n',
'utf8'
)

await writeFile(
join(__dirname, '..', '.npmignore'),
originalNpmIgnore,
'utf8'
)
}

function confirm (question) {

@@ -228,7 +228,7 @@ function generateSingleApi (version, spec, common) {

${genUrlValidation(paths, api)}

let { ${genQueryBlacklist(false)}, ...querystring } = params
let { ${genQueryDenylist(false)}, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)

let path = ''
@@ -316,20 +316,20 @@ function generateSingleApi (version, spec, common) {
}, {})
}

function genQueryBlacklist (addQuotes = true) {
function genQueryDenylist (addQuotes = true) {
const toCamelCase = str => {
return str[0] === '_'
? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
: str.replace(/_([a-z])/g, k => k[1].toUpperCase())
}

const blacklist = ['method', 'body']
const denylist = ['method', 'body']
parts.forEach(p => {
const camelStr = toCamelCase(p)
if (camelStr !== p) blacklist.push(`${camelStr}`)
blacklist.push(`${p}`)
if (camelStr !== p) denylist.push(`${camelStr}`)
denylist.push(`${p}`)
})
return addQuotes ? blacklist.map(q => `'${q}'`) : blacklist
return addQuotes ? denylist.map(q => `'${q}'`) : denylist
}

function buildPath () {

@@ -65,10 +65,10 @@ export default class AsyncSearch {
return await this.transport.request({ path, method, querystring, body }, options)
}

async get<TDocument = unknown> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.AsyncSearchGetResponse<TDocument>>
async get<TDocument = unknown> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchGetResponse<TDocument>, unknown>>
async get<TDocument = unknown> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchGetResponse<TDocument>>
async get<TDocument = unknown> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptions): Promise<any> {
async get<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.AsyncSearchGetResponse<TDocument, TAggregations>>
async get<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchGetResponse<TDocument, TAggregations>, unknown>>
async get<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchGetResponse<TDocument, TAggregations>>
async get<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['id']
const querystring: Record<string, any> = {}
const body = undefined
@@ -87,10 +87,10 @@ export default class AsyncSearch {
return await this.transport.request({ path, method, querystring, body }, options)
}

async status<TDocument = unknown> (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.AsyncSearchStatusResponse<TDocument>>
async status<TDocument = unknown> (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchStatusResponse<TDocument>, unknown>>
async status<TDocument = unknown> (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchStatusResponse<TDocument>>
async status<TDocument = unknown> (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptions): Promise<any> {
async status (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.AsyncSearchStatusResponse>
async status (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchStatusResponse, unknown>>
async status (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchStatusResponse>
async status (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['id']
const querystring: Record<string, any> = {}
const body = undefined
@@ -109,12 +109,12 @@ export default class AsyncSearch {
return await this.transport.request({ path, method, querystring, body }, options)
}

async submit<TDocument = unknown> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.AsyncSearchSubmitResponse<TDocument>>
async submit<TDocument = unknown> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchSubmitResponse<TDocument>, unknown>>
async submit<TDocument = unknown> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchSubmitResponse<TDocument>>
async submit<TDocument = unknown> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise<any> {
async submit<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.AsyncSearchSubmitResponse<TDocument, TAggregations>>
async submit<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchSubmitResponse<TDocument, TAggregations>, unknown>>
async submit<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchSubmitResponse<TDocument, TAggregations>>
async submit<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats']
const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'knn', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats']
const querystring: Record<string, any> = {}
// @ts-expect-error
const userBody: any = params?.body
@@ -130,7 +130,8 @@ export default class AsyncSearch {
if (acceptedBody.includes(key)) {
body = body ?? {}
// @ts-expect-error
if (key === 'sort' && typeof params[key] === 'string' && params[key].includes(':')) {
if (key === 'sort' && typeof params[key] === 'string' && params[key].includes(':')) { // eslint-disable-line
// @ts-expect-error
querystring[key] = params[key]
} else {
// @ts-expect-error

@@ -37,10 +37,10 @@ import * as T from '../types'
import * as TB from '../typesWithBodyKey'
interface That { transport: Transport }

export default async function BulkApi<TSource = unknown> (this: That, params: T.BulkRequest<TSource> | TB.BulkRequest<TSource>, options?: TransportRequestOptionsWithOutMeta): Promise<T.BulkResponse>
export default async function BulkApi<TSource = unknown> (this: That, params: T.BulkRequest<TSource> | TB.BulkRequest<TSource>, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.BulkResponse, unknown>>
export default async function BulkApi<TSource = unknown> (this: That, params: T.BulkRequest<TSource> | TB.BulkRequest<TSource>, options?: TransportRequestOptions): Promise<T.BulkResponse>
export default async function BulkApi<TSource = unknown> (this: That, params: T.BulkRequest<TSource> | TB.BulkRequest<TSource>, options?: TransportRequestOptions): Promise<any> {
export default async function BulkApi<TDocument = unknown, TPartialDocument = unknown> (this: That, params: T.BulkRequest<TDocument, TPartialDocument> | TB.BulkRequest<TDocument, TPartialDocument>, options?: TransportRequestOptionsWithOutMeta): Promise<T.BulkResponse>
export default async function BulkApi<TDocument = unknown, TPartialDocument = unknown> (this: That, params: T.BulkRequest<TDocument, TPartialDocument> | TB.BulkRequest<TDocument, TPartialDocument>, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.BulkResponse, unknown>>
export default async function BulkApi<TDocument = unknown, TPartialDocument = unknown> (this: That, params: T.BulkRequest<TDocument, TPartialDocument> | TB.BulkRequest<TDocument, TPartialDocument>, options?: TransportRequestOptions): Promise<T.BulkResponse>
export default async function BulkApi<TDocument = unknown, TPartialDocument = unknown> (this: That, params: T.BulkRequest<TDocument, TPartialDocument> | TB.BulkRequest<TDocument, TPartialDocument>, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['operations']
const querystring: Record<string, any> = {}

@@ -103,6 +103,36 @@ export default class Cat {
return await this.transport.request({ path, method, querystring, body }, options)
}

async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest | TB.CatComponentTemplatesRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.CatComponentTemplatesResponse>
async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest | TB.CatComponentTemplatesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatComponentTemplatesResponse, unknown>>
async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest | TB.CatComponentTemplatesRequest, options?: TransportRequestOptions): Promise<T.CatComponentTemplatesResponse>
async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest | TB.CatComponentTemplatesRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['name']
const querystring: Record<string, any> = {}
const body = undefined

params = params ?? {}
for (const key in params) {
if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}

let method = ''
let path = ''
if (params.name != null) {
method = 'GET'
path = `/_cat/component_templates/${encodeURIComponent(params.name.toString())}`
} else {
method = 'GET'
path = '/_cat/component_templates'
}
return await this.transport.request({ path, method, querystring, body }, options)
}

async count (this: That, params?: T.CatCountRequest | TB.CatCountRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.CatCountResponse>
async count (this: That, params?: T.CatCountRequest | TB.CatCountRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatCountResponse, unknown>>
async count (this: That, params?: T.CatCountRequest | TB.CatCountRequest, options?: TransportRequestOptions): Promise<T.CatCountResponse>

@@ -279,7 +279,7 @@ export default class Cluster {
async putComponentTemplate (this: That, params: T.ClusterPutComponentTemplateRequest | TB.ClusterPutComponentTemplateRequest, options?: TransportRequestOptions): Promise<T.ClusterPutComponentTemplateResponse>
async putComponentTemplate (this: That, params: T.ClusterPutComponentTemplateRequest | TB.ClusterPutComponentTemplateRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['name']
const acceptedBody: string[] = ['template', 'aliases', 'mappings', 'settings', 'version', '_meta']
const acceptedBody: string[] = ['template', 'version', '_meta', 'allow_auto_create']
const querystring: Record<string, any> = {}
// @ts-expect-error
const userBody: any = params?.body

@@ -122,7 +122,7 @@ export default class Enrich {
async putPolicy (this: That, params: T.EnrichPutPolicyRequest | TB.EnrichPutPolicyRequest, options?: TransportRequestOptions): Promise<T.EnrichPutPolicyResponse>
async putPolicy (this: That, params: T.EnrichPutPolicyRequest | TB.EnrichPutPolicyRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['name']
const acceptedBody: string[] = ['geo_match', 'match']
const acceptedBody: string[] = ['geo_match', 'match', 'range']
const querystring: Record<string, any> = {}
// @ts-expect-error
const userBody: any = params?.body

@@ -114,7 +114,7 @@ export default class Eql {
async search<TEvent = unknown> (this: That, params: T.EqlSearchRequest | TB.EqlSearchRequest, options?: TransportRequestOptions): Promise<T.EqlSearchResponse<TEvent>>
async search<TEvent = unknown> (this: That, params: T.EqlSearchRequest | TB.EqlSearchRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['query', 'case_sensitive', 'event_category_field', 'tiebreaker_field', 'timestamp_field', 'fetch_size', 'filter', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout', 'size', 'fields', 'result_position']
const acceptedBody: string[] = ['query', 'case_sensitive', 'event_category_field', 'tiebreaker_field', 'timestamp_field', 'fetch_size', 'filter', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout', 'size', 'fields', 'result_position', 'runtime_mappings']
const querystring: Record<string, any> = {}
// @ts-expect-error
const userBody: any = params?.body

@@ -42,7 +42,7 @@ export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequ
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise<T.FieldCapsResponse>
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['index_filter', 'runtime_mappings']
const acceptedBody: string[] = ['fields', 'index_filter', 'runtime_mappings']
const querystring: Record<string, any> = {}
// @ts-expect-error
const userBody: any = params?.body

@@ -65,19 +65,24 @@ export default class Fleet {
return await this.transport.request({ path, method, querystring, body }, options)
}

async msearch (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
async msearch (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
async msearch (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
async msearch (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
async msearch<TDocument = unknown> (this: That, params: T.FleetMsearchRequest | TB.FleetMsearchRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.FleetMsearchResponse<TDocument>>
async msearch<TDocument = unknown> (this: That, params: T.FleetMsearchRequest | TB.FleetMsearchRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FleetMsearchResponse<TDocument>, unknown>>
async msearch<TDocument = unknown> (this: That, params: T.FleetMsearchRequest | TB.FleetMsearchRequest, options?: TransportRequestOptions): Promise<T.FleetMsearchResponse<TDocument>>
async msearch<TDocument = unknown> (this: That, params: T.FleetMsearchRequest | TB.FleetMsearchRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['searches']
const querystring: Record<string, any> = {}
const body = undefined
// @ts-expect-error
let body: any = params.body ?? undefined

params = params ?? {}
for (const key in params) {
if (acceptedPath.includes(key)) {
if (acceptedBody.includes(key)) {
// @ts-expect-error
body = params[key]
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}
@@ -94,19 +99,31 @@ export default class Fleet {
return await this.transport.request({ path, method, querystring, bulkBody: body }, options)
}

async search (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
async search (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
async search (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
async search (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
async search<TDocument = unknown> (this: That, params: T.FleetSearchRequest | TB.FleetSearchRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.FleetSearchResponse<TDocument>>
async search<TDocument = unknown> (this: That, params: T.FleetSearchRequest | TB.FleetSearchRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FleetSearchResponse<TDocument>, unknown>>
async search<TDocument = unknown> (this: That, params: T.FleetSearchRequest | TB.FleetSearchRequest, options?: TransportRequestOptions): Promise<T.FleetSearchResponse<TDocument>>
async search<TDocument = unknown> (this: That, params: T.FleetSearchRequest | TB.FleetSearchRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats']
const querystring: Record<string, any> = {}
const body = undefined
// @ts-expect-error
const userBody: any = params?.body
let body: Record<string, any> | string
if (typeof userBody === 'string') {
body = userBody
} else {
body = userBody != null ? { ...userBody } : undefined
}

params = params ?? {}
for (const key in params) {
if (acceptedPath.includes(key)) {
if (acceptedBody.includes(key)) {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}

68 src/api/api/health_report.ts Normal file

@@ -0,0 +1,68 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/* eslint-disable import/export */
/* eslint-disable @typescript-eslint/no-misused-new */
/* eslint-disable @typescript-eslint/no-extraneous-class */
/* eslint-disable @typescript-eslint/no-unused-vars */

// This file was automatically generated by elastic/elastic-client-generator-js
// DO NOT MODIFY IT BY HAND. Instead, modify the source open api file,
// and elastic/elastic-client-generator-js to regenerate this file again.

import {
Transport,
TransportRequestOptions,
TransportRequestOptionsWithMeta,
TransportRequestOptionsWithOutMeta,
TransportResult
} from '@elastic/transport'
import * as T from '../types'
import * as TB from '../typesWithBodyKey'
interface That { transport: Transport }

export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.HealthReportResponse>
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.HealthReportResponse, unknown>>
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptions): Promise<T.HealthReportResponse>
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['feature']
const querystring: Record<string, any> = {}
const body = undefined

params = params ?? {}
for (const key in params) {
if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}

let method = ''
let path = ''
if (params.feature != null) {
method = 'GET'
path = `/_health_report/${encodeURIComponent(params.feature.toString())}`
} else {
method = 'GET'
path = '/_health_report'
}
return await this.transport.request({ path, method, querystring, body }, options)
}

@@ -140,19 +140,32 @@ export default class Ilm {
return await this.transport.request({ path, method, querystring, body }, options)
}

async migrateToDataTiers (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
async migrateToDataTiers (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
async migrateToDataTiers (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
async migrateToDataTiers (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
async migrateToDataTiers (this: That, params?: T.IlmMigrateToDataTiersRequest | TB.IlmMigrateToDataTiersRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IlmMigrateToDataTiersResponse>
async migrateToDataTiers (this: That, params?: T.IlmMigrateToDataTiersRequest | TB.IlmMigrateToDataTiersRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmMigrateToDataTiersResponse, unknown>>
async migrateToDataTiers (this: That, params?: T.IlmMigrateToDataTiersRequest | TB.IlmMigrateToDataTiersRequest, options?: TransportRequestOptions): Promise<T.IlmMigrateToDataTiersResponse>
async migrateToDataTiers (this: That, params?: T.IlmMigrateToDataTiersRequest | TB.IlmMigrateToDataTiersRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = []
const acceptedBody: string[] = ['legacy_template_to_delete', 'node_attribute']
const querystring: Record<string, any> = {}
const body = undefined
// @ts-expect-error
const userBody: any = params?.body
let body: Record<string, any> | string
if (typeof userBody === 'string') {
body = userBody
} else {
body = userBody != null ? { ...userBody } : undefined
}

params = params ?? {}
for (const key in params) {
if (acceptedPath.includes(key)) {
if (acceptedBody.includes(key)) {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}

@@ -418,6 +418,33 @@ export default class Indices {
return await this.transport.request({ path, method, querystring, body }, options)
}

async downsample (this: That, params: T.IndicesDownsampleRequest | TB.IndicesDownsampleRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IndicesDownsampleResponse>
async downsample (this: That, params: T.IndicesDownsampleRequest | TB.IndicesDownsampleRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IndicesDownsampleResponse, unknown>>
async downsample (this: That, params: T.IndicesDownsampleRequest | TB.IndicesDownsampleRequest, options?: TransportRequestOptions): Promise<T.IndicesDownsampleResponse>
async downsample (this: That, params: T.IndicesDownsampleRequest | TB.IndicesDownsampleRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index', 'target_index']
const acceptedBody: string[] = ['config']
const querystring: Record<string, any> = {}
// @ts-expect-error
let body: any = params.body ?? undefined

for (const key in params) {
if (acceptedBody.includes(key)) {
// @ts-expect-error
body = params[key]
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}

const method = 'POST'
const path = `/${encodeURIComponent(params.index.toString())}/_downsample/${encodeURIComponent(params.target_index.toString())}`
return await this.transport.request({ path, method, querystring, body }, options)
}

async exists (this: That, params: T.IndicesExistsRequest | TB.IndicesExistsRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IndicesExistsResponse>
async exists (this: That, params: T.IndicesExistsRequest | TB.IndicesExistsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IndicesExistsResponse, unknown>>
async exists (this: That, params: T.IndicesExistsRequest | TB.IndicesExistsRequest, options?: TransportRequestOptions): Promise<T.IndicesExistsResponse>
@@ -513,19 +540,19 @@ export default class Indices {
return await this.transport.request({ path, method, querystring, body }, options)
}

async fieldUsageStats (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
async fieldUsageStats (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
async fieldUsageStats (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
async fieldUsageStats (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
async fieldUsageStats (this: That, params: T.IndicesFieldUsageStatsRequest | TB.IndicesFieldUsageStatsRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IndicesFieldUsageStatsResponse>
async fieldUsageStats (this: That, params: T.IndicesFieldUsageStatsRequest | TB.IndicesFieldUsageStatsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IndicesFieldUsageStatsResponse, unknown>>
async fieldUsageStats (this: That, params: T.IndicesFieldUsageStatsRequest | TB.IndicesFieldUsageStatsRequest, options?: TransportRequestOptions): Promise<T.IndicesFieldUsageStatsResponse>
async fieldUsageStats (this: That, params: T.IndicesFieldUsageStatsRequest | TB.IndicesFieldUsageStatsRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const querystring: Record<string, any> = {}
const body = undefined

params = params ?? {}
for (const key in params) {
if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}
@@ -860,19 +887,31 @@ export default class Indices {
return await this.transport.request({ path, method, querystring, body }, options)
}

async modifyDataStream (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
async modifyDataStream (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
async modifyDataStream (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
async modifyDataStream (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
async modifyDataStream (this: That, params: T.IndicesModifyDataStreamRequest | TB.IndicesModifyDataStreamRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IndicesModifyDataStreamResponse>
async modifyDataStream (this: That, params: T.IndicesModifyDataStreamRequest | TB.IndicesModifyDataStreamRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IndicesModifyDataStreamResponse, unknown>>
async modifyDataStream (this: That, params: T.IndicesModifyDataStreamRequest | TB.IndicesModifyDataStreamRequest, options?: TransportRequestOptions): Promise<T.IndicesModifyDataStreamResponse>
async modifyDataStream (this: That, params: T.IndicesModifyDataStreamRequest | TB.IndicesModifyDataStreamRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = []
const acceptedBody: string[] = ['actions']
const querystring: Record<string, any> = {}
const body = undefined
// @ts-expect-error
const userBody: any = params?.body
let body: Record<string, any> | string
if (typeof userBody === 'string') {
body = userBody
} else {
body = userBody != null ? { ...userBody } : undefined
}

params = params ?? {}
for (const key in params) {
if (acceptedPath.includes(key)) {
if (acceptedBody.includes(key)) {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}

@@ -42,7 +42,7 @@ export default async function KnnSearchApi<TDocument = unknown> (this: That, par
export default async function KnnSearchApi<TDocument = unknown> (this: That, params: T.KnnSearchRequest | TB.KnnSearchRequest, options?: TransportRequestOptions): Promise<T.KnnSearchResponse<TDocument>>
export default async function KnnSearchApi<TDocument = unknown> (this: That, params: T.KnnSearchRequest | TB.KnnSearchRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['_source', 'docvalue_fields', 'stored_fields', 'fields', 'knn']
const acceptedBody: string[] = ['_source', 'docvalue_fields', 'stored_fields', 'fields', 'filter', 'knn']
const querystring: Record<string, any> = {}
// @ts-expect-error
const userBody: any = params?.body

@@ -135,10 +135,10 @@ export default class License {
return await this.transport.request({ path, method, querystring, body }, options)
}

async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.LicensePostResponse>
async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicensePostResponse, unknown>>
async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<T.LicensePostResponse>
async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<any> {
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.LicensePostResponse>
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicensePostResponse, unknown>>
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<T.LicensePostResponse>
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = []
const acceptedBody: string[] = ['license', 'licenses']
const querystring: Record<string, any> = {}
@@ -151,6 +151,7 @@ export default class License {
body = userBody != null ? { ...userBody } : undefined
}

params = params ?? {}
for (const key in params) {
if (acceptedBody.includes(key)) {
body = body ?? {}

@@ -82,8 +82,15 @@ export default class Logstash {
}
}

const method = 'GET'
const path = `/_logstash/pipeline/${encodeURIComponent(params.id.toString())}`
let method = ''
let path = ''
if (params.id != null) {
method = 'GET'
path = `/_logstash/pipeline/${encodeURIComponent(params.id.toString())}`
} else {
method = 'GET'
path = '/_logstash/pipeline'
}
return await this.transport.request({ path, method, querystring, body }, options)
}

@@ -73,10 +73,10 @@ export default class Migration {
return await this.transport.request({ path, method, querystring, body }, options)
}

async getFeatureUpgradeStatus (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
async getFeatureUpgradeStatus (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
async getFeatureUpgradeStatus (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
async getFeatureUpgradeStatus (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
async getFeatureUpgradeStatus (this: That, params?: T.MigrationGetFeatureUpgradeStatusRequest | TB.MigrationGetFeatureUpgradeStatusRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.MigrationGetFeatureUpgradeStatusResponse>
async getFeatureUpgradeStatus (this: That, params?: T.MigrationGetFeatureUpgradeStatusRequest | TB.MigrationGetFeatureUpgradeStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MigrationGetFeatureUpgradeStatusResponse, unknown>>
async getFeatureUpgradeStatus (this: That, params?: T.MigrationGetFeatureUpgradeStatusRequest | TB.MigrationGetFeatureUpgradeStatusRequest, options?: TransportRequestOptions): Promise<T.MigrationGetFeatureUpgradeStatusResponse>
async getFeatureUpgradeStatus (this: That, params?: T.MigrationGetFeatureUpgradeStatusRequest | TB.MigrationGetFeatureUpgradeStatusRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = []
const querystring: Record<string, any> = {}
const body = undefined
@@ -86,6 +86,7 @@ export default class Migration {
if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}
@@ -95,10 +96,10 @@ export default class Migration {
return await this.transport.request({ path, method, querystring, body }, options)
}

async postFeatureUpgrade (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
async postFeatureUpgrade (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
async postFeatureUpgrade (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
async postFeatureUpgrade (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
async postFeatureUpgrade (this: That, params?: T.MigrationPostFeatureUpgradeRequest | TB.MigrationPostFeatureUpgradeRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.MigrationPostFeatureUpgradeResponse>
async postFeatureUpgrade (this: That, params?: T.MigrationPostFeatureUpgradeRequest | TB.MigrationPostFeatureUpgradeRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MigrationPostFeatureUpgradeResponse, unknown>>
async postFeatureUpgrade (this: That, params?: T.MigrationPostFeatureUpgradeRequest | TB.MigrationPostFeatureUpgradeRequest, options?: TransportRequestOptions): Promise<T.MigrationPostFeatureUpgradeResponse>
async postFeatureUpgrade (this: That, params?: T.MigrationPostFeatureUpgradeRequest | TB.MigrationPostFeatureUpgradeRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = []
const querystring: Record<string, any> = {}
const body = undefined
@@ -108,6 +109,7 @@ export default class Migration {
if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key]
}
}
Some files were not shown because too many files have changed in this diff.