Compare commits: backport-2 ... v8.7.3 (23 commits)

Only the commit SHAs survived in this capture; author and date columns were lost.

| SHA1 |
|---|
| f2d2cfc6dd |
| 5835505e21 |
| 919094a77b |
| 684bc29927 |
| 7f45fff571 |
| 36e4d8aa8d |
| 49159dba33 |
| d7157bfe87 |
| 10bdb39a68 |
| ab8ff69eac |
| fca53568d2 |
| d0f0b376c1 |
| 07b4c9966c |
| a2b68958c3 |
| b750c61686 |
| ccf98d126b |
| 49eaea0f69 |
| c562a730d0 |
| 56879a0642 |
| ab72356061 |
| 71f85b94cd |
| b34188ceac |
| db6d7bd2cc |
.buildkite/Dockerfile (new normal file, 14 lines)
@@ -0,0 +1,14 @@
ARG NODE_VERSION=${NODE_VERSION:-18}
FROM node:$NODE_VERSION

# Install required tools
RUN apt-get clean -y && \
  apt-get -qy update && \
  apt-get -y install zip && \
  apt-get clean && \
  rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

WORKDIR /usr/src/app

COPY . .
RUN npm install --production=false
.buildkite/certs/testnode.crt (new executable file, 21 lines)
@@ -0,0 +1,21 @@
-----BEGIN CERTIFICATE-----
|
||||
MIIDYjCCAkqgAwIBAgIVAIClHav09e9XGWJrnshywAjUHTnXMA0GCSqGSIb3DQEB
|
||||
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
|
||||
ZXJhdGVkIENBMB4XDTIzMDMyODE3MDIzOVoXDTI2MDMyNzE3MDIzOVowEzERMA8G
|
||||
A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCV
|
||||
+t5/g6u2r3awCtzqp17KG0hRxzkVoJoF8DYzVh+Rv9ymxQW0C/U8dQihAjkZHaIA
|
||||
n49lSyNLkwWtmqQgPcimV4d6XuTYx2ahDixXYtjmoOSwH5dRtovKPCNKDPkUj9Vq
|
||||
NwMW0uB1VxniMKI4DnYFqBgHL9kQKhQqvas6Gx0X6ptGRCLYCtVxeFcau6nnkZJt
|
||||
urb+HNV5waOh0uTmsqnnslK3NjCQ/f030vPKxM5fOqOU5ajUHpZFJ6ZFmS32074H
|
||||
l+mZoRT/GtbnVtIg+CJXsWThF3/L4iBImv+rkY9MKX5fyMLJgmIJG68S90IQGR8c
|
||||
Z2lZYzC0J7zjMsYlODbDAgMBAAGjgYswgYgwHQYDVR0OBBYEFIDIcECn3AVHc3jk
|
||||
MpQ4r7Kc3WCsMB8GA1UdIwQYMBaAFJYCWKn16g+acbing4Vl45QGUBs0MDsGA1Ud
|
||||
EQQ0MDKCCWxvY2FsaG9zdIIIaW5zdGFuY2WHBH8AAAGHEAAAAAAAAAAAAAAAAAAA
|
||||
AAGCA2VzMTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBCwUAA4IBAQBtX3RQ5ATpfORM
|
||||
lrnhaUPGOWkjnb3p3BrdAWUaWoh136QhaXqxKiALQQhTtTerkXOcuquy9MmAyYvS
|
||||
9fDdGvLCAO8pPCXjnzonCHerCLGdS7f/eqvSFWCdy7LPHzTAFYfVWVvbZed+83TL
|
||||
bDY63AMwIexj34vJEStMapuFwWx05fstE8qZWIbYCL87sF5H/MRhzlz3ScAhQ1N7
|
||||
tODH7zvLzSxFGGEzCIKZ0iPFKbd3Y0wE6SptDSKhOqlnC8kkNeI2GjWsqVfHKsoF
|
||||
pDFmri7IfOucuvalXJ6xiHPr9RDbuxEXs0u8mteT5nFQo7EaEGdHpg1pNGbfBOzP
|
||||
lmj/dRS9
|
||||
-----END CERTIFICATE-----
.buildkite/certs/testnode.key (new executable file, 27 lines)
@@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEowIBAAKCAQEAlfref4Ortq92sArc6qdeyhtIUcc5FaCaBfA2M1Yfkb/cpsUF
|
||||
tAv1PHUIoQI5GR2iAJ+PZUsjS5MFrZqkID3IpleHel7k2MdmoQ4sV2LY5qDksB+X
|
||||
UbaLyjwjSgz5FI/VajcDFtLgdVcZ4jCiOA52BagYBy/ZECoUKr2rOhsdF+qbRkQi
|
||||
2ArVcXhXGrup55GSbbq2/hzVecGjodLk5rKp57JStzYwkP39N9LzysTOXzqjlOWo
|
||||
1B6WRSemRZkt9tO+B5fpmaEU/xrW51bSIPgiV7Fk4Rd/y+IgSJr/q5GPTCl+X8jC
|
||||
yYJiCRuvEvdCEBkfHGdpWWMwtCe84zLGJTg2wwIDAQABAoIBAAEP7HYNNnDWdYMD
|
||||
+WAtYM12X/W5s/wUP94juaBI4u4iZH2EZodlixEdZUCTXgq43WsDUhxX05s7cE+p
|
||||
H5DuSCHtoo2WHvGKAposwRDm2f3YVWQ2Xyb2ahNt69LYHHWrO+XQ60YYTa3r8Gn3
|
||||
7dFR3I016/jyn5DeEVaglvS1dfj2UG4ybR4KkMfcKd94X0rKvz3wzAhHIh+hwMtv
|
||||
sVk7V4vSnKf2mJXwIVECTolnEJEkCjWjjymgUJYKT8yN7JnAsHRcvMa6kWwIGrLp
|
||||
oQCEaJwYM6ynCRS989pLt3vA2iu5VkYhiHXJ9Ds/5b5yzhzmj+ymzKbFKrrUUrmn
|
||||
+2Jp1K0CgYEAw8BchALsD/+JuoXjinA14MH7PZjIsXyhtPk+c4pk42iMNyg1J8XF
|
||||
Y/ITepLYsl2bZqQI1jOJdDqsTwIsva9r749lsmkYI3VOxhi7+qBK0sThR66C87lX
|
||||
iU2QpnZ9NloC6ort4a3MEvZ/gRQcXdBrNlNoza2p7PHAVDTnsdSrNKUCgYEAxCQV
|
||||
uo85oZyfnMufn/gcI9IeYOgiB0tO3a8cAFX2wQW1y935t6Z13ApUQc4EnCOH7ZBc
|
||||
td5kT+xGdRWnfPZ38FM1dd5MBdGE69s3q8pJDUExSgNLqaF6/5bD32qui66L3ugu
|
||||
eMjxrzqJsc2uQTPCs18SGsyRmf54DpY8HglOmUcCgYAGRDgx+a347SNJl1OrcOAo
|
||||
q80RMbzrAaRjmL8JD9se9I/YjC73cPtasbsx51WMkDaTWJj30nqJ//7YIKeyAtWf
|
||||
u6Vzyq19JRo6eTw7T7pVePwFQW7rwnks6hDBY3WqscL6IyxuVxP7X2zBgxVNY4ir
|
||||
Gox2WSLhdPPFPlRUewxoCQKBgAJvqE1u5fpZ5ame5dao0ECppXLyrymEB/C88g4X
|
||||
Az+WgJGNqkJbsO8QuccvdeMylcefmWcw4fIULzPZFwF4VjkH74wNPMh9t7buPBzI
|
||||
IGwnuSMAM3ph5RMzni8yNgTKIDaej6U0abwRcBBjS5zHtc1giusGS3CsNnWH7Cs7
|
||||
VlyVAoGBAK+prq9t9x3tC3NfCZH8/Wfs/X0T1qm11RiL5+tOhmbguWAqSSBy8OjX
|
||||
Yh8AOXrFuMGldcaTXxMeiKvI2cyybnls1MFsPoeV/fSMJbex7whdeJeTi66NOSKr
|
||||
oftUHvkHS0Vv/LicMEOufFGslb4T9aPJ7oyhoSlz9CfAutDWk/q/
|
||||
-----END RSA PRIVATE KEY-----
.ci/functions/cleanup.sh → .buildkite/functions/cleanup.sh (renamed, Normal file → Executable file, 0 lines changed)
.ci/functions/imports.sh → .buildkite/functions/imports.sh (renamed, Normal file → Executable file, 0 lines changed)
.ci/functions/wait-for-container.sh → .buildkite/functions/wait-for-container.sh (renamed, Normal file → Executable file, 0 lines changed)
.buildkite/pipeline.yml (new normal file, 31 lines)
@@ -0,0 +1,31 @@
steps:
  - label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}"
    agents:
      provider: "gcp"
    env:
      NODE_VERSION: "{{ matrix.nodejs }}"
      TEST_SUITE: "{{ matrix.suite }}"
      STACK_VERSION: 8.8.0-SNAPSHOT
    matrix:
      setup:
        suite:
          - "free"
          - "platinum"
        nodejs:
          - "14"
          - "16"
          - "18"
          - "20"
    command: ./.buildkite/run-tests.sh
    artifact_paths: "./junit-output/junit-*.xml"
  - wait: ~
    continue_on_failure: true
  - label: ":junit: Test results"
    agents:
      provider: "gcp"
      image: family/core-ubuntu-2204
    plugins:
      - junit-annotate#v2.4.1:
          artifacts: "junit-output/junit-*.xml"
          job-uuid-file-pattern: 'junit-(.*).xml'
          fail-build-on-error: true
.buildkite/run-client.sh (new executable file, 31 lines)
@@ -0,0 +1,31 @@
#!/usr/bin/env bash
#
# Once called Elasticsearch should be up and running
#
script_path=$(dirname "$(realpath -s "$0")")
set -euo pipefail
repo=$(pwd)

export NODE_VERSION=${NODE_VERSION:-18}

echo "--- :javascript: Building Docker image"
docker build \
  --file "$script_path/Dockerfile" \
  --tag elastic/elasticsearch-js \
  --build-arg NODE_VERSION="$NODE_VERSION" \
  .

echo "--- :javascript: Running $TEST_SUITE tests"
mkdir -p "$repo/junit-output"
docker run \
  --network="${network_name}" \
  --env "TEST_ES_SERVER=${elasticsearch_url}" \
  --env "ELASTIC_PASSWORD=${elastic_password}" \
  --env "TEST_SUITE=${TEST_SUITE}" \
  --env "ELASTIC_USER=elastic" \
  --env "BUILDKITE=true" \
  --volume "$repo/junit-output:/junit-output" \
  --name elasticsearch-js \
  --rm \
  elastic/elasticsearch-js \
  bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"
@ -26,12 +26,15 @@ script_path=$(dirname $(realpath -s $0))
|
||||
source $script_path/functions/imports.sh
|
||||
set -euo pipefail
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on seperate terminals \033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on separate terminals \033[0m"
|
||||
cleanup_node $es_node_name
|
||||
|
||||
master_node_name=${es_node_name}
|
||||
cluster_name=${moniker}${suffix}
|
||||
|
||||
# Set vm.max_map_count kernel setting to 262144
|
||||
sudo sysctl -w vm.max_map_count=262144
|
||||
|
||||
declare -a volumes
|
||||
environment=($(cat <<-END
|
||||
--env ELASTIC_PASSWORD=$elastic_password
|
||||
@ -73,6 +76,7 @@ END
|
||||
))
|
||||
else
|
||||
environment+=($(cat <<-END
|
||||
--env node.roles=data,data_cold,data_content,data_frozen,data_hot,data_warm,ingest,master,ml,remote_cluster_client,transform
|
||||
--env xpack.security.enabled=false
|
||||
--env xpack.security.http.ssl.enabled=false
|
||||
END
|
||||
@ -84,6 +88,13 @@ if [[ "$TEST_SUITE" == "platinum" ]]; then
|
||||
cert_validation_flags="--insecure --cacert /usr/share/elasticsearch/config/certs/ca.crt --resolve ${es_node_name}:443:127.0.0.1"
|
||||
fi
|
||||
|
||||
echo "--- :elasticsearch: Environment setup"
|
||||
echo "TEST_SUITE: $TEST_SUITE"
|
||||
echo "Elasticsearch URL: $elasticsearch_url"
|
||||
echo "Elasticsearch External URL: $external_elasticsearch_url"
|
||||
|
||||
|
||||
echo "--- :elasticsearch: Running container"
|
||||
# Pull the container, retry on failures up to 5 times with
|
||||
# short delays between each attempt. Fixes most transient network errors.
|
||||
docker_pull_attempts=0
|
||||
@ -138,6 +149,4 @@ END
|
||||
if wait_for_container "$es_node_name" "$network_name"; then
|
||||
echo -e "\033[32;1mSUCCESS:\033[0m Running on: $node_url\033[0m"
|
||||
fi
|
||||
|
||||
done
|
||||
|
||||
.buildkite/run-tests.sh (new executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
#
# Script to run Elasticsearch container and Elasticsearch client integration tests on Buildkite
#
# Version 0.1
#
script_path=$(dirname "$(realpath -s "$0")")
source "$script_path/functions/imports.sh"

set -euo pipefail

echo "--- :elasticsearch: Starting Elasticsearch"
DETACH=true bash "$script_path/run-elasticsearch.sh"

echo "+++ :javascript: Run Client"
bash "$script_path/run-client.sh"
@@ -1,4 +1,4 @@
-ARG NODE_JS_VERSION=16
+ARG NODE_JS_VERSION=18
FROM node:${NODE_JS_VERSION}

# Create app directory
|
||||
@ -1,19 +0,0 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDIzCCAgugAwIBAgIVAMTO6uVx9dLox2t0lY4IcBKZXb5WMA0GCSqGSIb3DQEB
|
||||
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
|
||||
ZXJhdGVkIENBMB4XDTIwMDIyNjA1NTA1OVoXDTIzMDIyNTA1NTA1OVowEzERMA8G
|
||||
A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDK
|
||||
YLTOikVENiN/qYupOsoXd7VYYnryyfCC/dK4FC2aozkbqjFzBdvPGAasoc4yEiH5
|
||||
CGeXMgJuOjk1maqetmdIsw00j4oHJviYsnGXzxxS5swhD7spcW4Uk4V4tAUzrbfT
|
||||
vW/2WW/yYCLe5phVb2chz0jL+WYb4bBmdfs/t6RtP9RqsplYAmVp3gZ6lt2YNtvE
|
||||
k9gz0TVk3DuO1TquIClfRYUjuywS6xDSvxJ8Jl91EfDWM8QU+9F+YAtiv74xl2U3
|
||||
P0wwMqNvMxf9/3ak3lTQGsgO4L6cwbKpVLMMzxSVunZz/sgl19xy3qHHz1Qr2MjJ
|
||||
/2c2J7vahUL4NPRkjJClAgMBAAGjTTBLMB0GA1UdDgQWBBS2Wn8E2VZv4oenY+pR
|
||||
O8G3zfQXhzAfBgNVHSMEGDAWgBSWAlip9eoPmnG4p4OFZeOUBlAbNDAJBgNVHRME
|
||||
AjAAMA0GCSqGSIb3DQEBCwUAA4IBAQAvwPvCiJJ6v9jYcyvYY8I3gP0oCwrylpRL
|
||||
n91UlgRSHUmuAObyOoVN5518gSV/bTU2SDrstcLkLFxHvnfpoGJoxsQEHuGxwDRI
|
||||
nhYNd62EKLerehNM/F9ILKmvTh8f6QPCzjUuExTXv+63l2Sr6dBS7FHsGs6UKUYO
|
||||
llM/y9wMZ1LCuZuBg9RhtgpFXRSgDM9Z7Begu0d/BPX9od/qAeZg9Arz4rwUiCN4
|
||||
IJOMEBEPi5q1tgeS0Fb1Grpqd0Uz5tZKtEHNKzLG+zSMmkneL62Nk2HsmEFZKwzg
|
||||
u2pU42UaUE596G6o78s1aLn9ICcElPHTjiuZNSiyuu9IzvFDjGQw
|
||||
-----END CERTIFICATE-----
|
||||
@ -1,27 +0,0 @@
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEogIBAAKCAQEAymC0zopFRDYjf6mLqTrKF3e1WGJ68snwgv3SuBQtmqM5G6ox
|
||||
cwXbzxgGrKHOMhIh+QhnlzICbjo5NZmqnrZnSLMNNI+KByb4mLJxl88cUubMIQ+7
|
||||
KXFuFJOFeLQFM623071v9llv8mAi3uaYVW9nIc9Iy/lmG+GwZnX7P7ekbT/UarKZ
|
||||
WAJlad4GepbdmDbbxJPYM9E1ZNw7jtU6riApX0WFI7ssEusQ0r8SfCZfdRHw1jPE
|
||||
FPvRfmALYr++MZdlNz9MMDKjbzMX/f92pN5U0BrIDuC+nMGyqVSzDM8Ulbp2c/7I
|
||||
Jdfcct6hx89UK9jIyf9nNie72oVC+DT0ZIyQpQIDAQABAoIBADAh7f7NjgnaInlD
|
||||
ds8KB3SraPsbeQhzlPtiqRJU4j/MIFH/GYG03AGWQkget67a9y+GmzSvlTpoKKEh
|
||||
6h2TXl9BDpv4o6ht0WRn1HJ5tM/Wyqf2WNpTew3zxCPgFPikkXsPrChYPzLTQJfp
|
||||
GkP/mfTFmxfAOlPZSp4j41zVLYs53eDkAegFPVfKSr1XNNJ3QODLPcIBfxBYsiC9
|
||||
oU+jRW8xYuj31cEl5k5UqrChJ1rm3mt6cguqXKbISuoSvi13gXI6DccqhuLAU+Kr
|
||||
ib2XYrRP+pWocZo/pM9WUVoNGtFxfY88sAQtvG6gDKo2AURtFyq84Ow0h9mdixV/
|
||||
gRIDPcECgYEA5nEqE3OKuG9WuUFGXvjtn4C0F6JjflYWh7AbX51S4F6LKrW6/XHL
|
||||
Rg4BtF+XReT7OQ6llsV8kZeUxsUckkgDLzSaA8lysNDV5KkhAWHfRqH//QKFbqZi
|
||||
JL9t3x63Qt81US8s2hQk3khPYTRM8ZB3xHiXvZYSGC/0x/DxfEO3QJECgYEA4NK5
|
||||
sxtrat8sFz6SK9nWEKimPjDVzxJ0hxdX4tRq/JdOO5RncawVqt6TNP9gTuxfBvhW
|
||||
MhJYEsQj8iUoL1dxo9d1eP8HEANNV0iX5OBvJNmgBp+2OyRSyr+PA55+wAxYuAE7
|
||||
QKaitOjW57fpArNRt2hQyiSzTuqUFRWTWJHCWNUCgYAEurPTXF6vdFGCUc2g61jt
|
||||
GhYYGhQSpq+lrz6Qksj9o9MVWE9zHh++21C7o+6V16I0RJGva3QoBMVf4vG4KtQt
|
||||
5tV2WG8LI+4P2Ey+G4UajP6U8bVNVQrUmD0oBBhcvfn5JY+1Fg6/pRpD82/U0VMz
|
||||
7AmpMWhDqNBMPiymkTk0kQKBgCuWb05cSI0ly4SOKwS5bRk5uVFhYnKNH255hh6C
|
||||
FGP4acB/WzbcqC7CjEPAJ0nl5d6SExQOHmk1AcsWjR3wlCWxxiK5PwNJwJrlhh1n
|
||||
reS1FKN0H36D4lFQpkeLWQOe4Sx7gKNeKzlr0w6Fx3Uwku0+Gju2tdTdAey8jB6l
|
||||
08opAoGAEe1AuR/OFp2xw6V8TH9UHkkpGxy+OrXI6PX6tgk29PgB+uiMu4RwbjVz
|
||||
1di1KKq2XecAilVbnyqY+edADxYGbSnci9x5wQRIebfMi3VXKtV8NQBv2as6qwtW
|
||||
JDcQUWotOHjpdvmfJWWkcBhbAKrgX8ukww00ZI/lC3/rmkGnBBg=
|
||||
-----END RSA PRIVATE KEY-----
|
||||
@ -1,7 +0,0 @@
|
||||
ARG NODE_JS_VERSION=10
|
||||
FROM node:${NODE_JS_VERSION}-alpine
|
||||
|
||||
RUN apk --no-cache add git
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
@ -1,81 +0,0 @@
|
||||
---
|
||||
|
||||
##### GLOBAL METADATA
|
||||
|
||||
- meta:
|
||||
cluster: clients-ci
|
||||
|
||||
##### JOB DEFAULTS
|
||||
|
||||
- job:
|
||||
project-type: matrix
|
||||
logrotate:
|
||||
daysToKeep: 30
|
||||
numToKeep: 100
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/main
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
properties:
|
||||
- github:
|
||||
url: https://github.com/elastic/elasticsearch-js/
|
||||
- inject:
|
||||
properties-content: HOME=$JENKINS_HOME
|
||||
concurrent: true
|
||||
node: flyweight
|
||||
scm:
|
||||
- git:
|
||||
name: origin
|
||||
credentials-id: f6c7695a-671e-4f4f-a331-acdce44ff9ba
|
||||
reference-repo: /var/lib/jenkins/.git-references/elasticsearch-js.git
|
||||
branches:
|
||||
- ${branch_specifier}
|
||||
url: https://github.com/elastic/elasticsearch-js.git
|
||||
basedir: ''
|
||||
wipe-workspace: 'True'
|
||||
triggers:
|
||||
- github
|
||||
vault:
|
||||
# vault read auth/approle/role/clients-ci/role-id
|
||||
role_id: ddbd0d44-0e51-105b-177a-c8fdfd445126
|
||||
axes:
|
||||
- axis:
|
||||
type: slave
|
||||
name: label
|
||||
values:
|
||||
- linux
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: STACK_VERSION
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: NODE_JS_VERSION
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: TEST_SUITE
|
||||
yaml-strategy:
|
||||
exclude-key: exclude
|
||||
filename: .ci/test-matrix.yml
|
||||
wrappers:
|
||||
- ansicolor
|
||||
- timeout:
|
||||
type: absolute
|
||||
timeout: 120
|
||||
fail: true
|
||||
- timestamps
|
||||
- workspace-cleanup
|
||||
builders:
|
||||
- shell: |-
|
||||
#!/usr/local/bin/runbld
|
||||
.ci/run-tests
|
||||
publishers:
|
||||
- email:
|
||||
recipients: build-lang-clients@elastic.co
|
||||
- junit:
|
||||
results: "**/*-junit.xml"
|
||||
allow-empty-results: true
|
||||
@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+7.17
|
||||
display-name: 'elastic / elasticsearch-js # 7.17'
|
||||
description: Testing the elasticsearch-js 7.17 branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/7.17
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+8.1
|
||||
display-name: 'elastic / elasticsearch-js # 8.1'
|
||||
description: Testing the elasticsearch-js 8.1 branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/8.1
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+8.2
|
||||
display-name: 'elastic / elasticsearch-js # 8.2'
|
||||
description: Testing the elasticsearch-js 8.2 branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/8.2
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@ -1,15 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+main
|
||||
display-name: 'elastic / elasticsearch-js # main'
|
||||
description: Testing the elasticsearch-js main branch.
|
||||
junit_results: "*-junit.xml"
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/main
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: 'H */12 * * *'
|
||||
@ -1,19 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+pull-request
|
||||
display-name: 'elastic / elasticsearch-js # pull-request'
|
||||
description: Testing of elasticsearch-js pull requests.
|
||||
junit_results: "*-junit.xml"
|
||||
scm:
|
||||
- git:
|
||||
branches:
|
||||
- ${ghprbActualCommit}
|
||||
refspec: +refs/pull/*:refs/remotes/origin/pr/*
|
||||
triggers:
|
||||
- github-pull-request:
|
||||
org-list:
|
||||
- elastic
|
||||
allow-whitelist-orgs-as-admins: true
|
||||
github-hooks: true
|
||||
status-context: clients-ci
|
||||
cancel-builds-on-update: true
|
||||
.ci/make.mjs (16 lines changed)
@@ -86,31 +86,33 @@ async function bump (args) {
|
||||
'utf8'
|
||||
)
|
||||
|
||||
const testMatrix = await readFile(join(import.meta.url, 'test-matrix.yml'), 'utf8')
|
||||
const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'))
|
||||
await writeFile(
|
||||
join(import.meta.url, 'test-matrix.yml'),
|
||||
testMatrix.replace(/STACK_VERSION:\s+\- "[0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?"/, `STACK_VERSION:\n - "${cleanVersion}-SNAPSHOT"`), // eslint-disable-line
|
||||
join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
|
||||
pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: - ${cleanVersion}-SNAPSHOT`), // eslint-disable-line
|
||||
'utf8'
|
||||
)
|
||||
}
|
||||
|
||||
// this command can only be executed locally for now
|
||||
async function codegen (args) {
|
||||
assert(args.length === 1, 'Bump task expects one parameter')
|
||||
const clientGeneratorPath = join(import.meta.url, '..', '..', 'elastic-client-generator-js')
|
||||
assert(args.length === 1, 'Codegen task expects one parameter')
|
||||
const [version] = args
|
||||
|
||||
const clientGeneratorPath = join(import.meta.url, '..', '..', 'elastic-client-generator-js')
|
||||
const isGeneratorCloned = await $`[[ -d ${clientGeneratorPath} ]]`.exitCode === 0
|
||||
assert(isGeneratorCloned, 'You must clone the elastic-client-generator-js first')
|
||||
|
||||
await $`npm install --prefix ${clientGeneratorPath}`
|
||||
// this command will take a while!
|
||||
|
||||
// generate elasticsearch client. this command will take a while!
|
||||
if (version === 'main') {
|
||||
await $`npm run elasticsearch --prefix ${clientGeneratorPath} -- --version main`
|
||||
} else {
|
||||
await $`npm run elasticsearch --prefix ${clientGeneratorPath} -- --version ${version.split('.').slice(0, 2).join('.')}`
|
||||
}
|
||||
await $`npm run lint --prefix ${clientGeneratorPath}`
|
||||
// clean up fixable linter issues
|
||||
await $`npm run fix --prefix ${clientGeneratorPath}`
|
||||
|
||||
await $`rm -rf ${join(import.meta.url, '..', 'src', 'api')}`
|
||||
await $`mkdir ${join(import.meta.url, '..', 'src', 'api')}`
|
||||
|
||||
.ci/make.sh (59 lines changed)
@@ -1,9 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
#
|
||||
# Skeleton for common build entry script for all elastic
|
||||
# clients. Needs to be adapted to individual client usage.
|
||||
# Build entry script for elasticsearch-js
|
||||
#
|
||||
# Must be called: ./.ci/make.sh <target> <params>
|
||||
#
|
||||
@ -11,19 +9,19 @@
|
||||
#
|
||||
# Targets:
|
||||
# ---------------------------
|
||||
# assemble <VERSION> : build client artefacts with version
|
||||
# bump <VERSION> : bump client internals to version
|
||||
# codegen <VERSION> : generate endpoints
|
||||
# docsgen <VERSION> : generate documentation
|
||||
# examplegen : generate the doc examples
|
||||
# clean : clean workspace
|
||||
# assemble <VERSION> : build client artifacts with version
|
||||
# bump <VERSION> : bump client internals to version
|
||||
# bumpmatrix <VERSION> : bump stack version in test matrix to version
|
||||
# codegen : generate endpoints
|
||||
# docsgen <VERSION> : generate documentation
|
||||
# examplegen : generate the doc examples
|
||||
# clean : clean workspace
|
||||
#
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Bootstrap
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
script_path=$(dirname "$(realpath -s "$0")")
|
||||
repo=$(realpath "$script_path/../")
|
||||
generator=$(realpath "$script_path/../../elastic-client-generator-js")
|
||||
@ -34,24 +32,21 @@ TASK=$1
|
||||
TASK_ARGS=()
|
||||
VERSION=$2
|
||||
STACK_VERSION=$VERSION
|
||||
NODE_JS_VERSION=16
|
||||
WORKFLOW=${WORKFLOW-staging}
|
||||
set -euo pipefail
|
||||
|
||||
product="elastic/elasticsearch-js"
|
||||
output_folder=".ci/output"
|
||||
codegen_folder=".ci/output"
|
||||
OUTPUT_DIR="$repo/${output_folder}"
|
||||
REPO_BINDING="${OUTPUT_DIR}:/sln/${output_folder}"
|
||||
# REPO_BINDING="${OUTPUT_DIR}:/sln/${output_folder}"
|
||||
NODE_JS_VERSION=18
|
||||
WORKFLOW=${WORKFLOW-staging}
|
||||
mkdir -p "$OUTPUT_DIR"
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Parse Command
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
case $CMD in
|
||||
clean)
|
||||
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
|
||||
@ -104,8 +99,21 @@ case $CMD in
|
||||
# VERSION is BRANCH here for now
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
bumpmatrix)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
|
||||
TASK=bumpmatrix
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
*)
|
||||
echo -e "\nUsage:\n\t $CMD is not supported right now\n"
|
||||
echo -e "\n'$CMD' is not supported right now\n"
|
||||
echo -e "\nUsage:"
|
||||
echo -e "\t $0 release \$VERSION\n"
|
||||
echo -e "\t $0 bump \$VERSION"
|
||||
echo -e "\t $0 codegen \$VERSION"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
@ -118,10 +126,8 @@ echo -e "\033[34;1mINFO: building $product container\033[0m"
|
||||
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--tag ${product} \
|
||||
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
|
||||
--build-arg USER_ID="$(id -u)" \
|
||||
--build-arg GROUP_ID="$(id -g)" \
|
||||
--tag "$product" \
|
||||
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
|
||||
.
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
@ -131,14 +137,15 @@ docker build \
|
||||
echo -e "\033[34;1mINFO: running $product container\033[0m"
|
||||
|
||||
docker run \
|
||||
--volume $repo:/usr/src/app \
|
||||
--volume $generator:/usr/src/elastic-client-generator-js \
|
||||
--volume "$repo:/usr/src/app" \
|
||||
--volume "$generator:/usr/src/elastic-client-generator-js" \
|
||||
--volume /usr/src/app/node_modules \
|
||||
--env "WORKFLOW=${WORKFLOW}" \
|
||||
-u "$(id -u):$(id -g)" \
|
||||
--env "WORKFLOW=$WORKFLOW" \
|
||||
--name make-elasticsearch-js \
|
||||
--rm \
|
||||
$product \
|
||||
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}
|
||||
node .ci/make.mjs --task $TASK "${TASK_ARGS[@]}"
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Post Command tasks & checks
|
||||
|
||||
@ -1,14 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
source /usr/local/bin/bash_standard_lib.sh
|
||||
|
||||
DOCKER_IMAGES="node:17-alpine
|
||||
node:16-alpine
|
||||
node:14-alpine
|
||||
"
|
||||
|
||||
for di in ${DOCKER_IMAGES}
|
||||
do
|
||||
(retry 2 docker pull "${di}") || echo "Error pulling ${di} Docker image, we continue"
|
||||
done
|
||||
|
||||
@ -1,43 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# parameters are available to this script
|
||||
|
||||
# STACK_VERSION -- version e.g Major.Minor.Patch(-Prelease)
|
||||
# TEST_SUITE -- which test suite to run: free or platinum
|
||||
# ELASTICSEARCH_URL -- The url at which elasticsearch is reachable, a default is composed based on STACK_VERSION and TEST_SUITE
|
||||
# NODE_JS_VERSION -- node js version (defined in test-matrix.yml, a default is hardcoded here)
|
||||
script_path=$(dirname $(realpath -s $0))
|
||||
source $script_path/functions/imports.sh
|
||||
set -euo pipefail
|
||||
|
||||
NODE_JS_VERSION=${NODE_JS_VERSION-16}
|
||||
ELASTICSEARCH_URL=${ELASTICSEARCH_URL-"$elasticsearch_url"}
|
||||
elasticsearch_container=${elasticsearch_container-}
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m TEST_SUITE ${TEST_SUITE}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m URL ${ELASTICSEARCH_URL}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m CONTAINER ${elasticsearch_container}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m NODE_JS_VERSION ${NODE_JS_VERSION}\033[0m"
|
||||
|
||||
echo -e "\033[1m>>>>> Build docker container >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--tag elastic/elasticsearch-js \
|
||||
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
|
||||
.
|
||||
|
||||
echo -e "\033[1m>>>>> NPM run test:integration >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
repo=$(realpath $(dirname $(realpath -s $0))/../)
|
||||
|
||||
docker run \
|
||||
--network=${network_name} \
|
||||
--env "TEST_ES_SERVER=${ELASTICSEARCH_URL}" \
|
||||
--env "TEST_SUITE=${TEST_SUITE}" \
|
||||
--volume $repo:/usr/src/app \
|
||||
--volume /usr/src/app/node_modules \
|
||||
--name elasticsearch-js \
|
||||
--rm \
|
||||
elastic/elasticsearch-js \
|
||||
npm run test:integration
|
||||
@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Version 1.1
|
||||
# - Moved to .ci folder and seperated out `run-repository.sh`
|
||||
# - Add `$RUNSCRIPTS` env var for running Elasticsearch dependent products
|
||||
script_path=$(dirname $(realpath -s $0))
|
||||
source $script_path/functions/imports.sh
|
||||
set -euo pipefail
|
||||
|
||||
echo -e "\033[1m>>>>> Start [$STACK_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
DETACH=true bash .ci/run-elasticsearch.sh
|
||||
|
||||
if [[ -n "$RUNSCRIPTS" ]]; then
|
||||
for RUNSCRIPT in ${RUNSCRIPTS//,/ } ; do
|
||||
echo -e "\033[1m>>>>> Running run-$RUNSCRIPT.sh >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
CONTAINER_NAME=${RUNSCRIPT} \
|
||||
DETACH=true \
|
||||
bash .ci/run-${RUNSCRIPT}.sh
|
||||
done
|
||||
fi
|
||||
|
||||
echo -e "\033[1m>>>>> Repository specific tests >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
bash .ci/run-repository.sh
|
||||
@@ -1,14 +0,0 @@
---
STACK_VERSION:
  - "8.7.0-SNAPSHOT"

NODE_JS_VERSION:
  - 18
  - 16
  - 14

TEST_SUITE:
  - free
  - platinum

exclude: ~
.github/workflows/backport.yml (vendored, 2 lines changed)
@@ -11,6 +11,6 @@ jobs:
    name: Backport
    steps:
      - name: Backport
-       uses: tibdex/backport@v1
+       uses: tibdex/backport@v2
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/nodejs.yml (vendored, 142 lines changed)
@@ -9,7 +9,7 @@ jobs:

    strategy:
      matrix:
-       node-version: [14.x, 16.x, 18.x]
+       node-version: [14.x, 16.x, 18.x, 20.x]
        os: [ubuntu-latest, windows-latest, macOS-latest]

    steps:
@ -32,151 +32,13 @@ jobs:
|
||||
run: |
|
||||
npm run test:unit
|
||||
|
||||
# - name: Acceptance test
|
||||
# run: |
|
||||
# npm run test:acceptance
|
||||
|
||||
# helpers-integration-test:
|
||||
# name: Helpers integration test
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# strategy:
|
||||
# matrix:
|
||||
# node-version: [12.x, 14.x, 16.x]
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Configure sysctl limits
|
||||
# run: |
|
||||
# sudo swapoff -a
|
||||
# sudo sysctl -w vm.swappiness=1
|
||||
# sudo sysctl -w fs.file-max=262144
|
||||
# sudo sysctl -w vm.max_map_count=262144
|
||||
|
||||
# - name: Runs Elasticsearch
|
||||
# uses: elastic/elastic-github-actions/elasticsearch@master
|
||||
# with:
|
||||
# stack-version: 8.0.0-SNAPSHOT
|
||||
|
||||
# - name: Use Node.js ${{ matrix.node-version }}
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: ${{ matrix.node-version }}
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
|
||||
# - name: Integration test
|
||||
# run: |
|
||||
# npm run test:integration:helpers
|
||||
|
||||
# bundler-support:
|
||||
# name: Bundler support
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Configure sysctl limits
|
||||
# run: |
|
||||
# sudo swapoff -a
|
||||
# sudo sysctl -w vm.swappiness=1
|
||||
# sudo sysctl -w fs.file-max=262144
|
||||
# sudo sysctl -w vm.max_map_count=262144
|
||||
|
||||
# - name: Runs Elasticsearch
|
||||
# uses: elastic/elastic-github-actions/elasticsearch@master
|
||||
# with:
|
||||
# stack-version: 8.0.0-SNAPSHOT
|
||||
|
||||
# - name: Use Node.js 14.x
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: 14.x
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
# npm install --prefix test/bundlers/parcel-test
|
||||
# npm install --prefix test/bundlers/rollup-test
|
||||
# npm install --prefix test/bundlers/webpack-test
|
||||
|
||||
# - name: Build
|
||||
# run: |
|
||||
# npm run build --prefix test/bundlers/parcel-test
|
||||
# npm run build --prefix test/bundlers/rollup-test
|
||||
# npm run build --prefix test/bundlers/webpack-test
|
||||
|
||||
# - name: Run bundle
|
||||
# run: |
|
||||
# npm start --prefix test/bundlers/parcel-test
|
||||
# npm start --prefix test/bundlers/rollup-test
|
||||
# npm start --prefix test/bundlers/webpack-test
|
||||
|
||||
# mock-support:
|
||||
# name: Mock support
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Use Node.js 14.x
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: 14.x
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
# npm install --prefix test/mock
|
||||
|
||||
# - name: Run test
|
||||
# run: |
|
||||
# npm test --prefix test/mock
|
||||
|
||||
# code-coverage:
|
||||
# name: Code coverage
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# strategy:
|
||||
# matrix:
|
||||
# node-version: [14.x]
|
||||
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
|
||||
# - name: Use Node.js ${{ matrix.node-version }}
|
||||
# uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: ${{ matrix.node-version }}
|
||||
|
||||
# - name: Install
|
||||
# run: |
|
||||
# npm install
|
||||
|
||||
# - name: Code coverage report
|
||||
# run: |
|
||||
# npm run test:coverage-report
|
||||
|
||||
# - name: Upload coverage to Codecov
|
||||
# uses: codecov/codecov-action@v1
|
||||
# with:
|
||||
# file: ./coverage.lcov
|
||||
# fail_ci_if_error: true
|
||||
|
||||
# - name: Code coverage 100%
|
||||
# run: |
|
||||
# npm run test:coverage-100
|
||||
|
||||
  license:
    name: License check
    runs-on: ubuntu-latest

    strategy:
      matrix:
-       node-version: [16.x]
+       node-version: [20.x]

    steps:
      - uses: actions/checkout@v2

Makefile (new normal file, 11 lines)
@@ -0,0 +1,11 @@
.PHONY: integration-setup
integration-setup: integration-cleanup
	DETACH=true .ci/run-elasticsearch.sh

.PHONY: integration-cleanup
integration-cleanup:
	docker container rm --force --volumes instance || true

.PHONY: integration
integration: integration-setup
	npm run test:integration
catalog-info.yaml (new normal file, 53 lines)
@@ -0,0 +1,53 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/catalog-info.json
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
  name: elasticsearch-js
spec:
  type: library
  owner: group:clients-team
  lifecycle: production

---
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
apiVersion: backstage.io/v1alpha1
kind: Resource
metadata:
  name: elasticsearch-js-integration-tests
  description: Elasticsearch JavaScript client integration tests
spec:
  type: buildkite-pipeline
  owner: group:clients-team
  system: buildkite
  implementation:
    apiVersion: buildkite.elastic.dev/v1
    kind: Pipeline
    metadata:
      name: Elasticsearch JavaScript client integration tests
    spec:
      repository: elastic/elasticsearch-js
      pipeline_file: .buildkite/pipeline.yml
      teams:
        clients-team:
          access_level: MANAGE_BUILD_AND_READ
        everyone:
          access_level: READ_ONLY
      cancel_intermediate_builds: true
      cancel_intermediate_builds_branch_filter: '!main'
      schedules:
        main_semi_daily:
          branch: 'main'
          cronline: '*/12 * * *'
        8_8_semi_daily:
          branch: '8.8'
          cronline: '*/12 * * *'
        8_7_daily:
          branch: '8.7'
          cronline: '@daily'
        8_6_daily:
          branch: '8.6'
          cronline: '@daily'
        7_17_daily:
          branch: '7.17'
          cronline: '@daily'
@@ -1,6 +1,29 @@
[[changelog-client]]
== Release notes

[discrete]
=== 8.7.3

[discrete]
===== Bump @elastic/transport to `~8.3.1`
Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.

[discrete]
=== 8.7.0

[discrete]
===== Support for Elasticsearch `v8.7.0`

You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].

[discrete]
=== 8.6.1

[discrete]
===== Bump @elastic/transport to `~8.3.1`
Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.

[discrete]
=== 8.6.0

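The `~8.3.1` pin called out above is the whole fix: a tilde range only floats across patch releases, while the previous caret range floats across minors. A minimal sketch, assuming the widely used `semver` package (not part of this diff), illustrates the difference:

```typescript
import * as semver from 'semver'

// `^8.3.1` accepts any 8.x release >= 8.3.1, so it would pull in
// @elastic/transport 8.5.0, which requires Node.js v18+.
console.log(semver.satisfies('8.5.0', '^8.3.1')) // true

// `~8.3.1` only accepts 8.3.x patch releases, so 8.5.0 is excluded
// and clients on older Node.js versions keep working.
console.log(semver.satisfies('8.5.0', '~8.3.1')) // false
console.log(semver.satisfies('8.3.3', '~8.3.1')) // true
```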
@@ -103,7 +103,7 @@ async function run () {
// get the next response if there are more quotes to fetch
responseQueue.push(
await client.scroll({
-scrollId: body._scroll_id,
+scroll_id: body._scroll_id,
scroll: '30s'
})
)
@@ -146,7 +146,7 @@ async function * scrollSearch (params) {
}

response = await client.scroll({
-scrollId: response._scroll_id,
+scroll_id: response._scroll_id,
scroll: params.scroll
})
}
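Both documentation hunks make the same point: the 8.x client expects the snake_case `scroll_id` key, not `scrollId`. A self-contained sketch of the async-generator pattern from the second hunk (index name, query, and sizes are placeholders, not taken from the diff):

```typescript
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

async function * scrollSearch (params: { index: string, scroll: string, size: number }) {
  let response = await client.search({
    index: params.index,
    scroll: params.scroll,
    size: params.size,
    query: { match_all: {} }
  })

  while (response.hits.hits.length > 0) {
    yield response.hits.hits
    if (response._scroll_id == null) break
    response = await client.scroll({
      scroll_id: response._scroll_id, // snake_case, as in the updated docs
      scroll: params.scroll
    })
  }
}

async function run () {
  for await (const hits of scrollSearch({ index: 'my-index', scroll: '30s', size: 100 })) {
    console.log(hits.length)
  }
}

run().catch(console.error)
```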
docs/index-custom-title-page.html (new normal file, 185 lines)
@@ -0,0 +1,185 @@
<style>
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.card {
|
||||
cursor: pointer;
|
||||
padding: 16px;
|
||||
text-align: left;
|
||||
color: #000;
|
||||
}
|
||||
|
||||
.card:hover {
|
||||
box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2);
|
||||
padding: 16px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
#guide a.no-text-decoration:hover {
|
||||
text-decoration: none!important;
|
||||
}
|
||||
|
||||
.icon {
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
background-position: bottom;
|
||||
background-size: contain;
|
||||
background-repeat: no-repeat;
|
||||
}
|
||||
|
||||
.ul-col-1 {
|
||||
columns: 1;
|
||||
-webkit-columns: 1;
|
||||
-moz-columns: 1;
|
||||
}
|
||||
|
||||
@media (min-width:769px) {
|
||||
.ul-col-md-2 {
|
||||
columns: 2;
|
||||
-webkit-columns: 2;
|
||||
-moz-columns: 2;
|
||||
}
|
||||
}
|
||||
|
||||
#guide h3.gtk {
|
||||
margin-top: 16px;
|
||||
}
|
||||
|
||||
.mb-4, .my-4 {
|
||||
margin-bottom: 0!important;
|
||||
}
|
||||
</style>
|
||||
|
||||
<div class="legalnotice"></div>
|
||||
|
||||
<div class="row my-4">
|
||||
<div class="col-md-6 col-12">
|
||||
<p></p>
|
||||
<p>
|
||||
<h2>Documentation</h2>
|
||||
</p>
|
||||
<p>
|
||||
The official Node.js client provides one-to-one mapping with Elasticsearch REST APIs.
|
||||
</p>
|
||||
<p>
|
||||
<a href="https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/installation.html">
|
||||
<button class="btn btn-primary">Get started</button>
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
<div class="col-md-6 col-12">
|
||||
<img class="w-100" src="https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blt3826dd36882ab258/641c4e9d542f593a7e8ba656/js-es-lp-hero.png" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h3 class="gtk">Get to know the JavaScript client</h3>
|
||||
|
||||
<div class="my-5">
|
||||
<div class="d-flex align-items-center mb-3">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/bltfd59779217093221/641ae0c8db18f61d68e9c377/64x64_Color_icon-connected-circles64-color.png');"></span>
|
||||
Connecting
|
||||
</h4>
|
||||
</div>
|
||||
<ul class="ul-col-md-2 ul-col-1">
|
||||
<li>
|
||||
<a href="introduction.html">Introduction to the client</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="installation.html">Installing the client</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="client-connecting.html">Connecting to Elasticsearch</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="client-configuration.html">Configuration options</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="my-5">
|
||||
<div class="d-flex align-items-center mb-3">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/bltca09fd8c807816ce/641ae17733e7f95594918557/icon-monitor-cog-64-color.png');"></span>
|
||||
Using the JS client
|
||||
</h4>
|
||||
</div>
|
||||
<ul class="ul-col-md-2 ul-col-1">
|
||||
<li>
|
||||
<a href="bulk_examples.html">Bulk indexing</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="get_examples.html">Getting documents</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="search_examples.html">Searching</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="examples.html">More examples</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="my-5">
|
||||
<div class="d-flex align-items-center mb-3">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blteacd058910f155d8/636925a6e0ff7c532db636d7/64x64_Color_icon-dev-tools-64-color.png');"></span>
|
||||
API and developer docs
|
||||
</h4>
|
||||
</div>
|
||||
<ul class="ul-col-md-2 ul-col-1">
|
||||
<li>
|
||||
<a href="api-reference.html">API reference</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="client-helpers.html">Client helpers</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="integrations.html">Integrations</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="changelog-client.html">Release notes</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<h3 class="explore">Explore by use case</h3>
|
||||
|
||||
<div class="row my-4">
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<a class="no-text-decoration" href="https://www.elastic.co/guide/en/enterprise-search/current/start.html">
|
||||
<div class="card h-100">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blt11200907c1c033aa/634d9da119d8652169cf9b2b/enterprise-search-logo-color-32px.png');"></span>
|
||||
Search my data
|
||||
</h4>
|
||||
<p>Create search experiences for your content, wherever it lives.</p>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<a class="no-text-decoration" href="https://www.elastic.co/guide/en/starting-with-the-elasticsearch-platform-and-its-solutions/current/getting-started-observability.html">
|
||||
<div class="card h-100">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/bltaa08b370a00bbecc/634d9da14e565f1cdce27f7c/observability-logo-color-32px.png');"></span>
|
||||
Observe my data
|
||||
</h4>
|
||||
<p>Follow our guides to monitor logs, metrics, and traces.</p>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
<div class="col-md-4 col-12 mb-2">
|
||||
<a class="no-text-decoration" href="https://www.elastic.co/guide/en/security/current/es-overview.html">
|
||||
<div class="card h-100">
|
||||
<h4 class="mt-3">
|
||||
<span class="inline-block float-left icon mr-2" style="background-image: url('https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blt5e0e0ad9a13e6b8c/634d9da18473831f96bbdf1e/security-logo-color-32px.png');"></span>
|
||||
Protect my environment
|
||||
</h4>
|
||||
<p>Learn how to defend against threats across your environment.</p>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p class="my-4"><a href="https://www.elastic.co/guide/index.html">View all Elastic docs</a></p>
|
||||
@@ -1,6 +1,6 @@
= Elasticsearch JavaScript Client

:branch: master
include::{asciidoc-dir}/../../shared/versions/stack/{source_branch}.asciidoc[]
include::{asciidoc-dir}/../../shared/attributes.asciidoc[]

include::introduction.asciidoc[]
@@ -154,9 +154,6 @@ request: {

The event order is described in the following graph, in some edge cases, the
order is not guaranteed.
-You can find in
-https://github.com/elastic/elasticsearch-js/blob/main/test/acceptance/events-order.test.js[`test/acceptance/events-order.test.js`]
-how the order changes based on the situation.

[source]
----
File diff suppressed because it is too large.
index.d.ts (vendored, 2 lines changed)
@@ -22,6 +22,6 @@ import SniffingTransport from './lib/sniffingTransport'

export * from '@elastic/transport'
export * as estypes from './lib/api/types'
-export * as estypesWithBody from './lib/api/types'
+export * as estypesWithBody from './lib/api/typesWithBodyKey'
export { Client, SniffingTransport }
export type { ClientOptions, NodeOptions } from './lib/client'
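The corrected re-export matters for consumers who import both type namespaces. A small sketch of what each namespace is meant to describe (request shapes are illustrative only):

```typescript
import { estypes, estypesWithBody } from '@elastic/elasticsearch'

// estypes models the 8.x request shape, with body fields at the top level...
const request: estypes.SearchRequest = {
  index: 'my-index',
  query: { match_all: {} }
}

// ...while estypesWithBody keeps the legacy `body` key, which is what the
// fixed re-export now actually points at (typesWithBodyKey).
const legacyRequest: estypesWithBody.SearchRequest = {
  index: 'my-index',
  body: { query: { match_all: {} } }
}

console.log(request.index === legacyRequest.index)
```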
package.json (29 lines changed)
@@ -1,17 +1,16 @@
{
"name": "@elastic/elasticsearch",
-"version": "8.7.0",
-"versionCanary": "8.7.0-canary.0",
+"version": "8.7.3",
+"versionCanary": "8.7.3-canary.0",
"description": "The official Elasticsearch client for Node.js",
"main": "index.js",
"types": "index.d.ts",
"scripts": {
-"test": "npm run build && npm run lint && tap test/{unit,acceptance}/{*,**/*}.test.ts",
+"test": "npm run build && npm run lint && tap test/unit/{*,**/*}.test.ts",
"test:unit": "npm run build && tap test/unit/{*,**/*}.test.ts",
-"test:acceptance": "npm run build && tap test/acceptance/*.test.ts",
-"test:coverage-100": "npm run build && tap test/{unit,acceptance}/{*,**/*}.test.ts --coverage --100",
-"test:coverage-report": "npm run build && tap test/{unit,acceptance}/{*,**/*}.test.ts --coverage && nyc report --reporter=text-lcov > coverage.lcov",
-"test:coverage-ui": "npm run build && tap test/{unit,acceptance}/{*,**/*}.test.ts --coverage --coverage-report=html",
+"test:coverage-100": "npm run build && tap test/unit/{*,**/*}.test.ts --coverage --100",
+"test:coverage-report": "npm run build && tap test/unit/{*,**/*}.test.ts --coverage && nyc report --reporter=text-lcov > coverage.lcov",
+"test:coverage-ui": "npm run build && tap test/unit/{*,**/*}.test.ts --coverage --coverage-report=html",
"test:integration": "tsc && node test/integration/index.js",
"lint": "ts-standard src",
"lint:fix": "ts-standard --fix src",
@@ -31,10 +30,16 @@
"client",
"index"
],
"author": {
"name": "Tomas Della Vedova",
"company": "Elastic BV"
},
+"contributors": [
+{
+"name": "Tomas Della Vedova",
+"company": "Elastic BV"
+},
+{
+"name": "Josh Mock",
+"company": "Elastic BV"
+}
+],
"license": "Apache-2.0",
"repository": {
"type": "git",
@@ -81,7 +86,7 @@
"zx": "^6.1.0"
},
"dependencies": {
-"@elastic/transport": "^8.3.1",
+"@elastic/transport": "~8.3.1",
"tslib": "^2.4.0"
},
"tap": {
@ -228,7 +228,7 @@ function generateSingleApi (version, spec, common) {
|
||||
|
||||
${genUrlValidation(paths, api)}
|
||||
|
||||
let { ${genQueryBlacklist(false)}, ...querystring } = params
|
||||
let { ${genQueryDenylist(false)}, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
@ -316,20 +316,20 @@ function generateSingleApi (version, spec, common) {
|
||||
}, {})
|
||||
}
|
||||
|
||||
function genQueryBlacklist (addQuotes = true) {
|
||||
function genQueryDenylist (addQuotes = true) {
|
||||
const toCamelCase = str => {
|
||||
return str[0] === '_'
|
||||
? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
: str.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
}
|
||||
|
||||
const blacklist = ['method', 'body']
|
||||
const denylist = ['method', 'body']
|
||||
parts.forEach(p => {
|
||||
const camelStr = toCamelCase(p)
|
||||
if (camelStr !== p) blacklist.push(`${camelStr}`)
|
||||
blacklist.push(`${p}`)
|
||||
if (camelStr !== p) denylist.push(`${camelStr}`)
|
||||
denylist.push(`${p}`)
|
||||
})
|
||||
return addQuotes ? blacklist.map(q => `'${q}'`) : blacklist
|
||||
return addQuotes ? denylist.map(q => `'${q}'`) : denylist
|
||||
}
|
||||
|
||||
function buildPath () {
|
||||
|
||||
@@ -279,7 +279,7 @@ export default class Cluster {
async putComponentTemplate (this: That, params: T.ClusterPutComponentTemplateRequest | TB.ClusterPutComponentTemplateRequest, options?: TransportRequestOptions): Promise<T.ClusterPutComponentTemplateResponse>
async putComponentTemplate (this: That, params: T.ClusterPutComponentTemplateRequest | TB.ClusterPutComponentTemplateRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['name']
-const acceptedBody: string[] = ['template', 'aliases', 'mappings', 'settings', 'version', '_meta']
+const acceptedBody: string[] = ['template', 'version', '_meta', 'allow_auto_create']
const querystring: Record<string, any> = {}
// @ts-expect-error
const userBody: any = params?.body
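The regenerated accepted body list means `aliases`, `mappings` and `settings` are no longer standalone body fields for this endpoint; they belong inside `template`. A hedged sketch of a call that matches the new list (the template name and field values are invented for illustration):

```typescript
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

async function createTemplate () {
  await client.cluster.putComponentTemplate({
    name: 'example-component-template', // hypothetical name
    template: {
      // mappings (and settings/aliases) now nest under `template`
      mappings: { properties: { '@timestamp': { type: 'date' } } }
    },
    version: 1,
    _meta: { description: 'sketch for illustration only' }
  })
}

createTemplate().catch(console.error)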
@ -37,12 +37,12 @@ import * as T from '../types'
|
||||
import * as TB from '../typesWithBodyKey'
|
||||
interface That { transport: Transport }
|
||||
|
||||
export default async function FieldCapsApi (this: That, params: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.FieldCapsResponse>
|
||||
export default async function FieldCapsApi (this: That, params: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FieldCapsResponse, unknown>>
|
||||
export default async function FieldCapsApi (this: That, params: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise<T.FieldCapsResponse>
|
||||
export default async function FieldCapsApi (this: That, params: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.FieldCapsResponse>
|
||||
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FieldCapsResponse, unknown>>
|
||||
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise<T.FieldCapsResponse>
|
||||
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
const acceptedPath: string[] = ['index']
|
||||
const acceptedBody: string[] = ['index_filter', 'runtime_mappings']
|
||||
const acceptedBody: string[] = ['fields', 'index_filter', 'runtime_mappings']
|
||||
const querystring: Record<string, any> = {}
|
||||
// @ts-expect-error
|
||||
const userBody: any = params?.body
|
||||
@ -53,6 +53,7 @@ export default async function FieldCapsApi (this: That, params: T.FieldCapsReque
|
||||
body = userBody != null ? { ...userBody } : undefined
|
||||
}
|
||||
|
||||
params = params ?? {}
|
||||
for (const key in params) {
|
||||
if (acceptedBody.includes(key)) {
|
||||
body = body ?? {}
|
||||
|
||||
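The field capabilities hunk just above makes `params` optional and adds `fields` to the accepted body keys. A short sketch of both call styles (index and field names are placeholders):

```typescript
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

async function inspectFields () {
  // `fields` is now accepted as part of the request body.
  const scoped = await client.fieldCaps({ index: 'my-index', fields: ['title', '@timestamp'] })
  console.log(scoped.fields)

  // With the optional signature the API can also be called with no params at all.
  const everything = await client.fieldCaps()
  console.log(Object.keys(everything.fields).length)
}

inspectFields().catch(console.error)
```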
src/api/api/health_report.ts (new normal file, 68 lines)
@@ -0,0 +1,68 @@
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint-disable import/export */
|
||||
/* eslint-disable @typescript-eslint/no-misused-new */
|
||||
/* eslint-disable @typescript-eslint/no-extraneous-class */
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
|
||||
// This file was automatically generated by elastic/elastic-client-generator-js
|
||||
// DO NOT MODIFY IT BY HAND. Instead, modify the source open api file,
|
||||
// and elastic/elastic-client-generator-js to regenerate this file again.
|
||||
|
||||
import {
|
||||
Transport,
|
||||
TransportRequestOptions,
|
||||
TransportRequestOptionsWithMeta,
|
||||
TransportRequestOptionsWithOutMeta,
|
||||
TransportResult
|
||||
} from '@elastic/transport'
|
||||
import * as T from '../types'
|
||||
import * as TB from '../typesWithBodyKey'
|
||||
interface That { transport: Transport }
|
||||
|
||||
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.HealthReportResponse>
|
||||
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.HealthReportResponse, unknown>>
|
||||
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptions): Promise<T.HealthReportResponse>
|
||||
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest | TB.HealthReportRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
const acceptedPath: string[] = ['feature']
|
||||
const querystring: Record<string, any> = {}
|
||||
const body = undefined
|
||||
|
||||
params = params ?? {}
|
||||
for (const key in params) {
|
||||
if (acceptedPath.includes(key)) {
|
||||
continue
|
||||
} else if (key !== 'body') {
|
||||
// @ts-expect-error
|
||||
querystring[key] = params[key]
|
||||
}
|
||||
}
|
||||
|
||||
let method = ''
|
||||
let path = ''
|
||||
if (params.feature != null) {
|
||||
method = 'GET'
|
||||
path = `/_health_report/${encodeURIComponent(params.feature.toString())}`
|
||||
} else {
|
||||
method = 'GET'
|
||||
path = '/_health_report'
|
||||
}
|
||||
return await this.transport.request({ path, method, querystring, body }, options)
|
||||
}
|
||||
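The new generated file exposes the health report API added in Elasticsearch 8.7. A minimal usage sketch (the connection details are placeholders; `shards_availability` is one of the documented health indicators):

```typescript
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

async function checkHealth () {
  // GET /_health_report
  const report = await client.healthReport()
  console.log(report.status)

  // GET /_health_report/<feature>
  const shards = await client.healthReport({ feature: 'shards_availability' })
  console.log(shards)
}

checkHealth().catch(console.error)
```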
@ -135,10 +135,10 @@ export default class License {
|
||||
return await this.transport.request({ path, method, querystring, body }, options)
|
||||
}
|
||||
|
||||
async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.LicensePostResponse>
|
||||
async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicensePostResponse, unknown>>
|
||||
async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<T.LicensePostResponse>
|
||||
async post (this: That, params: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.LicensePostResponse>
|
||||
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicensePostResponse, unknown>>
|
||||
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<T.LicensePostResponse>
|
||||
async post (this: That, params?: T.LicensePostRequest | TB.LicensePostRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
const acceptedPath: string[] = []
|
||||
const acceptedBody: string[] = ['license', 'licenses']
|
||||
const querystring: Record<string, any> = {}
|
||||
@ -151,6 +151,7 @@ export default class License {
|
||||
body = userBody != null ? { ...userBody } : undefined
|
||||
}
|
||||
|
||||
params = params ?? {}
|
||||
for (const key in params) {
|
||||
if (acceptedBody.includes(key)) {
|
||||
body = body ?? {}
|
||||
|
||||
@@ -82,8 +82,15 @@ export default class Logstash {
      }
    }

    const method = 'GET'
    const path = `/_logstash/pipeline/${encodeURIComponent(params.id.toString())}`
    let method = ''
    let path = ''
    if (params.id != null) {
      method = 'GET'
      path = `/_logstash/pipeline/${encodeURIComponent(params.id.toString())}`
    } else {
      method = 'GET'
      path = '/_logstash/pipeline'
    }
    return await this.transport.request({ path, method, querystring, body }, options)
  }
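Since the pipeline id is now optional, the same call can list every pipeline or fetch a single one. A hedged sketch, assuming this is the `logstash.getPipeline` helper implied by the paths above, with an illustrative endpoint and pipeline id:

```js
// Sketch only: method name assumed from the request paths above
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

const allPipelines = await client.logstash.getPipeline() // GET /_logstash/pipeline
const onePipeline = await client.logstash.getPipeline({ id: 'my-pipeline' }) // GET /_logstash/pipeline/my-pipeline
console.log(Object.keys(allPipelines), onePipeline)
```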
@@ -43,19 +43,19 @@ export default class Ml {
    this.transport = transport
  }

  async clearTrainedModelDeploymentCache (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
  async clearTrainedModelDeploymentCache (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
  async clearTrainedModelDeploymentCache (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
  async clearTrainedModelDeploymentCache (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
  async clearTrainedModelDeploymentCache (this: That, params: T.MlClearTrainedModelDeploymentCacheRequest | TB.MlClearTrainedModelDeploymentCacheRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.MlClearTrainedModelDeploymentCacheResponse>
  async clearTrainedModelDeploymentCache (this: That, params: T.MlClearTrainedModelDeploymentCacheRequest | TB.MlClearTrainedModelDeploymentCacheRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MlClearTrainedModelDeploymentCacheResponse, unknown>>
  async clearTrainedModelDeploymentCache (this: That, params: T.MlClearTrainedModelDeploymentCacheRequest | TB.MlClearTrainedModelDeploymentCacheRequest, options?: TransportRequestOptions): Promise<T.MlClearTrainedModelDeploymentCacheResponse>
  async clearTrainedModelDeploymentCache (this: That, params: T.MlClearTrainedModelDeploymentCacheRequest | TB.MlClearTrainedModelDeploymentCacheRequest, options?: TransportRequestOptions): Promise<any> {
    const acceptedPath: string[] = ['model_id']
    const querystring: Record<string, any> = {}
    const body = undefined

    params = params ?? {}
    for (const key in params) {
      if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body') {
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }
@@ -2006,7 +2006,7 @@ export default class Ml {
  async updateDatafeed (this: That, params: T.MlUpdateDatafeedRequest | TB.MlUpdateDatafeedRequest, options?: TransportRequestOptions): Promise<T.MlUpdateDatafeedResponse>
  async updateDatafeed (this: That, params: T.MlUpdateDatafeedRequest | TB.MlUpdateDatafeedRequest, options?: TransportRequestOptions): Promise<any> {
    const acceptedPath: string[] = ['datafeed_id']
    const acceptedBody: string[] = ['aggregations', 'chunking_config', 'delayed_data_check_config', 'frequency', 'indices', 'indexes', 'indices_options', 'max_empty_searches', 'query', 'query_delay', 'runtime_mappings', 'script_fields', 'scroll_size']
    const acceptedBody: string[] = ['aggregations', 'chunking_config', 'delayed_data_check_config', 'frequency', 'indices', 'indexes', 'indices_options', 'job_id', 'max_empty_searches', 'query', 'query_delay', 'runtime_mappings', 'script_fields', 'scroll_size']
    const querystring: Record<string, any> = {}
    // @ts-expect-error
    const userBody: any = params?.body
@@ -2074,7 +2074,7 @@ export default class Ml {
  async updateJob (this: That, params: T.MlUpdateJobRequest | TB.MlUpdateJobRequest, options?: TransportRequestOptions): Promise<T.MlUpdateJobResponse>
  async updateJob (this: That, params: T.MlUpdateJobRequest | TB.MlUpdateJobRequest, options?: TransportRequestOptions): Promise<any> {
    const acceptedPath: string[] = ['job_id']
    const acceptedBody: string[] = ['allow_lazy_open', 'analysis_limits', 'background_persist_interval', 'custom_settings', 'categorization_filters', 'description', 'model_plot_config', 'daily_model_snapshot_retention_after_days', 'model_snapshot_retention_days', 'renormalization_window_days', 'results_retention_days', 'groups', 'detectors', 'per_partition_categorization']
    const acceptedBody: string[] = ['allow_lazy_open', 'analysis_limits', 'background_persist_interval', 'custom_settings', 'categorization_filters', 'description', 'model_plot_config', 'model_prune_window', 'daily_model_snapshot_retention_after_days', 'model_snapshot_retention_days', 'renormalization_window_days', 'results_retention_days', 'groups', 'detectors', 'per_partition_categorization']
    const querystring: Record<string, any> = {}
    // @ts-expect-error
    const userBody: any = params?.body
@@ -2137,6 +2137,28 @@ export default class Ml {
    return await this.transport.request({ path, method, querystring, body }, options)
  }

  async updateTrainedModelDeployment (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO>
  async updateTrainedModelDeployment (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
  async updateTrainedModelDeployment (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO>
  async updateTrainedModelDeployment (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> {
    const acceptedPath: string[] = ['model_id']
    const querystring: Record<string, any> = {}
    const body = undefined

    params = params ?? {}
    for (const key in params) {
      if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body') {
        querystring[key] = params[key]
      }
    }

    const method = 'POST'
    const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/deployment/_update`
    return await this.transport.request({ path, method, querystring, body }, options)
  }

  async upgradeJobSnapshot (this: That, params: T.MlUpgradeJobSnapshotRequest | TB.MlUpgradeJobSnapshotRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.MlUpgradeJobSnapshotResponse>
  async upgradeJobSnapshot (this: That, params: T.MlUpgradeJobSnapshotRequest | TB.MlUpgradeJobSnapshotRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MlUpgradeJobSnapshotResponse, unknown>>
  async upgradeJobSnapshot (this: That, params: T.MlUpgradeJobSnapshotRequest | TB.MlUpgradeJobSnapshotRequest, options?: TransportRequestOptions): Promise<T.MlUpgradeJobSnapshotResponse>

@@ -42,7 +42,7 @@ export default async function SearchMvtApi (this: That, params: T.SearchMvtReque
export default async function SearchMvtApi (this: That, params: T.SearchMvtRequest | TB.SearchMvtRequest, options?: TransportRequestOptions): Promise<T.SearchMvtResponse>
export default async function SearchMvtApi (this: That, params: T.SearchMvtRequest | TB.SearchMvtRequest, options?: TransportRequestOptions): Promise<any> {
  const acceptedPath: string[] = ['index', 'field', 'zoom', 'x', 'y']
  const acceptedBody: string[] = ['aggs', 'exact_bounds', 'extent', 'fields', 'grid_precision', 'grid_type', 'query', 'runtime_mappings', 'size', 'sort', 'track_total_hits']
  const acceptedBody: string[] = ['aggs', 'buffer', 'exact_bounds', 'extent', 'fields', 'grid_agg', 'grid_precision', 'grid_type', 'query', 'runtime_mappings', 'size', 'sort', 'track_total_hits', 'with_labels']
  const querystring: Record<string, any> = {}
  // @ts-expect-error
  const userBody: any = params?.body

@@ -290,7 +290,7 @@ export default class Snapshot {
  async restore (this: That, params: T.SnapshotRestoreRequest | TB.SnapshotRestoreRequest, options?: TransportRequestOptions): Promise<T.SnapshotRestoreResponse>
  async restore (this: That, params: T.SnapshotRestoreRequest | TB.SnapshotRestoreRequest, options?: TransportRequestOptions): Promise<any> {
    const acceptedPath: string[] = ['repository', 'snapshot']
    const acceptedBody: string[] = ['ignore_index_settings', 'ignore_unavailable', 'include_aliases', 'include_global_state', 'index_settings', 'indices', 'partial', 'rename_pattern', 'rename_replacement']
    const acceptedBody: string[] = ['feature_states', 'ignore_index_settings', 'ignore_unavailable', 'include_aliases', 'include_global_state', 'index_settings', 'indices', 'partial', 'rename_pattern', 'rename_replacement']
    const querystring: Record<string, any> = {}
    // @ts-expect-error
    const userBody: any = params?.body

@@ -215,6 +215,28 @@ export default class Transform {
    return await this.transport.request({ path, method, querystring, body }, options)
  }

  async scheduleNowTransform (this: That, params: T.TransformScheduleNowTransformRequest | TB.TransformScheduleNowTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.TransformScheduleNowTransformResponse>
  async scheduleNowTransform (this: That, params: T.TransformScheduleNowTransformRequest | TB.TransformScheduleNowTransformRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TransformScheduleNowTransformResponse, unknown>>
  async scheduleNowTransform (this: That, params: T.TransformScheduleNowTransformRequest | TB.TransformScheduleNowTransformRequest, options?: TransportRequestOptions): Promise<T.TransformScheduleNowTransformResponse>
  async scheduleNowTransform (this: That, params: T.TransformScheduleNowTransformRequest | TB.TransformScheduleNowTransformRequest, options?: TransportRequestOptions): Promise<any> {
    const acceptedPath: string[] = ['transform_id']
    const querystring: Record<string, any> = {}
    const body = undefined

    for (const key in params) {
      if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body') {
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }

    const method = 'POST'
    const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}/_schedule_now`
    return await this.transport.request({ path, method, querystring, body }, options)
  }
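The new `scheduleNowTransform` helper above posts to `/_transform/{transform_id}/_schedule_now`, triggering an immediate run instead of waiting for the next scheduled check. A short usage sketch (transform id and endpoint are illustrative):

```js
// Sketch only: ids and endpoint are placeholders, not values from this diff
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })
await client.transform.scheduleNowTransform({ transform_id: 'my-transform' })
```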

  async startTransform (this: That, params: T.TransformStartTransformRequest | TB.TransformStartTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.TransformStartTransformResponse>
  async startTransform (this: That, params: T.TransformStartTransformRequest | TB.TransformStartTransformRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TransformStartTransformResponse, unknown>>
  async startTransform (this: That, params: T.TransformStartTransformRequest | TB.TransformStartTransformRequest, options?: TransportRequestOptions): Promise<T.TransformStartTransformResponse>

@@ -55,6 +55,7 @@ import getScriptContextApi from './api/get_script_context'
import getScriptLanguagesApi from './api/get_script_languages'
import getSourceApi from './api/get_source'
import GraphApi from './api/graph'
import healthReportApi from './api/health_report'
import IlmApi from './api/ilm'
import indexApi from './api/index'
import IndicesApi from './api/indices'
@@ -134,6 +135,7 @@ export default interface API {
  getScriptLanguages: typeof getScriptLanguagesApi
  getSource: typeof getSourceApi
  graph: GraphApi
  healthReport: typeof healthReportApi
  ilm: IlmApi
  index: typeof indexApi
  indices: IndicesApi
@@ -306,6 +308,7 @@ API.prototype.getScript = getScriptApi
API.prototype.getScriptContext = getScriptContextApi
API.prototype.getScriptLanguages = getScriptLanguagesApi
API.prototype.getSource = getSourceApi
API.prototype.healthReport = healthReportApi
API.prototype.index = indexApi
API.prototype.info = infoApi
API.prototype.knnSearch = knnSearchApi

789
src/api/types.ts
File diff suppressed because it is too large
@@ -20,6 +20,7 @@
import { ConnectionOptions as TlsConnectionOptions } from 'tls'
import { URL } from 'url'
import buffer from 'buffer'
import os from 'os'
import {
  Transport,
  UndiciConnection,
@@ -173,7 +174,9 @@ export default class Client extends API {
      tls: null,
      caFingerprint: null,
      agent: null,
      headers: {},
      headers: {
        'user-agent': `elasticsearch-js/${clientVersion} Node.js ${nodeVersion}; Transport ${transportVersion}; (${os.platform()} ${os.release()} ${os.arch()})`
      },
      nodeFilter: null,
      generateRequestId: null,
      name: 'elasticsearch-js',
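With this change the client sends a default `user-agent` header built from the client, transport and Node.js versions plus the host platform. A hedged sketch of what that resolves to and how extra headers are supplied (exact merge semantics are not shown in this hunk):

```js
// The default header ends up looking roughly like:
//   elasticsearch-js/8.7.3 Node.js v18.16.0; Transport 8.3.1; (linux 5.15.0 x64)
// (version numbers here are illustrative, not taken from this diff)
import { Client } from '@elastic/elasticsearch'

const client = new Client({
  node: 'http://localhost:9200',
  headers: { 'x-my-app': 'example' } // additional request headers supplied by the user
})
```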
@@ -5,8 +5,8 @@
Yes.

## Background
Elasticsearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/elastic/elasticsearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/api).<br/>
To support different languages at the same time, the Elasticsearch team decided to provide a [YAML specification](https://github.com/elastic/elasticsearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.<br/>
Elasticsearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/elastic/elasticsearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/api).<br/>
To support different languages at the same time, the Elasticsearch team decided to provide a [YAML specification](https://github.com/elastic/elasticsearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.<br/>
This testing suite uses that specification to generate the test for the specified version of Elasticsearch on the fly.

## Run
@@ -20,20 +20,45 @@ Once the Elasticsearch repository has been cloned, the testing suite will connec

The specification does not allow the test to be run in parallel, so it might take a while to run the entire testing suite; on my machine, `MacBookPro15,2 core i7 2.7GHz 16GB of RAM` it takes around four minutes.

### Running locally

If you want to run the integration tests on your development machine, you must have an Elasticsearch instance running first.
A local instance can be spun up in a Docker container by running the [`.ci/run-elasticsearch.sh`](/.ci/run-elasticsearch.sh) script.
This is the same script CI jobs use to run Elasticsearch for integration tests, so your results should be relatively consistent.

To simplify the process of starting a container, testing, and cleaning up the container, you can run the `make integration` target:

```sh
# set some parameters
export STACK_VERSION=8.7.0
export TEST_SUITE=free # can be `free` or `platinum`
make integration
```

If Elasticsearch doesn't come up, run `make integration-cleanup` and then `DETACH=false .ci/run-elasticsearch.sh` manually to read the startup logs.

If you get an error about `vm.max_map_count` being too low, run `sudo sysctl -w vm.max_map_count=262144` to update the setting until the next reboot, or `sudo sysctl -w vm.max_map_count=262144; echo 'vm.max_map_count=262144' | sudo tee -a /etc/sysctl.conf` to update the setting permanently.

### Exit on the first failure
Bu default the suite will run all the test, even if one assertion has failed. If you want to stop the test at the first failure, use the bailout option:

By default the suite will run all the tests, even if one assertion has failed. If you want to stop the test at the first failure, use the bailout option:

```sh
npm run test:integration -- --bail
```

### Calculate the code coverage

If you want to calculate the code coverage just run the testing suite with the following parameters, once the test ends, it will open a browser window with the results.

```sh
npm run test:integration -- --cov --coverage-report=html
```

## How does this thing work?

At first sight, it might seem complicated, but once you understand what the moving parts are, it's quite easy.

1. Connects to the given Elasticsearch instance
1. Gets the ES version and build hash
1. Checkout to the given hash (and clone the repository if it is not present)
@@ -46,7 +71,4 @@ At first sight, it might seem complicated, but once you understand what the movi

Inside the `index.js` file, you will find the connection, cloning, reading and parsing part of the test, while inside the `test-runner.js` file you will find the function to handle the assertions. Inside `test-runner.js`, we use a [queue](https://github.com/delvedor/workq) to be sure that everything is run in the correct order.
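A toy serial queue illustrating the ordering guarantee described above; this is a simplified stand-in for what the `workq` package provides, not its actual API:

```js
// Each added job runs only after every previously added job has finished,
// mirroring the "everything runs in the correct order" guarantee mentioned above.
function serialQueue () {
  let chain = Promise.resolve()
  return {
    add (job) {
      chain = chain.then(() => job())
      return chain
    },
    drain (fn) {
      return chain.then(fn)
    }
  }
}

const q = serialQueue()
q.add(async () => console.log('run yaml action'))
q.add(async () => console.log('check its assertions'))
q.drain(() => console.log('file done'))
```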

Checkout the [rest-api-spec readme](https://github.com/elastic/elasticsearch/blob/master/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc) if you want to know more about how the assertions work.

#### Why are we running the test with the `--harmony` flag?
Because on Node v6 the regex lookbehinds are not supported.
Check out the [rest-api-spec readme](https://github.com/elastic/elasticsearch/blob/main/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc) if you want to know more about how the assertions work.

@@ -27,9 +27,9 @@ process.on('unhandledRejection', function (err) {
const { writeFileSync, readFileSync, readdirSync, statSync } = require('fs')
const { join, sep } = require('path')
const yaml = require('js-yaml')
const minimist = require('minimist')
const ms = require('ms')
const { Client } = require('../../index')
const { kProductCheck } = require('@elastic/transport/lib/symbols')
const build = require('./test-runner')
const { sleep } = require('./helper')
const createJunitReporter = require('./reporter')
@@ -42,12 +42,21 @@ const MAX_API_TIME = 1000 * 90
const MAX_FILE_TIME = 1000 * 30
const MAX_TEST_TIME = 1000 * 3

const options = minimist(process.argv.slice(2), {
  boolean: ['bail'],
  string: ['suite', 'test'],
})

const freeSkips = {
  // not supported yet
  '/free/cluster.desired_nodes/10_basic.yml': ['*'],
  '/free/health/30_feature.yml': ['*'],
  '/free/health/40_useractions.yml': ['*'],
  // the v8 client never sends the scroll_id in querystgring,

  // Cannot find methods on `Internal` object
  '/free/cluster.desired_balance/10_basic.yml': ['*'],
  '/free/cluster.desired_nodes/20_dry_run.yml': ['*'],
  '/free/cluster.prevalidate_node_removal/10_basic.yml': ['*'],

  // the v8 client never sends the scroll_id in querystring,
  // the way the test is structured causes a security exception
  'free/scroll/10_basic.yml': ['Body params override query string'],
  'free/scroll/11_clear.yml': [
@@ -56,80 +65,74 @@ const freeSkips = {
  ],
  'free/cat.allocation/10_basic.yml': ['*'],
  'free/cat.snapshots/10_basic.yml': ['Test cat snapshots output'],
  // TODO: remove this once 'arbitrary_key' is implemented
  // https://github.com/elastic/elasticsearch/pull/41492
  'indices.split/30_copy_settings.yml': ['*'],

  'indices.stats/50_disk_usage.yml': ['Disk usage stats'],
  'indices.stats/60_field_usage.yml': ['Field usage stats'],

  // skipping because we are booting ES with `discovery.type=single-node`
  // and this test will fail because of this configuration
  'nodes.stats/30_discovery.yml': ['*'],

  // the expected error is returning a 503,
  // which triggers a retry and the node to be marked as dead
  'search.aggregation/240_max_buckets.yml': ['*'],

  // long values and json do not play nicely together
  'search.aggregation/40_range.yml': ['Min and max long range bounds'],

  // the yaml runner assumes that null means "does not exists",
  // while null is a valid json value, so the check will fail
  'search/320_disallow_queries.yml': ['Test disallow expensive queries'],
  'free/tsdb/90_unsupported_operations.yml': ['noop update']
  'free/tsdb/90_unsupported_operations.yml': ['noop update'],
}

const platinumBlackList = {

const platinumDenyList = {
  'api_key/10_basic.yml': ['Test get api key'],
  'api_key/20_query.yml': ['*'],
  'api_key/11_invalidation.yml': ['Test invalidate api key by realm name'],
  'analytics/histogram.yml': ['Histogram requires values in increasing order'],
  // this two test cases are broken, we should
  // return on those in the future.
  'analytics/top_metrics.yml': [
    'sort by keyword field fails',
    'sort by string script fails'
  ],
  'cat.aliases/10_basic.yml': ['Empty cluster'],
  'index/10_with_id.yml': ['Index with ID'],
  'indices.get_alias/10_basic.yml': ['Get alias against closed indices'],
  'indices.get_alias/20_empty.yml': ['Check empty aliases when getting all aliases via /_alias'],
  'text_structure/find_structure.yml': ['*'],
  // https://github.com/elastic/elasticsearch/pull/39400
  'ml/jobs_crud.yml': ['Test put job with id that is already taken'],

  // object keys must me strings, and `0.0.toString()` is `0`
  'ml/evaluate_data_frame.yml': [
    'Test binary_soft_classifition precision',
    'Test binary_soft_classifition recall',
    'Test binary_soft_classifition confusion_matrix'
  ],
  // it gets random failures on CI, must investigate
  'ml/set_upgrade_mode.yml': [
    'Attempt to open job when upgrade_mode is enabled',
    'Setting upgrade mode to disabled from enabled'
  ],

  // The cleanup fails with a index not found when retrieving the jobs
  'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'],
  'ml/bucket_correlation_agg.yml': ['Test correlation bucket agg simple'],

  // start should be a string
  'ml/jobs_get_result_overall_buckets.yml': ['Test overall buckets given epoch start and end params'],

  // this can't happen with the client
  'ml/start_data_frame_analytics.yml': ['Test start with inconsistent body/param ids'],
  'ml/stop_data_frame_analytics.yml': ['Test stop with inconsistent body/param ids'],
  'ml/preview_datafeed.yml': ['*'],

  // Investigate why is failing
  'ml/inference_crud.yml': ['*'],
  'ml/categorization_agg.yml': ['Test categorization aggregation with poor settings'],
  'ml/filter_crud.yml': ['*'],

  // investigate why this is failing
  'monitoring/bulk/10_basic.yml': ['*'],
  'monitoring/bulk/20_privileges.yml': ['*'],
  'license/20_put_license.yml': ['*'],
  'snapshot/10_basic.yml': ['*'],
  'snapshot/20_operator_privileges_disabled.yml': ['*'],

  // the body is correct, but the regex is failing
  'sql/sql.yml': ['Getting textual representation'],
  'searchable_snapshots/10_usage.yml': ['*'],
  'service_accounts/10_basic.yml': ['*'],

  // we are setting two certificates in the docker config
  'ssl/10_basic.yml': ['*'],
  'token/10_basic.yml': ['*'],
  'token/11_invalidation.yml': ['*'],

  // very likely, the index template has not been loaded yet.
  // we should run a indices.existsTemplate, but the name of the
  // template may vary during time.
@@ -147,16 +150,20 @@ const platinumBlackList = {
  'transforms_stats.yml': ['*'],
  'transforms_stats_continuous.yml': ['*'],
  'transforms_update.yml': ['*'],

  // js does not support ulongs
  'unsigned_long/10_basic.yml': ['*'],
  'unsigned_long/20_null_value.yml': ['*'],
  'unsigned_long/30_multi_fields.yml': ['*'],
  'unsigned_long/40_different_numeric.yml': ['*'],
  'unsigned_long/50_script_values.yml': ['*'],

  // the v8 client flattens the body into the parent object
  'platinum/users/10_basic.yml': ['Test put user with different username in body'],

  // docker issue?
  'watcher/execute_watch/60_http_input.yml': ['*'],

  // the checks are correct, but for some reason the test is failing on js side
  // I bet is because the backslashes in the rg
  'watcher/execute_watch/70_invalid.yml': ['*'],
@@ -170,8 +177,9 @@ const platinumBlackList = {
  'platinum/ml/delete_job_force.yml': ['Test force delete an open job that is referred by a started datafeed'],
  'platinum/ml/evaluate_data_frame.yml': ['*'],
  'platinum/ml/get_datafeed_stats.yml': ['*'],

  // start should be a string in the yaml test
  'platinum/ml/start_stop_datafeed.yml': ['*']
  'platinum/ml/start_stop_datafeed.yml': ['*'],
}

function runner (opts = {}) {
@@ -183,8 +191,6 @@ function runner (opts = {}) {
    }
  }
  const client = new Client(options)
  // TODO: remove the following line once https://github.com/elastic/elasticsearch/issues/82358 is fixed
  client.transport[kProductCheck] = null
  log('Loading yaml suite')
  start({ client, isXPack: opts.isXPack })
    .catch(err => {
@@ -289,13 +295,21 @@ async function start ({ client, isXPack }) {
      }

      const cleanPath = file.slice(file.lastIndexOf(apiName))

      // skip if --suite CLI arg doesn't match
      if (options.suite && !cleanPath.endsWith(options.suite)) continue

      log(' ' + cleanPath)
      const junitTestSuite = junitTestSuites.testsuite(apiName.slice(1) + ' - ' + cleanPath)

      for (const test of tests) {
        const testTime = now()
        const name = Object.keys(test)[0]

        // skip setups, teardowns and anything that doesn't match --test flag when present
        if (name === 'setup' || name === 'teardown') continue
        if (options.test && !name.endsWith(options.test)) continue

        const junitTestCase = junitTestSuite.testcase(name)

        stats.total += 1
@@ -316,7 +330,12 @@ async function start ({ client, isXPack }) {
          junitTestSuites.end()
          generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
          console.error(err)
          process.exit(1)

          if (options.bail) {
            process.exit(1)
          } else {
            continue
          }
        }
        const totalTestTime = now() - testTime
        junitTestCase.end()
@@ -380,7 +399,8 @@ function generateJunitXmlReport (junit, suite) {
}

if (require.main === module) {
  const node = process.env.TEST_ES_SERVER || 'http://elastic:changeme@localhost:9200'
  const scheme = process.env.TEST_SUITE === 'platinum' ? 'https' : 'http'
  const node = process.env.TEST_ES_SERVER || `${scheme}://elastic:changeme@localhost:9200`
  const opts = {
    node,
    isXPack: process.env.TEST_SUITE !== 'free'
@@ -389,26 +409,28 @@ if (require.main === module) {
}

const shouldSkip = (isXPack, file, name) => {
  if (options.suite || options.test) return false

  let list = Object.keys(freeSkips)
  for (let i = 0; i < list.length; i++) {
    const freeTest = freeSkips[list[i]]
    for (let j = 0; j < freeTest.length; j++) {
      if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) {
        const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
        log(`Skipping test ${testName} because is blacklisted in the free test`)
        log(`Skipping test ${testName} because it is denylisted in the free test suite`)
        return true
      }
    }
  }

  if (file.includes('x-pack') || isXPack) {
    list = Object.keys(platinumBlackList)
    list = Object.keys(platinumDenyList)
    for (let i = 0; i < list.length; i++) {
      const platTest = platinumBlackList[list[i]]
      const platTest = platinumDenyList[list[i]]
      for (let j = 0; j < platTest.length; j++) {
        if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
          const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
          log(`Skipping test ${testName} because is blacklisted in the platinum test`)
          log(`Skipping test ${testName} because it is denylisted in the platinum test suite`)
          return true
        }
      }

@@ -432,3 +432,12 @@ test('caFingerprint can\'t be configured over http / 2', t => {
  )
  t.end()
})

test('user agent is in the correct format', t => {
  const client = new Client({ node: 'http://localhost:9200' })
  const agentRaw = client.transport[symbols.kHeaders]['user-agent'] || ''
  const agentSplit = agentRaw.split(/\s+/)
  t.equal(agentSplit[0].split('/')[0], 'elasticsearch-js')
  t.ok(/^\d+\.\d+\.\d+/.test(agentSplit[0].split('/')[1]))
  t.end()
})
