Compare commits
212 Commits
| SHA1 | Author | Date |
|---|---|---|
| 8166b3b0b4 | |||
| 05b7ee6f5b | |||
| d1ba1423c8 | |||
| eaa6c1e2b3 | |||
| a5e8f74f8c | |||
| c0f28a6f03 | |||
| 119e979c2b | |||
| da2cb5fd27 | |||
| 6aaf80500e | |||
| e46109675a | |||
| e041b3ac5c | |||
| 803f7f00c9 | |||
| 72b0401f8e | |||
| 3c2f0723d5 | |||
| 955113e51f | |||
| 72444ed155 | |||
| 2694b5d16c | |||
| c0d22ab963 | |||
| c61b8598a4 | |||
| fac687d4af | |||
| eeb0d94046 | |||
| 94a7274647 | |||
| db41d2895c | |||
| f155f68702 | |||
| 770fac4b9d | |||
| 2241ae667e | |||
| 6751482e21 | |||
| 1bb105a469 | |||
| 257ec2214e | |||
| 5f49e49823 | |||
| 986b756c6b | |||
| 429fdab80b | |||
| 6042bcd20c | |||
| 3d59389b9d | |||
| 31d9398be4 | |||
| bd7ba9b962 | |||
| 06cb5b4926 | |||
| 2d75e0a16c | |||
| e502d2f17a | |||
| a061576809 | |||
| 0ed7cd016a | |||
| 843ad31ec5 | |||
| 31e1247df5 | |||
| ccd69195dd | |||
| 38cc36656c | |||
| 23b4434d07 | |||
| 6fcb168445 | |||
| 54e2088acd | |||
| c480a24b31 | |||
| 4d4f18db5f | |||
| ee3936b193 | |||
| b19fed4fef | |||
| b62e2427fb | |||
| 976282f95b | |||
| 355e108319 | |||
| 37e20ee85d | |||
| 93e2b8b695 | |||
| 9c092a0b30 | |||
| d161c0a428 | |||
| 8c7d4c42e6 | |||
| fbc4fa0685 | |||
| cb6084b7c3 | |||
| 35ce1bfef1 | |||
| 151aef2707 | |||
| 8579a85fde | |||
| be0400789a | |||
| e2905c5708 | |||
| f02c66cdcc | |||
| 8d868df86a | |||
| 5ebd549ad1 | |||
| 7f364b75b7 | |||
| fe0ddb31a1 | |||
| 955eb121fb | |||
| da1a798310 | |||
| 82c9e5df37 | |||
| 4e1273ef33 | |||
| dbd0ec2457 | |||
| fc7109aa66 | |||
| 758b745254 | |||
| 3f3e9bac1e | |||
| b3b9d40293 | |||
| 14cdd64f7d | |||
| 5d41135190 | |||
| e4c4f1acb7 | |||
| ad3caf9e94 | |||
| fcb421a54e | |||
| 71e6e7007a | |||
| dabe34dae8 | |||
| 346663f704 | |||
| 32345dac41 | |||
| 6efbed6be1 | |||
| 8be306b82d | |||
| 979475b542 | |||
| a829634f83 | |||
| dd9b38b051 | |||
| 0e98719d60 | |||
| 7da9976777 | |||
| 4ca358cad6 | |||
| cf2eda1ab3 | |||
| 5d747eec0c | |||
| f38cbde243 | |||
| 61c18a6ba5 | |||
| 432cd36879 | |||
| e7de86a1f2 | |||
| f23f77cc41 | |||
| 09b5c84d24 | |||
| 604d4aefa7 | |||
| e279b3ebfa | |||
| fceebae8ae | |||
| e45ed28c05 | |||
| 58b457eedc | |||
| 132d6d6062 | |||
| 9e08aaebe2 | |||
| 889fee2316 | |||
| 83b32f7ef4 | |||
| 60aa521b7e | |||
| 608b517d64 | |||
| bf4c57f7bc | |||
| 715292b501 | |||
| 1042a02733 | |||
| 9c959971a5 | |||
| e2745b4c75 | |||
| 4b8969cc78 | |||
| d62d8c9831 | |||
| 77e2f613f2 | |||
| 69b243171b | |||
| 37b8a33209 | |||
| 99cefe8b19 | |||
| 84ab2a787d | |||
| f737290d10 | |||
| 94da0d241a | |||
| 384debee9e | |||
| 94bf5b2aa7 | |||
| f34bb6aa28 | |||
| 1f9db892ea | |||
| 069103612a | |||
| db911746a0 | |||
| 7b255bed98 | |||
| 74be52ebb1 | |||
| 0e5beddd65 | |||
| bd89ab5dd7 | |||
| c202a6bbc5 | |||
| 8e162dd8b8 | |||
| 2b0eebc8fa | |||
| f97ba5b02a | |||
| 72a1114186 | |||
| 542585a5dc | |||
| e1de2bd53d | |||
| 4be14a1f6c | |||
| a71ebb5f68 | |||
| 05f7078534 | |||
| b250049ee7 | |||
| fe2d8c1915 | |||
| b9ea8f8906 | |||
| 896216860f | |||
| 45e3c0657a | |||
| b65e468b95 | |||
| 768ba3d8ae | |||
| 2da30cd4cd | |||
| a13992ec7d | |||
| 95fd81a883 | |||
| 6f2aaa5c7c | |||
| b857d8ee71 | |||
| 4aa00e03e1 | |||
| e2974b0747 | |||
| 3bd7ba95f8 | |||
| f96aa32345 | |||
| 628254df2d | |||
| 1ef318aded | |||
| ec9a4dc960 | |||
| f3d9dfb48e | |||
| 7f7942e207 | |||
| d584836399 | |||
| c7cbe941db | |||
| 113b32258d | |||
| 6e63530801 | |||
| 38c17fd7f3 | |||
| 63eb92b42a | |||
| 7475dba8b9 | |||
| 3ad00b4a9f | |||
| 2721008867 | |||
| c106146d30 | |||
| 78dab89db8 | |||
| af2dbc01d3 | |||
| 3ac5a1cc65 | |||
| fba3e41862 | |||
| 6a821583c0 | |||
| 86d89a47a0 | |||
| 1d84468762 | |||
| 8afdec052a | |||
| b77bdf2a79 | |||
| d61d54a811 | |||
| d430aecdbd | |||
| 29a0e53978 | |||
| 05e3139f80 | |||
| 8b9ca79d5b | |||
| 24e1f4fb26 | |||
| fa33037b86 | |||
| c2fb0a294f | |||
| abd15eb111 | |||
| 352f73e7c2 | |||
| 1d8da99d5b | |||
| 8df91fce7c | |||
| 1607a0d3f7 | |||
| 57ee5cf6c2 | |||
| 6eabf37097 | |||
| 5413eb5f35 | |||
| 4aaf49b6ea | |||
| d3f22f1e14 | |||
| 51323e769d | |||
| 1fb789862d | |||
| c2c417a9fd |||
@@ -11,6 +11,6 @@ RUN apt-get clean -y && \
WORKDIR /usr/src/app

COPY package.json .
RUN npm install --production=false
RUN npm install

COPY . .
30
.buildkite/Dockerfile-make
Normal file
@@ -0,0 +1,30 @@
ARG NODE_JS_VERSION=${NODE_JS_VERSION:-18}
FROM node:${NODE_JS_VERSION}

ARG BUILDER_UID=1000
ARG BUILDER_GID=1000
ENV BUILDER_USER elastic
ENV BUILDER_GROUP elastic

# install zip util
RUN apt-get clean -y && \
  apt-get update -y && \
  apt-get install -y zip

# Set user permissions and directory
RUN (id -g ${BUILDER_GID} || groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP}) \
  && (id -u ${BUILDER_UID} || useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GID} -m elastic) \
  && mkdir -p /usr/src/elasticsearch-js \
  && chown -R ${BUILDER_UID}:${BUILDER_GID} /usr/src/

WORKDIR /usr/src/elasticsearch-js

# run remainder of commands as non-root user
USER ${BUILDER_UID}:${BUILDER_GID}

# install dependencies
COPY package.json .
RUN npm install

# copy project files
COPY . .
@@ -2,7 +2,7 @@
#
# Shared cleanup routines between different steps
#
# Please source .ci/functions/imports.sh as a whole not just this file
# Please source .buildkite/functions/imports.sh as a whole not just this file
#
# Version 1.0.0
# - Initial version after refactor
@@ -2,7 +2,7 @@
#
# Exposes a routine scripts can call to wait for a container if that container set up a health command
#
# Please source .ci/functions/imports.sh as a whole not just this file
# Please source .buildkite/functions/imports.sh as a whole not just this file
#
# Version 1.0.1
# - Initial version after refactor
@@ -74,14 +74,15 @@ async function release (args) {

async function bump (args) {
  assert(args.length === 1, 'Bump task expects one parameter')
  const [version] = args
  let [version] = args
  const packageJson = JSON.parse(await readFile(
    join(import.meta.url, '..', 'package.json'),
    'utf8'
  ))

  if (version.split('.').length === 2) version = `${version}.0`
  const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
  assert(semver.valid(cleanVersion))
  assert(semver.valid(cleanVersion), `${cleanVersion} is not seen as a valid semver version. raw version: ${version}`)
  packageJson.version = cleanVersion
  packageJson.versionCanary = `${cleanVersion}-canary.0`
@@ -94,7 +95,7 @@ async function bump (args) {
  const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'), 'utf8')
  await writeFile(
    join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}-SNAPSHOT`),
    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}`),
    'utf8'
  )
}
@@ -124,6 +125,13 @@ async function codegen (args) {
  await $`cp -R ${join(import.meta.url, '..', '..', 'elastic-client-generator-js', 'output')}/* ${join(import.meta.url, '..', 'src', 'api')}`
  await $`mv ${join(import.meta.url, '..', 'src', 'api', 'reference.asciidoc')} ${join(import.meta.url, '..', 'docs', 'reference.asciidoc')}`
  await $`npm run build`

  // run docs example generation
  if (version === 'main') {
    await $`node ./scripts/generate-docs-examples.js`
  } else {
    await $`node ./scripts/generate-docs-examples.js ${version.split('.').slice(0, 2).join('.')}`
  }
}

function onError (err) {
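The `bump` changes above pad a two-segment version such as `8.18` to `8.18.0` and strip a `-SNAPSHOT` suffix before validating it with `semver`. A minimal standalone sketch of that normalization, assuming Node.js with the `semver` package installed (the `normalizeVersion` wrapper is illustrative, not part of the repo's `make.mjs`):

```js
// Sketch of the version normalization performed by the bump task above.
import assert from 'node:assert'
import semver from 'semver'

function normalizeVersion (version) {
  // "8.18" is padded to "8.18.0" so semver can parse it
  if (version.split('.').length === 2) version = `${version}.0`
  // a "-SNAPSHOT" suffix is dropped before validation
  const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
  assert(semver.valid(cleanVersion), `${cleanVersion} is not seen as a valid semver version. raw version: ${version}`)
  return cleanVersion
}

console.log(normalizeVersion('8.18'))           // '8.18.0'
console.log(normalizeVersion('9.0.0-SNAPSHOT')) // '9.0.0'
```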
@@ -1,31 +1,32 @@
---
steps:
- label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}"
agents:
agents:
provider: "gcp"
image: family/core-ubuntu-2204
memory: "8G"
cpu: "2"

steps:
- label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }})"
env:
NODE_VERSION: "{{ matrix.nodejs }}"
TEST_SUITE: "{{ matrix.suite }}"
STACK_VERSION: 8.12.0-SNAPSHOT
TEST_SUITE: "platinum"
STACK_VERSION: 9.0.0
GITHUB_TOKEN_PATH: "secret/ci/elastic-elasticsearch-js/github-token"
TEST_ES_STACK: "1"
matrix:
setup:
suite:
- "free"
- "platinum"
nodejs:
- "18"
- "20"
- "22"
command: ./.buildkite/run-tests.sh
artifact_paths: "./junit-output/junit-*.xml"
- wait: ~
continue_on_failure: true
- label: ":junit: Test results"
agents:
provider: "gcp"
image: family/core-ubuntu-2204
plugins:
- junit-annotate#v2.4.1:
artifacts: "junit-output/junit-*.xml"
job-uuid-file-pattern: 'junit-(.*).xml'
job-uuid-file-pattern: "junit-(.*).xml"
fail-build-on-error: true
failure-format: file
@@ -9,7 +9,6 @@
"\\.md$",
"\\.asciidoc$",
"^docs\\/",
"^\\.ci\\/",
"^scripts\\/",
"^catalog-info\\.yaml$",
"^test\\/unit\\/",
@@ -15,17 +15,24 @@ docker build \
  --build-arg NODE_VERSION="$NODE_VERSION" \
  .

echo "--- :javascript: Running $TEST_SUITE tests"
GITHUB_TOKEN=$(vault read -field=token "$GITHUB_TOKEN_PATH")
export GITHUB_TOKEN

echo "--- :javascript: Running tests"
mkdir -p "$repo/junit-output"
docker run \
  --network="${network_name}" \
  --env TEST_ES_STACK \
  --env STACK_VERSION \
  --env GITHUB_TOKEN \
  --env "TEST_ES_SERVER=${elasticsearch_url}" \
  --env "ELASTIC_PASSWORD=${elastic_password}" \
  --env "TEST_SUITE=${TEST_SUITE}" \
  --env "ELASTIC_USER=elastic" \
  --env "BUILDKITE=true" \
  --volume "/usr/src/app/node_modules" \
  --volume "$repo:/usr/src/app" \
  --volume "$repo/junit-output:/junit-output" \
  --name elasticsearch-js \
  --rm \
  elastic/elasticsearch-js \
  bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"
  bash -c "npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"
@@ -1,30 +0,0 @@
ARG NODE_JS_VERSION=${NODE_JS_VERSION:-18}
FROM node:${NODE_JS_VERSION}

ARG BUILDER_UID=1000
ARG BUILDER_GID=1000
ENV BUILDER_USER elastic
ENV BUILDER_GROUP elastic

# install zip util
RUN apt-get clean -y && \
  apt-get update -y && \
  apt-get install -y zip

# Set user permissions and directory
RUN (id -g ${BUILDER_GID} || groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP}) \
  && (id -u ${BUILDER_UID} || useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GID} -m elastic) \
  && mkdir -p /usr/src/elasticsearch-js \
  && chown -R ${BUILDER_UID}:${BUILDER_GID} /usr/src/

WORKDIR /usr/src/elasticsearch-js

# run remainder of commands as non-root user
USER ${BUILDER_UID}:${BUILDER_GID}

# install dependencies
COPY package.json .
RUN npm install --production=false

# copy project files
COPY . .
214
.ci/make.sh
@@ -1,214 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# ------------------------------------------------------- #
|
||||
#
|
||||
# Build entry script for elasticsearch-js
|
||||
#
|
||||
# Must be called: ./.ci/make.sh <target> <params>
|
||||
#
|
||||
# Version: 1.1.0
|
||||
#
|
||||
# Targets:
|
||||
# ---------------------------
|
||||
# assemble <VERSION> : build client artifacts with version
|
||||
# bump <VERSION> : bump client internals to version
|
||||
# bumpmatrix <VERSION> : bump stack version in test matrix to version
|
||||
# codegen <VERSION> : generate endpoints
|
||||
# docsgen <VERSION> : generate documentation
|
||||
# examplegen : generate the doc examples
|
||||
# clean : clean workspace
|
||||
#
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Bootstrap
|
||||
# ------------------------------------------------------- #
|
||||
script_path=$(dirname "$(realpath -s "$0")")
|
||||
repo=$(realpath "$script_path/../")
|
||||
|
||||
# shellcheck disable=SC1090
|
||||
CMD=$1
|
||||
TASK=$1
|
||||
TASK_ARGS=()
|
||||
VERSION=$2
|
||||
STACK_VERSION=$VERSION
|
||||
set -euo pipefail
|
||||
|
||||
product="elastic/elasticsearch-js"
|
||||
output_folder=".ci/output"
|
||||
codegen_folder=".ci/output"
|
||||
OUTPUT_DIR="$repo/${output_folder}"
|
||||
NODE_JS_VERSION=18
|
||||
WORKFLOW=${WORKFLOW-staging}
|
||||
mkdir -p "$OUTPUT_DIR"
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
|
||||
|
||||
case $CMD in
|
||||
clean)
|
||||
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
|
||||
rm -rf "$output_folder"
|
||||
echo -e "\033[32;1mdone.\033[0m"
|
||||
exit 0
|
||||
;;
|
||||
assemble)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: assemble artifact $VERSION\033[0m"
|
||||
TASK=release
|
||||
TASK_ARGS=("$VERSION" "$output_folder")
|
||||
;;
|
||||
codegen)
|
||||
if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
|
||||
# fall back to branch name or `main` if no VERSION is set
|
||||
branch_name=$(git rev-parse --abbrev-ref HEAD)
|
||||
if [[ "$branch_name" =~ ^[0-9]+\.[0-9]+ ]]; then
|
||||
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
|
||||
VERSION="$branch_name"
|
||||
else
|
||||
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using \`main\`\033[0m"
|
||||
VERSION="main"
|
||||
fi
|
||||
fi
|
||||
if [ "$VERSION" = 'main' ]; then
|
||||
echo -e "\033[36;1mTARGET: codegen API $VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
|
||||
fi
|
||||
|
||||
TASK=codegen
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
docsgen)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
|
||||
TASK=codegen
|
||||
TASK_ARGS=("$VERSION" "$codegen_folder")
|
||||
;;
|
||||
examplesgen)
|
||||
echo -e "\033[36;1mTARGET: generate examples\033[0m"
|
||||
TASK=codegen
|
||||
TASK_ARGS=("$VERSION" "$codegen_folder")
|
||||
;;
|
||||
bump)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
|
||||
TASK=bump
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
bumpmatrix)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
|
||||
TASK=bumpmatrix
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
*)
|
||||
echo -e "\n'$CMD' is not supported right now\n"
|
||||
echo -e "\nUsage:"
|
||||
echo -e "\t $0 release \$VERSION\n"
|
||||
echo -e "\t $0 bump \$VERSION"
|
||||
echo -e "\t $0 codegen \$VERSION"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Build Container
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
echo -e "\033[34;1mINFO: building $product container\033[0m"
|
||||
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--tag "$product" \
|
||||
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
|
||||
--build-arg "BUILDER_UID=$(id -u)" \
|
||||
--build-arg "BUILDER_GID=$(id -g)" \
|
||||
.
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Run the Container
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
echo -e "\033[34;1mINFO: running $product container\033[0m"
|
||||
|
||||
if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}" ]]; then
|
||||
echo -e "\033[34;1mINFO: Running in local mode"
|
||||
docker run \
|
||||
-u "$(id -u):$(id -g)" \
|
||||
--volume "$repo:/usr/src/elasticsearch-js" \
|
||||
--volume /usr/src/elasticsearch-js/node_modules \
|
||||
--volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
|
||||
--env "WORKFLOW=$WORKFLOW" \
|
||||
--name make-elasticsearch-js \
|
||||
--rm \
|
||||
$product \
|
||||
/bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
|
||||
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
else
|
||||
echo -e "\033[34;1mINFO: Running in CI mode"
|
||||
docker run \
|
||||
--volume "$repo:/usr/src/elasticsearch-js" \
|
||||
--volume /usr/src/elasticsearch-js/node_modules \
|
||||
-u "$(id -u):$(id -g)" \
|
||||
--env "WORKFLOW=$WORKFLOW" \
|
||||
--name make-elasticsearch-js \
|
||||
--rm \
|
||||
$product \
|
||||
/bin/bash -c "cd /usr/src && \
|
||||
git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
|
||||
mkdir -p /usr/src/elastic-client-generator-js/output && \
|
||||
cd /usr/src/elasticsearch-js && \
|
||||
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Post Command tasks & checks
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
if [[ "$CMD" == "assemble" ]]; then
|
||||
if compgen -G ".ci/output/*" > /dev/null; then
|
||||
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "bump" ]]; then
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "codegen" ]]; then
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "docsgen" ]]; then
|
||||
echo "TODO"
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "examplesgen" ]]; then
|
||||
echo "TODO"
|
||||
fi
|
||||
@@ -5,3 +5,7 @@ elasticsearch
.git
lib
junit-output
.tap
rest-api-spec
yaml-rest-tests
generated-tests
3
.github/CODEOWNERS
vendored
Normal file
@@ -0,0 +1,3 @@
package.json @joshmock
renovate.json @joshmock
catalog-info.yaml @joshmock
49
.github/ISSUE_TEMPLATE/bug.md
vendored
@ -1,49 +0,0 @@
|
||||
---
|
||||
name: 🐛 Bug report
|
||||
about: Create a report to help us improve
|
||||
---
|
||||
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `not reproducible` label.**
|
||||
|
||||
## 🐛 Bug Report
|
||||
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
## To Reproduce
|
||||
|
||||
Steps to reproduce the behavior:
|
||||
|
||||
Paste your code here:
|
||||
|
||||
```js
|
||||
|
||||
```
|
||||
|
||||
<!--
|
||||
In some cases, it might be challenging to reproduce the bug in a few lines of code.
|
||||
You can fork the following repository, which contains all the configuration needed
|
||||
to spin up a three nodes Elasticsearch cluster with security enabled.
|
||||
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
|
||||
https://github.com/delvedor/es-reproduce-issue
|
||||
--->
|
||||
|
||||
## Expected behavior
|
||||
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
Paste the results here:
|
||||
|
||||
```js
|
||||
|
||||
```
|
||||
|
||||
## Your Environment
|
||||
|
||||
- *node version*: 6,8,10
|
||||
- `@elastic/elasticsearch` *version*: >=7.0.0
|
||||
- *os*: Mac, Windows, Linux
|
||||
- *any other relevant information*
|
||||
66
.github/ISSUE_TEMPLATE/bug.yaml
vendored
Normal file
@ -0,0 +1,66 @@
|
||||
---
|
||||
name: 🐛 Bug report
|
||||
description: Create a report to help us improve
|
||||
labels: ["Category: Bug"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `Category: Not an issue` label.**
|
||||
|
||||
- type: textarea
|
||||
id: bug-report
|
||||
attributes:
|
||||
label: 🐛 Bug report
|
||||
description: A clear and concise description of what the bug is.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: reproduction
|
||||
attributes:
|
||||
label: To reproduce
|
||||
description: Steps to reproduce the behavior
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: node-js-version
|
||||
attributes:
|
||||
label: Node.js version
|
||||
placeholder: 18.x, 20.x, etc.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: client-version
|
||||
attributes:
|
||||
label: "@elastic/elasticsearch version"
|
||||
placeholder: 7.17.0, 8.14.1, etc.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
placeholder: Ubuntu 22.04, macOS, etc.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: env-info
|
||||
attributes:
|
||||
label: Any other relevant environment information
|
||||
22
.github/ISSUE_TEMPLATE/feature.md
vendored
@ -1,22 +0,0 @@
|
||||
---
|
||||
name: 🚀 Feature Proposal
|
||||
about: Submit a proposal for a new feature
|
||||
---
|
||||
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `invalid` label.**
|
||||
|
||||
## 🚀 Feature Proposal
|
||||
|
||||
A clear and concise description of what the feature is.
|
||||
|
||||
## Motivation
|
||||
|
||||
Please outline the motivation for the proposal.
|
||||
|
||||
## Example
|
||||
|
||||
Please provide an example for how this feature would be used.
|
||||
33
.github/ISSUE_TEMPLATE/feature.yaml
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
---
|
||||
name: 🚀 Feature Proposal
|
||||
description: Submit a proposal for a new feature
|
||||
labels: ["Category: Feature"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `Category: Not an issue` label.**
|
||||
|
||||
- type: textarea
|
||||
id: feature-proposal
|
||||
attributes:
|
||||
label: 🚀 Feature Proposal
|
||||
description: A clear and concise description of what the feature is.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: motivation
|
||||
attributes:
|
||||
label: Motivation
|
||||
description: Please outline the motivation for the proposal.
|
||||
|
||||
- type: textarea
|
||||
id: example
|
||||
attributes:
|
||||
label: Example
|
||||
description: Please provide an example for how this feature would be used.
|
||||
10
.github/ISSUE_TEMPLATE/question.md
vendored
@ -1,10 +0,0 @@
|
||||
---
|
||||
name: 💬 Questions / Help
|
||||
about: If you have questions, please check our Gitter or Help repo
|
||||
---
|
||||
|
||||
## 💬 Questions and Help
|
||||
|
||||
### Please note that this issue tracker is not a help forum and this issue may be closed.
|
||||
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
21
.github/ISSUE_TEMPLATE/question.yaml
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
---
|
||||
name: 💬 Questions / Help
|
||||
description: If you have questions, please check our community forum or support
|
||||
labels: ["Category: Question"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
### Please note that this issue tracker is not a help forum and this issue may be closed.
|
||||
|
||||
Please check our [community forum](https://discuss.elastic.co/) or [contact Elastic support](https://www.elastic.co/support) if your issue is not specifically related to the documented functionality of this client library.
|
||||
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
- type: textarea
|
||||
id: question
|
||||
attributes:
|
||||
label: Question
|
||||
description: Your question or comment
|
||||
validations:
|
||||
required: true
|
||||
56
.github/ISSUE_TEMPLATE/regression.md
vendored
@ -1,56 +0,0 @@
|
||||
---
|
||||
name: 💥 Regression Report
|
||||
about: Report unexpected behavior that worked in previous versions
|
||||
---
|
||||
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `invalid` label.**
|
||||
|
||||
## 💥 Regression Report
|
||||
|
||||
A clear and concise description of what the regression is.
|
||||
|
||||
## Last working version
|
||||
|
||||
Worked up to version:
|
||||
|
||||
Stopped working in version:
|
||||
|
||||
## To Reproduce
|
||||
|
||||
Steps to reproduce the behavior:
|
||||
|
||||
Paste your code here:
|
||||
|
||||
```js
|
||||
|
||||
```
|
||||
|
||||
<!--
|
||||
In some cases, it might be challenging to reproduce the bug in a few lines of code.
|
||||
You can fork the following repository, which contains all the configuration needed
|
||||
to spin up a three nodes Elasticsearch cluster with security enabled.
|
||||
The repository also contains a preconfigured client instance that you can use to reproduce the issue.
|
||||
https://github.com/delvedor/es-reproduce-issue
|
||||
--->
|
||||
|
||||
## Expected behavior
|
||||
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
Paste the results here:
|
||||
|
||||
```js
|
||||
|
||||
```
|
||||
|
||||
## Your Environment
|
||||
|
||||
- *node version*: 6,8,10
|
||||
- `@elastic/elasticsearch` *version*: >=7.0.0
|
||||
- *typescript version*: 4.x (if applicable)
|
||||
- *os*: Mac, Windows, Linux
|
||||
- *any other relevant information*
|
||||
92
.github/ISSUE_TEMPLATE/regression.yaml
vendored
Normal file
@ -0,0 +1,92 @@
|
||||
---
|
||||
name: 💥 Regression Report
|
||||
description: Report unexpected behavior that worked in previous versions
|
||||
labels: ["Category: Bug"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
It's not uncommon that somebody already opened an issue or in the best case it's already fixed but not merged. That's the reason why you should [search](https://github.com/elastic/elasticsearch-js/issues) at first before submitting a new one.
|
||||
|
||||
**Please read this entire template before posting any issue. If you ignore these instructions
|
||||
and post an issue here that does not follow the instructions, your issue might be closed,
|
||||
locked, and assigned the `Category: Not an issue` label.**
|
||||
|
||||
- type: textarea
|
||||
id: report
|
||||
attributes:
|
||||
label: Regression report
|
||||
description: A clear and concise description of what the regression is.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: last-working-version
|
||||
attributes:
|
||||
label: Last working version
|
||||
description: Version of `@elastic/elasticsearch` where this last worked.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: to-reproduce
|
||||
attributes:
|
||||
label: To reproduce
|
||||
description: |
|
||||
Paste your code here that shows how to reproduce the behavior.
|
||||
|
||||
In some cases, it might be challenging to reproduce the bug in a few lines of code.
|
||||
You can fork the following repository, which contains all the configuration needed to spin up a three nodes Elasticsearch cluster with security enabled.
|
||||
[This repository](https://github.com/delvedor/es-reproduce-issue) also contains a preconfigured client instance that you can use to reproduce the issue.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: expected-behavior
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: node-version
|
||||
attributes:
|
||||
label: Node.js version
|
||||
description: What version of Node.js you are using (`node --version`).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: typescript-version
|
||||
attributes:
|
||||
label: TypeScript version
|
||||
description: TypeScript version you are using, if applicable.
|
||||
|
||||
- type: input
|
||||
id: elasticsearch-client-version
|
||||
attributes:
|
||||
label: Elasticsearch client version
|
||||
description: What version of `@elastic/elasticsearch` and `@elastic/transport` you are using (`npm ls -a | grep '@elastic'`).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: elasticsearch-version
|
||||
attributes:
|
||||
label: Elasticsearch server version
|
||||
description: What version of Elasticsearch you are using.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: operating-system
|
||||
attributes:
|
||||
label: Operating system
|
||||
description: What operating system you are running.
|
||||
placeholder: e.g. Linux, MacOS, Windows
|
||||
|
||||
- type: textarea
|
||||
id: env-info
|
||||
attributes:
|
||||
label: Any other relevant environment information.
|
||||
6
.github/ISSUE_TEMPLATE/security.md
vendored
@ -1,6 +0,0 @@
|
||||
---
|
||||
name: 👮 Security Issue
|
||||
about: Responsible Disclosure
|
||||
---
|
||||
|
||||
If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).
|
||||
8
.github/ISSUE_TEMPLATE/security.yaml
vendored
Normal file
@@ -0,0 +1,8 @@
---
name: 👮 Security Issue
description: Responsible disclosure
body:
- type: markdown
attributes:
value: |
If you want to report a security issue, please take a look at [elastic/security](https://www.elastic.co/community/security).
222
.github/make.sh
vendored
Executable file
@ -0,0 +1,222 @@
|
||||
#!/usr/bin/env bash
|
||||
# ------------------------------------------------------- #
|
||||
#
|
||||
# Build entry script for elasticsearch-js
|
||||
#
|
||||
# Must be called: ./.github/make.sh <target> <params>
|
||||
#
|
||||
# Version: 1.1.0
|
||||
#
|
||||
# Targets:
|
||||
# ---------------------------
|
||||
# assemble <VERSION> : build client artifacts with version
|
||||
# bump <VERSION> : bump client internals to version
|
||||
# bumpmatrix <VERSION> : bump stack version in test matrix to version
|
||||
# codegen <VERSION> : generate endpoints
|
||||
# docsgen <VERSION> : generate documentation
|
||||
# examplegen : generate the doc examples
|
||||
# clean : clean workspace
|
||||
#
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Bootstrap
|
||||
# ------------------------------------------------------- #
|
||||
script_path=$(dirname "$(realpath -s "$0")")
|
||||
repo=$(realpath "$script_path/../")
|
||||
|
||||
# shellcheck disable=SC1090
|
||||
CMD=$1
|
||||
TASK=$1
|
||||
TASK_ARGS=()
|
||||
VERSION=$2
|
||||
STACK_VERSION=$VERSION
|
||||
set -euo pipefail
|
||||
|
||||
product="elastic/elasticsearch-js"
|
||||
output_folder=".buildkite/output"
|
||||
codegen_folder=".buildkite/output"
|
||||
OUTPUT_DIR="$repo/${output_folder}"
|
||||
NODE_JS_VERSION=22
|
||||
WORKFLOW=${WORKFLOW-staging}
|
||||
mkdir -p "$OUTPUT_DIR"
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
|
||||
|
||||
case $CMD in
|
||||
clean)
|
||||
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
|
||||
rm -rf "$output_folder"
|
||||
echo -e "\033[32;1mdone.\033[0m"
|
||||
exit 0
|
||||
;;
|
||||
assemble)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: assemble artifact $VERSION\033[0m"
|
||||
TASK=release
|
||||
TASK_ARGS=("$VERSION" "$output_folder")
|
||||
;;
|
||||
codegen)
|
||||
if [ -v "$VERSION" ] || [[ -z "$VERSION" ]]; then
|
||||
# fall back to branch name or `main` if no VERSION is set
|
||||
branch_name=$(git rev-parse --abbrev-ref HEAD)
|
||||
if [[ "$branch_name" =~ ^[0-9]+\.([0-9]+|x) ]]; then
|
||||
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using branch name: \`$branch_name\`\033[0m"
|
||||
VERSION="$branch_name"
|
||||
else
|
||||
echo -e "\033[36;1mTARGET: codegen -> No VERSION argument found, using \`main\`\033[0m"
|
||||
VERSION="main"
|
||||
fi
|
||||
fi
|
||||
if [ "$VERSION" = 'main' ]; then
|
||||
echo -e "\033[36;1mTARGET: codegen API $VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
|
||||
fi
|
||||
|
||||
TASK=codegen
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
docsgen)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
|
||||
TASK=codegen
|
||||
TASK_ARGS=("$VERSION" "$codegen_folder")
|
||||
;;
|
||||
examplesgen)
|
||||
echo -e "\033[36;1mTARGET: generate examples\033[0m"
|
||||
TASK=codegen
|
||||
TASK_ARGS=("$VERSION" "$codegen_folder")
|
||||
;;
|
||||
bump)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
|
||||
TASK=bump
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
bumpmatrix)
|
||||
if [ -v $VERSION ]; then
|
||||
echo -e "\033[31;1mTARGET: bumpmatrix -> missing version parameter\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "\033[36;1mTARGET: bump stack in test matrix to version $VERSION\033[0m"
|
||||
TASK=bumpmatrix
|
||||
TASK_ARGS=("$VERSION")
|
||||
;;
|
||||
*)
|
||||
echo -e "\n'$CMD' is not supported right now\n"
|
||||
echo -e "\nUsage:"
|
||||
echo -e "\t $0 release \$VERSION\n"
|
||||
echo -e "\t $0 bump \$VERSION"
|
||||
echo -e "\t $0 codegen \$VERSION"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Build Container
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
echo -e "\033[34;1mINFO: building $product container\033[0m"
|
||||
|
||||
docker build \
|
||||
--file .buildkite/Dockerfile-make \
|
||||
--tag "$product" \
|
||||
--build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
|
||||
--build-arg "BUILDER_UID=$(id -u)" \
|
||||
--build-arg "BUILDER_GID=$(id -g)" \
|
||||
.
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Run the Container
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
echo -e "\033[34;1mINFO: running $product container\033[0m"
|
||||
|
||||
if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}" ]]; then
|
||||
echo -e "\033[34;1mINFO: Running in local mode"
|
||||
docker run \
|
||||
-u "$(id -u):$(id -g)" \
|
||||
--volume "$repo:/usr/src/elasticsearch-js" \
|
||||
--volume /usr/src/elasticsearch-js/node_modules \
|
||||
--volume "$(realpath "$repo/../elastic-client-generator-js"):/usr/src/elastic-client-generator-js" \
|
||||
--env "WORKFLOW=$WORKFLOW" \
|
||||
--name make-elasticsearch-js \
|
||||
--rm \
|
||||
$product \
|
||||
/bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
|
||||
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
else
|
||||
echo -e "\033[34;1mINFO: Running in CI mode"
|
||||
|
||||
# determine branch to clone
|
||||
GENERATOR_BRANCH="main"
|
||||
if [[ "$VERSION" == 8.* ]]; then
|
||||
GENERATOR_BRANCH="8.x"
|
||||
fi
|
||||
echo -e "\033[34;1mINFO: Generator branch: $GENERATOR_BRANCH"
|
||||
|
||||
docker run \
|
||||
--volume "$repo:/usr/src/elasticsearch-js" \
|
||||
--volume /usr/src/elasticsearch-js/node_modules \
|
||||
-u "$(id -u):$(id -g)" \
|
||||
--env "WORKFLOW=$WORKFLOW" \
|
||||
--name make-elasticsearch-js \
|
||||
--rm \
|
||||
$product \
|
||||
/bin/bash -c "cd /usr/src && \
|
||||
git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
|
||||
mkdir -p /usr/src/elastic-client-generator-js/output && \
|
||||
cd /usr/src/elasticsearch-js && \
|
||||
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------- #
|
||||
# Post Command tasks & checks
|
||||
# ------------------------------------------------------- #
|
||||
|
||||
if [[ "$CMD" == "assemble" ]]; then
|
||||
if compgen -G ".buildkite/output/*" >/dev/null; then
|
||||
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "bump" ]]; then
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "codegen" ]]; then
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
|
||||
else
|
||||
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "docsgen" ]]; then
|
||||
echo "TODO"
|
||||
fi
|
||||
|
||||
if [[ "$CMD" == "examplesgen" ]]; then
|
||||
echo "TODO"
|
||||
fi
|
||||
81
.github/workflows/nodejs.yml
vendored
@ -9,15 +9,17 @@ jobs:
|
||||
name: Detect files changed
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
src-only: '${{ steps.changes.outputs.src-only }}'
|
||||
src-only: "${{ steps.changes.outputs.src-only }}"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dorny/paths-filter/@v2.11.1
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dorny/paths-filter/@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
id: changes
|
||||
with:
|
||||
filters: |
|
||||
src-only:
|
||||
- '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.ci,.buildkite,scripts}/**/*|catalog-info.yaml)'
|
||||
- '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.buildkite,scripts}/**/*|catalog-info.yaml)'
|
||||
- '.github/workflows/**'
|
||||
|
||||
test:
|
||||
@ -30,24 +32,19 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [18.x, 20.x]
|
||||
node-version: [18.x, 20.x, 22.x, 23.x]
|
||||
os: [ubuntu-latest, windows-latest, macOS-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
# workaround for failing tests on Node.js 14.x
|
||||
# see https://github.com/actions/setup-node/issues/411
|
||||
- name: Force install specific npm version
|
||||
run: |
|
||||
npm install --global npm@8.3.1
|
||||
npm install --global npm@9.7.1
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
npm install
|
||||
@ -60,21 +57,23 @@ jobs:
|
||||
run: |
|
||||
npm run test:unit
|
||||
|
||||
- name: ECMAScript module test
|
||||
run: |
|
||||
npm run test:esm
|
||||
|
||||
license:
|
||||
name: License check
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [20.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
|
||||
with:
|
||||
node-version: 22.x
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
@ -83,3 +82,39 @@ jobs:
|
||||
- name: License checker
|
||||
run: |
|
||||
npm run license-checker
|
||||
|
||||
test-bun:
|
||||
name: Test Bun
|
||||
runs-on: ${{ matrix.os }}
|
||||
needs: paths-filter
|
||||
# only run if code relevant to unit tests was changed
|
||||
if: needs.paths-filter.outputs.src-only == 'true'
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macOS-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Use Bun
|
||||
uses: oven-sh/setup-bun@4bc047ad259df6fc24a6c9b0f9a0cb08cf17fbe5 # v2
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
bun install
|
||||
|
||||
- name: Lint
|
||||
run: |
|
||||
bun run lint
|
||||
|
||||
- name: Unit test
|
||||
run: |
|
||||
bun run test:unit-bun
|
||||
|
||||
- name: ECMAScript module test
|
||||
run: |
|
||||
bun run test:esm
|
||||
|
||||
38
.github/workflows/npm-publish.yml
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
name: Publish Package to npm
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: "Git branch to build and publish"
|
||||
required: true
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.branch }}
|
||||
- uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
|
||||
with:
|
||||
node-version: "22.x"
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- run: npm install -g npm
|
||||
- run: npm install
|
||||
- run: npm test
|
||||
- run: npm publish --provenance --access public
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
- run: |
|
||||
version=$(jq -r .version package.json)
|
||||
gh release create \
|
||||
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
|
||||
--target "$BRANCH_NAME" \
|
||||
-t "v$version" \
|
||||
"v$version"
|
||||
env:
|
||||
BRANCH_NAME: ${{ github.event.inputs.branch }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
43
.github/workflows/serverless-patch.sh
vendored
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -exuo pipefail
|
||||
|
||||
merge_commit_sha=$(jq -r '.pull_request.merge_commit_sha' "$GITHUB_EVENT_PATH")
|
||||
pull_request_id=$(jq -r '.pull_request.number' "$GITHUB_EVENT_PATH")
|
||||
pr_shortcode="elastic/elasticsearch-js#$pull_request_id"
|
||||
|
||||
# generate patch file
|
||||
cd "$GITHUB_WORKSPACE/stack"
|
||||
git format-patch -1 --stdout "$merge_commit_sha" > /tmp/patch.diff
|
||||
|
||||
# set committer info
|
||||
git config --global user.email "elasticmachine@users.noreply.github.com"
|
||||
git config --global user.name "Elastic Machine"
|
||||
|
||||
# apply patch file
|
||||
cd "$GITHUB_WORKSPACE/serverless"
|
||||
git am -C1 --reject /tmp/patch.diff || git am --quit
|
||||
|
||||
# generate PR body comment
|
||||
comment="Patch applied from $pr_shortcode"
|
||||
|
||||
# enumerate rejected patches in PR comment
|
||||
has_rejects='false'
|
||||
for f in ./**/*.rej; do
|
||||
has_rejects='true'
|
||||
comment="$comment
|
||||
|
||||
## Rejected patch \`$f\` must be resolved:
|
||||
|
||||
\`\`\`diff
|
||||
$(cat "$f")
|
||||
\`\`\`
|
||||
"
|
||||
done
|
||||
|
||||
# delete .rej files
|
||||
rm -fv ./**/*.rej
|
||||
|
||||
# send data to output parameters
|
||||
echo "$comment" > /tmp/pr_body
|
||||
echo "PR_DRAFT=$has_rejects" >> "$GITHUB_OUTPUT"
|
||||
53
.github/workflows/serverless-patch.yml
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
---
|
||||
name: Apply PR changes to serverless
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- closed
|
||||
- labeled
|
||||
|
||||
jobs:
|
||||
apply-patch:
|
||||
name: Apply patch
|
||||
runs-on: ubuntu-latest
|
||||
# Only react to merged PRs for security reasons.
|
||||
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
|
||||
if: >
|
||||
github.event.pull_request.merged
|
||||
&& (
|
||||
(
|
||||
github.event.action == 'closed'
|
||||
&& contains(github.event.pull_request.labels.*.name, 'apply-to-serverless')
|
||||
)
|
||||
||
|
||||
(
|
||||
github.event.action == 'labeled'
|
||||
&& github.event.label.name == 'apply-to-serverless'
|
||||
)
|
||||
)
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
repository: elastic/elasticsearch-js
|
||||
ref: main
|
||||
path: stack
|
||||
fetch-depth: 0
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
repository: elastic/elasticsearch-serverless-js
|
||||
ref: main
|
||||
path: serverless
|
||||
- name: Apply patch from stack to serverless
|
||||
id: apply-patch
|
||||
run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
|
||||
- uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
|
||||
with:
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: serverless
|
||||
title: "Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}"
|
||||
commit-message: "Apply patch from elastic/elasticsearch-js#${{ github.event.pull_request.number }}"
|
||||
body-path: /tmp/pr_body
|
||||
draft: "${{ steps.apply-patch.outputs.PR_DRAFT }}"
|
||||
add-paths: ":!*.rej"
|
||||
2
.github/workflows/stale.yml
vendored
@@ -8,7 +8,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
- uses: actions/stale@1160a2240286f5da8ec72b1c0816ce2481aabf84 # v8
with:
stale-issue-label: stale
stale-pr-label: stale
9
.gitignore
vendored
@@ -63,3 +63,12 @@ test/bundlers/**/bundle.js
test/bundlers/parcel-test/.parcel-cache

lib
junit-output
bun.lockb
test-results
processinfo
.tap
rest-api-spec
yaml-rest-tests
generated-tests
schema
@@ -64,7 +64,6 @@ test
scripts

# ci configuration
.ci
.travis.yml
.buildkite
certs
@@ -73,3 +72,8 @@ CODE_OF_CONDUCT.md
CONTRIBUTING.md

src
bun.lockb
.tap
rest-api-spec
yaml-rest-tests
generated-tests
11
Makefile
@@ -1,11 +0,0 @@
.PHONY: integration-setup
integration-setup: integration-cleanup
DETACH=true .ci/run-elasticsearch.sh

.PHONY: integration-cleanup
integration-cleanup:
docker container rm --force --volumes instance || true

.PHONY: integration
integration: integration-setup
npm run test:integration
@@ -6,7 +6,7 @@ metadata:
name: elasticsearch-js
spec:
type: library
owner: group:clients-team
owner: group:devtools-team
lifecycle: production

---
@@ -18,7 +18,7 @@ metadata:
description: elasticsearch-js - integration tests
spec:
type: buildkite-pipeline
owner: group:clients-team
owner: group:devtools-team
system: buildkite
implementation:
apiVersion: buildkite.elastic.dev/v1
@@ -29,7 +29,7 @@ spec:
repository: elastic/elasticsearch-js
pipeline_file: .buildkite/pipeline.yml
teams:
clients-team:
devtools-team:
access_level: MANAGE_BUILD_AND_READ
everyone:
access_level: READ_ONLY
@@ -37,14 +37,14 @@ spec:
build_pull_requests: false
build_branches: false
cancel_intermediate_builds: true
cancel_intermediate_builds_branch_filter: '!main'
cancel_intermediate_builds_branch_filter: "!main"
schedules:
main_semi_daily:
branch: 'main'
cronline: '0 */12 * * *'
8_12_semi_daily:
branch: '8.12'
cronline: '0 */12 * * *'
8_11_daily:
branch: '8.11'
cronline: '@daily'
main:
branch: "main"
cronline: "@daily"
8_x:
branch: "8.x"
cronline: "@daily"
8_14:
branch: "8.16"
cronline: "@daily"
@@ -167,16 +167,19 @@ const client = new Client({
----

|`nodeFilter`
a|`function` - Filters which node not to use for a request. +
a|`function` - Takes a `Connection` and returns `true` if it can be sent a request, otherwise `false`. +
_Default:_
[source,js]
----
function defaultNodeFilter (node) {
  // avoid master only nodes
  if (node.roles.master === true &&
      node.roles.data === false &&
      node.roles.ingest === false) {
    return false
function defaultNodeFilter (conn) {
  if (conn.roles != null) {
    if (
      // avoid master-only nodes
      conn.roles.master &&
      !conn.roles.data &&
      !conn.roles.ingest &&
      !conn.roles.ml
    ) return false
  }
  return true
}
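The `nodeFilter` option described above receives each `Connection` and returns `true` if that node may receive the request. A hedged sketch of passing a custom filter at instantiation (endpoint and credentials are placeholders):

```js
const { Client } = require('@elastic/elasticsearch')

// Example only: route requests away from nodes that advertise the ingest role.
const client = new Client({
  node: 'https://localhost:9200',   // placeholder endpoint
  auth: { apiKey: 'your-api-key' }, // placeholder credentials
  nodeFilter (conn) {
    if (conn.roles != null && conn.roles.ingest) return false
    return true
  }
})
```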
@ -252,19 +255,19 @@ const client = new Client({
|
||||
----
|
||||
|
||||
|`disablePrototypePoisoningProtection`
|
||||
|`boolean`, `'proto'`, `'constructor'` - By the default the client will protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more. If needed you can disable prototype poisoning protection entirely or one of the two checks. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
|
||||
_Default:_ `false`
|
||||
|`boolean`, `'proto'`, `'constructor'` - The client can protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more about this security concern. If needed, you can enable prototype poisoning protection entirely (`false`) or one of the two checks (`'proto'` or `'constructor'`). For performance reasons, it is disabled by default. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
|
||||
_Default:_ `true`
|
||||
|
||||
|`caFingerprint`
|
||||
|`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxResponseSize`
|
||||
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENTGH +
|
||||
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxCompressedResponseSize`
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENTGH +
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH +
|
||||
_Default:_ `null`
|
||||
|
||||
|===
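As a hedged sketch of how a few of the options above might be combined when instantiating the client (the node URL, fingerprint placeholder, and size limit are illustrative values, not recommendations):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',
  // re-enable prototype poisoning protection for both checks
  disablePrototypePoisoningProtection: false,
  // placeholder SHA256 digest of the CA certificate that signed the server's certificate
  caFingerprint: '<sha256-fingerprint-of-ca-cert>',
  // abort any response whose uncompressed size exceeds ~10 MB
  maxResponseSize: 10 * 1024 * 1024
})
----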
|
||||
|
||||
@ -1,6 +1,253 @@
|
||||
[[changelog-client]]
|
||||
== Release notes
|
||||
|
||||
[discrete]
|
||||
=== 8.18.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Ensure Apache Arrow ES|QL helper uses async iterator
|
||||
|
||||
The `esql.toArrowReader()` helper function was trying to return a `RecordBatchStreamReader`, a synchronous iterator, even though the `apache-arrow` package was, in most cases, automatically coercing it to an `AsyncRecordBatchStreamReader`, its asynchronous counterpart. The reader is now always returned as an async iterator.
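For context, a minimal sketch of how the helper is consumed after this fix (the query is illustrative):

[source,js]
----
const reader = await client.helpers
  .esql({ query: 'FROM sample_data | LIMIT 100' })
  .toArrowReader()

// the reader is always an async iterator of Arrow record batches
for await (const recordBatch of reader) {
  console.log(recordBatch.numRows)
}
----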
|
||||
|
||||
[discrete]
|
||||
=== 8.18.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix broken node roles and node filter
|
||||
|
||||
The docs describe a `nodeFilter` option on the client that, by default, filters nodes based on any `roles` values set at instantiation. At some point this functionality was partially disabled. This change restores the feature, ensuring it behaves as the documentation has described all along.
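As an illustration, a minimal sketch of supplying a custom `nodeFilter` at instantiation (the URL and role check are placeholders, not recommendations):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  // only send requests to connections that advertise the data role
  nodeFilter: (conn) => conn.roles == null || conn.roles.data === true
})
----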
|
||||
|
||||
[discrete]
|
||||
=== 8.18.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.18`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.18/release-notes-8.18.0.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved Cloud ID parsing
|
||||
|
||||
When using a Cloud ID as the `cloud` parameter to instantiate the client, that ID was assumed to be in the correct format. New assertions have been added to verify that format and throw a `ConfigurationError` if it is invalid. See https://github.com/elastic/elasticsearch-js/issues/2694[#2694].
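For reference, a minimal sketch of the `cloud` option that this change validates (the Cloud ID and API key are placeholders):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<deployment-name>:<base64-encoded-cloud-id>' },
  auth: { apiKey: '<api-key>' }
})
// an invalid Cloud ID now throws a ConfigurationError at instantiation
----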
|
||||
|
||||
[discrete]
|
||||
=== 8.17.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.17`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.17/release-notes-8.17.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.16.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.16`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.16.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.16`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
===== Drop testing artifacts from npm package
|
||||
|
||||
Tap, the unit testing tool used by this project, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
|
||||
|
||||
[discrete]
|
||||
=== 8.16.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix ECMAScript imports
|
||||
|
||||
Fixed package configuration to correctly support native ECMAScript `import` syntax.
|
||||
|
||||
[discrete]
|
||||
=== 8.16.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.16`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.16/release-notes-8.16.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Support Apache Arrow in ES|QL helper
|
||||
|
||||
The ES|QL helper can now return results as an Apache Arrow `Table` or `RecordBatchReader`, which enables high-performance calculations on ES|QL results, even if the response data is larger than the system's available memory. See <<esql-helper>> for more information.
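A brief sketch, with an illustrative query, of asking the ES|QL helper for an Apache Arrow `Table`:

[source,js]
----
const table = await client.helpers
  .esql({ query: 'FROM sample_data | LIMIT 100' })
  .toArrowTable()

// toArray() materializes the rows as plain JavaScript objects
console.log(table.toArray())
----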
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Pass prototype poisoning options to serializer correctly
|
||||
|
||||
The client's `disablePrototypePoisoningProtection` option was set to `true` by default, but when it was set to any other value it was ignored, making it impossible to enable prototype poisoning protection without providing a custom serializer implementation.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.15`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
===== Drop testing artifacts from npm package
|
||||
|
||||
Tap, the unit testing tool, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.15`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.15`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.15.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== OpenTelemetry zero-code instrumentation support
|
||||
|
||||
For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
|
||||
See {jsclient}/observability.html#_opentelemetry[the docs]
|
||||
for more information.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.1
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `8.14`
|
||||
|
||||
Updated types based on fixes and changes to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.14.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== ES|QL object API helper
|
||||
|
||||
A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
|
||||
A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
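A minimal sketch of the helper (the query is illustrative, and the destructured `records` array assumes the result shape described in the helper docs):

[source,js]
----
const { records } = await client.helpers
  .esql({ query: 'FROM sample_data | LIMIT 10' })
  .toRecords()

// each record is a plain object keyed by column name
for (const record of records) {
  console.log(record)
}
----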
|
||||
|
||||
[discrete]
|
||||
===== `onSuccess` callback added to bulk helper
|
||||
|
||||
The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
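A hedged sketch of the callback (the datasource, index name, and callback argument shape are illustrative assumptions based on the linked PR):

[source,js]
----
const result = await client.helpers.bulk({
  datasource: [{ user: 'kimchy' }, { user: 'elkbee' }],
  onDocument () {
    return { index: { _index: 'my-index' } }
  },
  // invoked once for each operation Elasticsearch reports as successful
  onSuccess ({ result, document }) {
    console.log('indexed', document)
  }
})
----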
|
||||
|
||||
[discrete]
|
||||
===== Request retries are more polite
|
||||
|
||||
https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== 8.13.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Pin @elastic/transport to `~8.4.1`
|
||||
|
||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
|
||||
|
||||
[discrete]
|
||||
=== 8.13.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.13.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
|
||||
|
||||
When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
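For context, a small sketch of the configuration this fix addresses (values are illustrative): a node supplied as a plain string now inherits client-level connection defaults such as `requestTimeout`.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200', // node given as a string, not a ConnectionOptions object
  requestTimeout: 60000 // now inherited by the connection created for that node
})
----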
|
||||
|
||||
[discrete]
|
||||
=== 8.12.3
|
||||
|
||||
@ -46,8 +293,6 @@ The failing state could be reached when a server's response times are slower tha
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
|
||||
|
||||
== Release notes
|
||||
|
||||
[discrete]
|
||||
=== 8.11.1
|
||||
|
||||
@ -138,7 +383,7 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.
|
||||
[discrete]
|
||||
===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
|
||||
|
||||
In the https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
|
||||
In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
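A hedged sketch of the pattern (the index name and added field are illustrative): `onDocument` can return a tuple of the action and the document to send in its place.

[source,js]
----
const result = await client.helpers.bulk({
  datasource: [{ user: 'kimchy' }, { user: 'elkbee' }],
  onDocument (doc) {
    return [
      { index: { _index: 'my-index' } },
      // a modified copy is sent instead of the original document
      { ...doc, imported_at: new Date().toISOString() }
    ]
  }
})
----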
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
@ -7,5 +7,6 @@ section, you can see the possible options that you can use to configure it.
|
||||
|
||||
* <<basic-config>>
|
||||
* <<advanced-config>>
|
||||
* <<timeout-best-practices>>
|
||||
* <<child>>
|
||||
* <<client-testing>>
|
||||
|
||||
@ -349,7 +349,7 @@ In this case, the result will be:
|
||||
body: object | boolean
|
||||
statusCode: number
|
||||
headers: object
|
||||
warnings: [string],
|
||||
warnings: string[],
|
||||
meta: object
|
||||
}
|
||||
----
|
||||
@ -410,19 +410,23 @@ The supported request specific options are:
|
||||
[cols=2*]
|
||||
|===
|
||||
|`ignore`
|
||||
|`[number]` - HTTP status codes which should not be considered errors for this request. +
|
||||
|`number[]` - HTTP status codes which should not be considered errors for this request. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`requestTimeout`
|
||||
|`number` - Max request timeout for the request in milliseconds, it overrides the client default. +
|
||||
|`number | string` - Max request timeout for the request in milliseconds, it overrides the client default. +
|
||||
_Default:_ `30000`
|
||||
|
||||
|`retryOnTimeout`
|
||||
|`boolean` - Retry requests that have timed out. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`maxRetries`
|
||||
|`number` - Max number of retries for the request, it overrides the client default. +
|
||||
_Default:_ `3`
|
||||
|
||||
|`compression`
|
||||
|`string, boolean` - Enables body compression for the request. +
|
||||
|`string | boolean` - Enables body compression for the request. +
|
||||
_Options:_ `false`, `'gzip'` +
|
||||
_Default:_ `false`
|
||||
|
||||
@ -446,6 +450,10 @@ _Default:_ `null`
|
||||
|`any` - Custom object per request. _(you can use it to pass data to the client's events)_ +
|
||||
_Default:_ `null`
|
||||
|
||||
|`opaqueId`
|
||||
|`string` - Set the `X-Opaque-Id` HTTP header. See {ref}/api-conventions.html#x-opaque-id
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxResponseSize`
|
||||
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
|
||||
_Default:_ `null`
|
||||
@ -458,6 +466,17 @@ _Default:_ `null`
|
||||
|`AbortSignal` - The AbortSignal instance to allow request abortion. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`meta`
|
||||
|`boolean` - Rather than returning the body, return an object containing `body`, `statusCode`, `headers` and `meta` keys +
|
||||
_Default:_ `false`
|
||||
|
||||
|`redaction`
|
||||
|`object` - Options for redacting potentially sensitive data from error metadata. See <<redaction>>.
|
||||
|
||||
|`retryBackoff`
|
||||
|`(min: number, max: number, attempt: number) => number;` - A function that calculates how long to sleep, in seconds, before the next request retry +
|
||||
_Default:_ A built-in function that uses exponential backoff with jitter. A sketch combining several of these options follows this table.
|
||||
|
||||
|===
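As a sketch of how several of the request-specific options above can be combined (the index, query, and backoff formula are illustrative), options are passed as the second argument of any API method:

[source,js]
----
const response = await client.search(
  { index: 'my-index', query: { match_all: {} } },
  {
    ignore: [404],
    maxRetries: 5,
    opaqueId: 'my-search-request',
    meta: true, // return { body, statusCode, headers, meta } instead of just the body
    retryBackoff: (min, max, attempt) => Math.min(max, min * 2 ** attempt)
  }
)
console.log(response.statusCode)
----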
|
||||
|
||||
[discrete]
|
||||
|
||||
10
docs/doc_examples/00272f75a6afea91f8554ef7cda0c1f2.asciidoc
Normal file
10
docs/doc_examples/00272f75a6afea91f8554ef7cda0c1f2.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.clearCachedRealms({
|
||||
realms: "default_file,ldap1",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
11
docs/doc_examples/004743b9c9f61588926ccf734696b713.asciidoc
Normal file
11
docs/doc_examples/004743b9c9f61588926ccf734696b713.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.forcemerge({
|
||||
index: ".ds-my-data-stream-2099.03.07-000001",
|
||||
max_num_segments: 1,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
19
docs/doc_examples/004a17b42ab5155bb61da797a006fa9f.asciidoc
Normal file
19
docs/doc_examples/004a17b42ab5155bb61da797a006fa9f.asciidoc
Normal file
@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
query: {
|
||||
pinned: {
|
||||
ids: ["1", "4", "100"],
|
||||
organic: {
|
||||
match: {
|
||||
description: "iphone",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
20
docs/doc_examples/006e0e16c9f1da58c0bfe57377f7fc38.asciidoc
Normal file
20
docs/doc_examples/006e0e16c9f1da58c0bfe57377f7fc38.asciidoc
Normal file
@ -0,0 +1,20 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000001",
|
||||
settings: {
|
||||
analysis: {
|
||||
analyzer: {
|
||||
my_analyzer: {
|
||||
tokenizer: "whitespace",
|
||||
filter: ["stemmer"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
40
docs/doc_examples/007179b5e241da650562a5f0a5007823.asciidoc
Normal file
40
docs/doc_examples/007179b5e241da650562a5f0a5007823.asciidoc
Normal file
@ -0,0 +1,40 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.watcher.putWatch({
|
||||
id: "cluster_health_watch",
|
||||
trigger: {
|
||||
schedule: {
|
||||
interval: "10s",
|
||||
},
|
||||
},
|
||||
input: {
|
||||
http: {
|
||||
request: {
|
||||
host: "localhost",
|
||||
port: 9200,
|
||||
path: "/_cluster/health",
|
||||
},
|
||||
},
|
||||
},
|
||||
condition: {
|
||||
compare: {
|
||||
"ctx.payload.status": {
|
||||
eq: "red",
|
||||
},
|
||||
},
|
||||
},
|
||||
actions: {
|
||||
send_email: {
|
||||
email: {
|
||||
to: "username@example.org",
|
||||
subject: "Cluster Status Warning",
|
||||
body: "Cluster status is RED",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -0,0 +1,8 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.migration.postFeatureUpgrade();
|
||||
console.log(response);
|
||||
----
|
||||
16
docs/doc_examples/0091fc75271b1fbbd4269622a4881e8b.asciidoc
Normal file
16
docs/doc_examples/0091fc75271b1fbbd4269622a4881e8b.asciidoc
Normal file
@ -0,0 +1,16 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index",
|
||||
query: {
|
||||
match: {
|
||||
"http.clientip": "40.135.0.0",
|
||||
},
|
||||
},
|
||||
fields: ["http.clientip"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
11
docs/doc_examples/00ad41bde67beac991534ae0e04b1296.asciidoc
Normal file
11
docs/doc_examples/00ad41bde67beac991534ae0e04b1296.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getDataStream({
|
||||
name: "my-data-stream",
|
||||
filter_path: "data_streams.indices.index_name",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -0,0 +1,8 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.shardStores();
|
||||
console.log(response);
|
||||
----
|
||||
15
docs/doc_examples/00c05aa931fc985985e3e21c93cf43ff.asciidoc
Normal file
15
docs/doc_examples/00c05aa931fc985985e3e21c93cf43ff.asciidoc
Normal file
@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.renderSearchTemplate({
|
||||
source: '{ "query": {{#toJson}}my_query{{/toJson}} }',
|
||||
params: {
|
||||
my_query: {
|
||||
match_all: {},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
12
docs/doc_examples/00d65f7b9daa1c6b18eedd8ace206bae.asciidoc
Normal file
12
docs/doc_examples/00d65f7b9daa1c6b18eedd8ace206bae.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.analyze({
|
||||
tokenizer: "standard",
|
||||
filter: ["asciifolding"],
|
||||
text: "açaí à la carte",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
67
docs/doc_examples/00e0c964c79fcc1876ab957da2ffce82.asciidoc
Normal file
67
docs/doc_examples/00e0c964c79fcc1876ab957da2ffce82.asciidoc
Normal file
@ -0,0 +1,67 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "italian_example",
|
||||
settings: {
|
||||
analysis: {
|
||||
filter: {
|
||||
italian_elision: {
|
||||
type: "elision",
|
||||
articles: [
|
||||
"c",
|
||||
"l",
|
||||
"all",
|
||||
"dall",
|
||||
"dell",
|
||||
"nell",
|
||||
"sull",
|
||||
"coll",
|
||||
"pell",
|
||||
"gl",
|
||||
"agl",
|
||||
"dagl",
|
||||
"degl",
|
||||
"negl",
|
||||
"sugl",
|
||||
"un",
|
||||
"m",
|
||||
"t",
|
||||
"s",
|
||||
"v",
|
||||
"d",
|
||||
],
|
||||
articles_case: true,
|
||||
},
|
||||
italian_stop: {
|
||||
type: "stop",
|
||||
stopwords: "_italian_",
|
||||
},
|
||||
italian_keywords: {
|
||||
type: "keyword_marker",
|
||||
keywords: ["esempio"],
|
||||
},
|
||||
italian_stemmer: {
|
||||
type: "stemmer",
|
||||
language: "light_italian",
|
||||
},
|
||||
},
|
||||
analyzer: {
|
||||
rebuilt_italian: {
|
||||
tokenizer: "standard",
|
||||
filter: [
|
||||
"italian_elision",
|
||||
"lowercase",
|
||||
"italian_stop",
|
||||
"italian_keywords",
|
||||
"italian_stemmer",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
19
docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc
Normal file
19
docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc
Normal file
@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "text_embedding",
|
||||
inference_id: "my-e5-model",
|
||||
inference_config: {
|
||||
service: "elasticsearch",
|
||||
service_settings: {
|
||||
num_allocations: 1,
|
||||
num_threads: 1,
|
||||
model_id: ".multilingual-e5-small",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
13
docs/doc_examples/010d5e901a2690fa7b2396edbe6cd463.asciidoc
Normal file
13
docs/doc_examples/010d5e901a2690fa7b2396edbe6cd463.asciidoc
Normal file
@ -0,0 +1,13 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putIndexTemplate({
|
||||
name: "my-data-stream-template",
|
||||
index_patterns: ["my-data-stream*"],
|
||||
data_stream: {},
|
||||
priority: 500,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,15 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.putSettings({
|
||||
body: {
|
||||
transient: {
|
||||
'cluster.routing.use_adaptive_replica_selection': false
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
@ -1,18 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.update({
|
||||
index: 'test',
|
||||
id: '1',
|
||||
body: {
|
||||
doc: {
|
||||
name: 'new_name'
|
||||
},
|
||||
detect_noop: false
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
46
docs/doc_examples/015e6e6132b6d6d44bddb06bc3b316ed.asciidoc
Normal file
46
docs/doc_examples/015e6e6132b6d6d44bddb06bc3b316ed.asciidoc
Normal file
@ -0,0 +1,46 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
range: {
|
||||
year: {
|
||||
gt: 2023,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "elastic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
rank_constant: 1,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
aggs: {
|
||||
topics: {
|
||||
terms: {
|
||||
field: "topic",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
24
docs/doc_examples/0163af36c8472ac0c5160c8b716f5b26.asciidoc
Normal file
24
docs/doc_examples/0163af36c8472ac0c5160c8b716f5b26.asciidoc
Normal file
@ -0,0 +1,24 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "sales",
|
||||
size: 0,
|
||||
filter_path: "aggregations",
|
||||
query: {
|
||||
term: {
|
||||
type: "t-shirt",
|
||||
},
|
||||
},
|
||||
aggs: {
|
||||
avg_price: {
|
||||
avg: {
|
||||
field: "price",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
18
docs/doc_examples/0165d22da5f2fc7678392b31d8eb5566.asciidoc
Normal file
18
docs/doc_examples/0165d22da5f2fc7678392b31d8eb5566.asciidoc
Normal file
@ -0,0 +1,18 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "rerank",
|
||||
inference_id: "my-rerank-model",
|
||||
inference_config: {
|
||||
service: "cohere",
|
||||
service_settings: {
|
||||
model_id: "rerank-english-v3.0",
|
||||
api_key: "{{COHERE_API_KEY}}",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
17
docs/doc_examples/016f3147dae9ff2c3e831257ae470361.asciidoc
Normal file
17
docs/doc_examples/016f3147dae9ff2c3e831257ae470361.asciidoc
Normal file
@ -0,0 +1,17 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.updateAliases({
|
||||
actions: [
|
||||
{
|
||||
add: {
|
||||
index: "logs-*",
|
||||
alias: "logs",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
26
docs/doc_examples/019e329ed5a930aef825266822e7377a.asciidoc
Normal file
26
docs/doc_examples/019e329ed5a930aef825266822e7377a.asciidoc
Normal file
@ -0,0 +1,26 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "asciifold_example",
|
||||
settings: {
|
||||
analysis: {
|
||||
analyzer: {
|
||||
standard_asciifolding: {
|
||||
tokenizer: "standard",
|
||||
filter: ["my_ascii_folding"],
|
||||
},
|
||||
},
|
||||
filter: {
|
||||
my_ascii_folding: {
|
||||
type: "asciifolding",
|
||||
preserve_original: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
36
docs/doc_examples/01ae196538fac197eedbbf458a4ef31b.asciidoc
Normal file
36
docs/doc_examples/01ae196538fac197eedbbf458a4ef31b.asciidoc
Normal file
@ -0,0 +1,36 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "idx",
|
||||
settings: {
|
||||
index: {
|
||||
mapping: {
|
||||
source: {
|
||||
mode: "synthetic",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
properties: {
|
||||
kwd: {
|
||||
type: "keyword",
|
||||
ignore_above: 3,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "idx",
|
||||
id: 1,
|
||||
document: {
|
||||
kwd: ["foo", "foo", "bang", "bar", "baz"],
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
37
docs/doc_examples/01b23f09d2b7f140faf649eadbbf3ac3.asciidoc
Normal file
37
docs/doc_examples/01b23f09d2b7f140faf649eadbbf3ac3.asciidoc
Normal file
@ -0,0 +1,37 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.cluster.putComponentTemplate({
|
||||
name: "component_template1",
|
||||
template: {
|
||||
mappings: {
|
||||
properties: {
|
||||
"@timestamp": {
|
||||
type: "date",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.cluster.putComponentTemplate({
|
||||
name: "runtime_component_template",
|
||||
template: {
|
||||
mappings: {
|
||||
runtime: {
|
||||
day_of_week: {
|
||||
type: "keyword",
|
||||
script: {
|
||||
source:
|
||||
"emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ENGLISH))",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
10
docs/doc_examples/01bc0f2ed30eb3dd23511d01ce0ac6e1.asciidoc
Normal file
10
docs/doc_examples/01bc0f2ed30eb3dd23511d01ce0ac6e1.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transform.startTransform({
|
||||
transform_id: "ecommerce_transform",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
12
docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc
Normal file
12
docs/doc_examples/01cd0ea360282a2c591a366679d7187d.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.tasks.list({
|
||||
human: "true",
|
||||
detailed: "true",
|
||||
actions: "indices:data/write/bulk",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
17
docs/doc_examples/01da9e0620e48270617fc248e6415cac.asciidoc
Normal file
17
docs/doc_examples/01da9e0620e48270617fc248e6415cac.asciidoc
Normal file
@ -0,0 +1,17 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-000001",
|
||||
aggs: {
|
||||
"my-agg-name": {
|
||||
terms: {
|
||||
field: "my-field",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/01dc7bdc223bd651574ed2d3954a5b1c.asciidoc
Normal file
10
docs/doc_examples/01dc7bdc223bd651574ed2d3954a5b1c.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.watcher.executeWatch({
|
||||
id: "my_watch",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
39
docs/doc_examples/01f50acf7998b24969f451e922d145eb.asciidoc
Normal file
39
docs/doc_examples/01f50acf7998b24969f451e922d145eb.asciidoc
Normal file
@ -0,0 +1,39 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "basque_example",
|
||||
settings: {
|
||||
analysis: {
|
||||
filter: {
|
||||
basque_stop: {
|
||||
type: "stop",
|
||||
stopwords: "_basque_",
|
||||
},
|
||||
basque_keywords: {
|
||||
type: "keyword_marker",
|
||||
keywords: ["Adibidez"],
|
||||
},
|
||||
basque_stemmer: {
|
||||
type: "stemmer",
|
||||
language: "basque",
|
||||
},
|
||||
},
|
||||
analyzer: {
|
||||
rebuilt_basque: {
|
||||
tokenizer: "standard",
|
||||
filter: [
|
||||
"lowercase",
|
||||
"basque_stop",
|
||||
"basque_keywords",
|
||||
"basque_stemmer",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/020c95db88ef356093f03be84893ddf9.asciidoc
Normal file
10
docs/doc_examples/020c95db88ef356093f03be84893ddf9.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ccr.followStats({
|
||||
index: "<index>",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
20
docs/doc_examples/020de6b6cb960a76297452725a38889f.asciidoc
Normal file
20
docs/doc_examples/020de6b6cb960a76297452725a38889f.asciidoc
Normal file
@ -0,0 +1,20 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
query: {
|
||||
has_child: {
|
||||
type: "child",
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
max_children: 10,
|
||||
min_children: 2,
|
||||
score_mode: "min",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
19
docs/doc_examples/0246f73cc2ed3dfec577119e8cd15404.asciidoc
Normal file
19
docs/doc_examples/0246f73cc2ed3dfec577119e8cd15404.asciidoc
Normal file
@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putMapping({
|
||||
index: "my-index-000001",
|
||||
properties: {
|
||||
name: {
|
||||
properties: {
|
||||
last: {
|
||||
type: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
28
docs/doc_examples/025155da86802ebf4c3aeee5aab692f9.asciidoc
Normal file
28
docs/doc_examples/025155da86802ebf4c3aeee5aab692f9.asciidoc
Normal file
@ -0,0 +1,28 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "sales",
|
||||
mappings: {
|
||||
properties: {
|
||||
tags: {
|
||||
type: "keyword",
|
||||
},
|
||||
comments: {
|
||||
type: "nested",
|
||||
properties: {
|
||||
username: {
|
||||
type: "keyword",
|
||||
},
|
||||
comment: {
|
||||
type: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
11
docs/doc_examples/02520ac7816b2c4cf8fb413fd16122f2.asciidoc
Normal file
11
docs/doc_examples/02520ac7816b2c4cf8fb413fd16122f2.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ml.flushJob({
|
||||
job_id: "low_request_rate",
|
||||
calc_interim: true,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,14 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: 'twitter',
|
||||
size: '0',
|
||||
q: 'extra:test',
|
||||
filter_path: 'hits.total'
|
||||
})
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
23
docs/doc_examples/0264e994a7e68561e2ca6be0f0d90ee9.asciidoc
Normal file
23
docs/doc_examples/0264e994a7e68561e2ca6be0f0d90ee9.asciidoc
Normal file
@ -0,0 +1,23 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
aggs: {
|
||||
JapaneseCars: {
|
||||
terms: {
|
||||
field: "make",
|
||||
include: ["mazda", "honda"],
|
||||
},
|
||||
},
|
||||
ActiveCarManufacturers: {
|
||||
terms: {
|
||||
field: "make",
|
||||
exclude: ["rover", "jensen"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/0280247e0cf2e561c548f22c9fb31163.asciidoc
Normal file
10
docs/doc_examples/0280247e0cf2e561c548f22c9fb31163.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.invalidateToken({
|
||||
username: "myuser",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
28
docs/doc_examples/02853293a5b7cd9cc7a886eb413bbeb6.asciidoc
Normal file
28
docs/doc_examples/02853293a5b7cd9cc7a886eb413bbeb6.asciidoc
Normal file
@ -0,0 +1,28 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.analyze({
|
||||
tokenizer: "keyword",
|
||||
char_filter: [
|
||||
{
|
||||
type: "mapping",
|
||||
mappings: [
|
||||
"٠ => 0",
|
||||
"١ => 1",
|
||||
"٢ => 2",
|
||||
"٣ => 3",
|
||||
"٤ => 4",
|
||||
"٥ => 5",
|
||||
"٦ => 6",
|
||||
"٧ => 7",
|
||||
"٨ => 8",
|
||||
"٩ => 9",
|
||||
],
|
||||
},
|
||||
],
|
||||
text: "My license plate is ٢٥٠١٥",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,43 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
body: {
|
||||
aggs: {
|
||||
countries: {
|
||||
terms: {
|
||||
field: 'artist.country',
|
||||
order: [
|
||||
{
|
||||
'rock>playback_stats.avg': 'desc'
|
||||
},
|
||||
{
|
||||
_count: 'desc'
|
||||
}
|
||||
]
|
||||
},
|
||||
aggs: {
|
||||
rock: {
|
||||
filter: {
|
||||
term: {
|
||||
genre: 'rock'
|
||||
}
|
||||
},
|
||||
aggs: {
|
||||
playback_stats: {
|
||||
stats: {
|
||||
field: 'play_count'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
21
docs/doc_examples/029de2f5383a42e1ac4ca1565bd2a130.asciidoc
Normal file
21
docs/doc_examples/029de2f5383a42e1ac4ca1565bd2a130.asciidoc
Normal file
@ -0,0 +1,21 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000001",
|
||||
mappings: {
|
||||
properties: {
|
||||
full_name: {
|
||||
type: "text",
|
||||
index_prefixes: {
|
||||
min_chars: 1,
|
||||
max_chars: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
16
docs/doc_examples/02b00f21e9d23d82276ace0dd154d779.asciidoc
Normal file
16
docs/doc_examples/02b00f21e9d23d82276ace0dd154d779.asciidoc
Normal file
@ -0,0 +1,16 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-000001",
|
||||
routing: "user1,user2",
|
||||
query: {
|
||||
match: {
|
||||
title: "document",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/02b6aa3e5652839f03de3a655854b897.asciidoc
Normal file
10
docs/doc_examples/02b6aa3e5652839f03de3a655854b897.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-data-stream",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
24
docs/doc_examples/02c48d461536709c3fc8a0e8147c3787.asciidoc
Normal file
24
docs/doc_examples/02c48d461536709c3fc8a0e8147c3787.asciidoc
Normal file
@ -0,0 +1,24 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "pipelineB",
|
||||
description: "outer pipeline",
|
||||
processors: [
|
||||
{
|
||||
pipeline: {
|
||||
name: "pipelineA",
|
||||
},
|
||||
},
|
||||
{
|
||||
set: {
|
||||
field: "outer_pipeline_set",
|
||||
value: "outer",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/02f65c6bab8f40bf3ce18160623d1870.asciidoc
Normal file
10
docs/doc_examples/02f65c6bab8f40bf3ce18160623d1870.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getTemplate({
|
||||
name: "template_1",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
38
docs/doc_examples/02fad6b80bb29c2a7e6840db2fc67b18.asciidoc
Normal file
38
docs/doc_examples/02fad6b80bb29c2a7e6840db2fc67b18.asciidoc
Normal file
@ -0,0 +1,38 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000001",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_wildcard: {
|
||||
type: "wildcard",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: 1,
|
||||
document: {
|
||||
my_wildcard: "This string can be quite lengthy",
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.search({
|
||||
index: "my-index-000001",
|
||||
query: {
|
||||
wildcard: {
|
||||
my_wildcard: {
|
||||
value: "*quite*lengthy",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
12
docs/doc_examples/0308cbd85281f95fc458042afe3f587d.asciidoc
Normal file
12
docs/doc_examples/0308cbd85281f95fc458042afe3f587d.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.get({
|
||||
index: "my-index-000001",
|
||||
id: 0,
|
||||
_source: "*.id",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/032eac56b798bea29390e102538f4a26.asciidoc
Normal file
10
docs/doc_examples/032eac56b798bea29390e102538f4a26.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.refresh({
|
||||
index: "my-index-000001,my-index-000002",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,22 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
body: {
|
||||
aggs: {
|
||||
genres: {
|
||||
terms: {
|
||||
script: {
|
||||
source: "doc['genre'].value",
|
||||
lang: 'painless'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
26
docs/doc_examples/033838729cfb5d1a28d04f69ee78d924.asciidoc
Normal file
26
docs/doc_examples/033838729cfb5d1a28d04f69ee78d924.asciidoc
Normal file
@ -0,0 +1,26 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.index({
|
||||
index: "example",
|
||||
document: {
|
||||
location: {
|
||||
type: "Polygon",
|
||||
orientation: "LEFT",
|
||||
coordinates: [
|
||||
[
|
||||
[-177, 10],
|
||||
[176, 15],
|
||||
[172, 0],
|
||||
[176, -15],
|
||||
[-177, -10],
|
||||
[-177, 10],
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
36
docs/doc_examples/0350410d11579f4e876c798ce1eaef5b.asciidoc
Normal file
36
docs/doc_examples/0350410d11579f4e876c798ce1eaef5b.asciidoc
Normal file
@ -0,0 +1,36 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: 5,
|
||||
refresh: "true",
|
||||
document: {
|
||||
query: {
|
||||
bool: {
|
||||
should: [
|
||||
{
|
||||
match: {
|
||||
message: {
|
||||
query: "Japanese art",
|
||||
_name: "query1",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
match: {
|
||||
message: {
|
||||
query: "Holand culture",
|
||||
_name: "query2",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
38
docs/doc_examples/0350ff5ebb8207c004eb771088339cb4.asciidoc
Normal file
38
docs/doc_examples/0350ff5ebb8207c004eb771088339cb4.asciidoc
Normal file
@ -0,0 +1,38 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "example-index",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
term: {
|
||||
text: "blue shoes sale",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
sparse_vector: {
|
||||
field: "ml.tokens",
|
||||
inference_id: "my_elser_model",
|
||||
query: "What blue shoes are on sale?",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
rank_window_size: 50,
|
||||
rank_constant: 20,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
29
docs/doc_examples/03582fc93683e573062bcfda45e01d69.asciidoc
Normal file
29
docs/doc_examples/03582fc93683e573062bcfda45e01d69.asciidoc
Normal file
@ -0,0 +1,29 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index-000001",
|
||||
settings: {
|
||||
analysis: {
|
||||
analyzer: {
|
||||
my_custom_analyzer: {
|
||||
type: "custom",
|
||||
tokenizer: "standard",
|
||||
char_filter: ["html_strip"],
|
||||
filter: ["lowercase", "asciifolding"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.indices.analyze({
|
||||
index: "my-index-000001",
|
||||
analyzer: "my_custom_analyzer",
|
||||
text: "Is this déjà vu</b>?",
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
11
docs/doc_examples/035a7a919eb6513b4769a3727b7d6447.asciidoc
Normal file
11
docs/doc_examples/035a7a919eb6513b4769a3727b7d6447.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.analyze({
|
||||
analyzer: "whitespace",
|
||||
text: "The quick brown fox.",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -0,0 +1,8 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.getServiceAccounts();
|
||||
console.log(response);
|
||||
----
|
||||
14
docs/doc_examples/03b1d76fa0b773d5b7d74ecb7e1e1a80.asciidoc
Normal file
14
docs/doc_examples/03b1d76fa0b773d5b7d74ecb7e1e1a80.asciidoc
Normal file
@ -0,0 +1,14 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.snapshot.restore({
|
||||
repository: "my_repository",
|
||||
snapshot: "my_snapshot_2099.05.06",
|
||||
indices: "my-index,logs-my_app-default",
|
||||
rename_pattern: "(.+)",
|
||||
rename_replacement: "restored-$1",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
11
docs/doc_examples/03c4b815bf1e6a8c5cfcc6ddf94bc093.asciidoc
Normal file
11
docs/doc_examples/03c4b815bf1e6a8c5cfcc6ddf94bc093.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.sql.query({
|
||||
format: "txt",
|
||||
query: "SELECT * FROM library ORDER BY page_count DESC LIMIT 5",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
12
docs/doc_examples/04412d11783dac25b5fd2ec5407078a3.asciidoc
Normal file
12
docs/doc_examples/04412d11783dac25b5fd2ec5407078a3.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.connector.updateApiKeyId({
|
||||
connector_id: "my-connector",
|
||||
api_key_id: "my-api-key-id",
|
||||
api_key_secret_id: "my-connector-secret-id",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
12
docs/doc_examples/044b2f99e7438e408685b258db17f863.asciidoc
Normal file
12
docs/doc_examples/044b2f99e7438e408685b258db17f863.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.eql.search({
|
||||
index: "my-data-stream",
|
||||
query: '\n process where process.name == "regsvr32.exe"\n ',
|
||||
size: 50,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
52
docs/doc_examples/046b2249bbc49e77848c114cee940f17.asciidoc
Normal file
52
docs/doc_examples/046b2249bbc49e77848c114cee940f17.asciidoc
Normal file
@ -0,0 +1,52 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index",
|
||||
retriever: {
|
||||
rrf: {
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
multi_match: {
|
||||
query: "How is the weather in Jamaica?",
|
||||
fields: ["title", "description"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
text_expansion: {
|
||||
"ml.inference.title_expanded.predicted_value": {
|
||||
model_id: ".elser_model_2",
|
||||
model_text: "How is the weather in Jamaica?",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
text_expansion: {
|
||||
"ml.inference.description_expanded.predicted_value": {
|
||||
model_id: ".elser_model_2",
|
||||
model_text: "How is the weather in Jamaica?",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
window_size: 10,
|
||||
rank_constant: 20,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
27
docs/doc_examples/0470d7101637568b9d3d1239f06325a7.asciidoc
Normal file
27
docs/doc_examples/0470d7101637568b9d3d1239f06325a7.asciidoc
Normal file
@ -0,0 +1,27 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "PUT",
|
||||
path: "/_internal/desired_nodes/<history_id>/<version>",
|
||||
body: {
|
||||
nodes: [
|
||||
{
|
||||
settings: {
|
||||
"node.name": "instance-000187",
|
||||
"node.external_id": "instance-000187",
|
||||
"node.roles": ["data_hot", "master"],
|
||||
"node.attr.data": "hot",
|
||||
"node.attr.logical_availability_zone": "zone-0",
|
||||
},
|
||||
processors: 8,
|
||||
memory: "58gb",
|
||||
storage: "2tb",
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -4,20 +4,14 @@
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
body: {
|
||||
query: {
|
||||
multi_match: {
|
||||
query: 'Will Smith',
|
||||
type: 'cross_fields',
|
||||
fields: [
|
||||
'first_name',
|
||||
'last_name'
|
||||
],
|
||||
operator: 'and'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
query: "Will Smith",
|
||||
type: "cross_fields",
|
||||
fields: ["first_name", "last_name"],
|
||||
operator: "and",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
|
||||
10
docs/doc_examples/048652b6abfe195da8ea8cef10ee01b1.asciidoc
Normal file
10
docs/doc_examples/048652b6abfe195da8ea8cef10ee01b1.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transform.resetTransform({
|
||||
transform_id: "ecommerce_transform",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,17 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.create({
|
||||
index: 'twitter',
|
||||
id: '1',
|
||||
body: {
|
||||
user: 'kimchy',
|
||||
post_date: '2009-11-15T14:12:12',
|
||||
message: 'trying out Elasticsearch'
|
||||
}
|
||||
})
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
31
docs/doc_examples/04d586a536061ec1045d0bb2dc3d1a5f.asciidoc
Normal file
31
docs/doc_examples/04d586a536061ec1045d0bb2dc3d1a5f.asciidoc
Normal file
@ -0,0 +1,31 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "set_os",
|
||||
description: "sets the value of host.os.name from the field os",
|
||||
processors: [
|
||||
{
|
||||
set: {
|
||||
field: "host.os.name",
|
||||
value: "{{{os}}}",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.ingest.simulate({
|
||||
id: "set_os",
|
||||
docs: [
|
||||
{
|
||||
_source: {
|
||||
os: "Ubuntu",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
----
|
||||
27
docs/doc_examples/04d6ce0c903bd468afbecd3aa1c4a78a.asciidoc
Normal file
27
docs/doc_examples/04d6ce0c903bd468afbecd3aa1c4a78a.asciidoc
Normal file
@ -0,0 +1,27 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "my-pipeline-id",
|
||||
description: "My optional pipeline description",
|
||||
processors: [
|
||||
{
|
||||
set: {
|
||||
description: "My optional processor description",
|
||||
field: "my-keyword-field",
|
||||
value: "foo",
|
||||
},
|
||||
},
|
||||
],
|
||||
_meta: {
|
||||
reason: "set my-keyword-field to foo",
|
||||
serialization: {
|
||||
class: "MyPipeline",
|
||||
id: 10,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||