Compare commits

...

24 Commits

Author SHA1 Message Date
7733eec328 Prepare 7.17.14 (#2294)
* Prepare 7.17.14

* Tweaks to Github action
2024-07-02 11:33:33 -05:00
d614d95b8d Do not retry on timeout by default (#2293)
* Do not retry on timeout by default

* Stop testing on Node.js 12

No longer available on actions/setup-node

* Node.js 14.x is no longer available for testing either

* Fix linter issue

* Update acceptance tests

* Add retryOnTimeout to type defs

* Linter cleanup

* Drop code coverage step from Github action
2024-06-28 13:28:52 -05:00
af385f0bac Update changelog for 7.17.13 (#2020) 2023-09-26 11:11:58 -05:00
02c5b8664e Skip adding new nodes that aren't ready yet (#1994) 2023-08-31 12:23:49 -05:00
63cd655e79 Bump 7.17.12-canary.1 2023-07-14 11:03:09 -05:00
11a5711409 7.17.12 changelog (#1941) 2023-07-14 10:20:00 -05:00
3f01fafd9e [Backport 7.17] fix: add types to exports (v7.x) (#1930) (#1940)
Co-authored-by: Nicolas Morel <nicolas@morel.io>
2023-07-13 16:12:31 -05:00
cca9a7b212 Bumps 7.17 to 7.17.12 (#1926)
Co-authored-by: Josh Mock <joshua.mock@elastic.co>
2023-06-29 14:13:50 -05:00
a8c8d2c712 Changelog for 7.17.11 (#1923) 2023-06-29 13:42:16 -05:00
4a88f2d5e5 Type declaration fix for 7.17.11 (#1927) 2023-06-29 13:36:27 -05:00
acbd146d53 Bump canary version to 7.17.11-canary.2 2023-06-27 16:28:30 -05:00
9eb3df5d28 Connection test should support IPv6 2023-06-27 16:26:51 -05:00
3d3263eaf4 API changes for 7.17.11 2023-06-27 16:07:38 -05:00
56e28766c8 Backport bulk helper index drift bugfix to 7.x (#1915) 2023-06-15 11:14:56 -05:00
a59227bea8 Bumps 7.17 to 7.17.11 (#1860) 2023-05-02 10:05:56 -05:00
4dd2354b9d [7.17] [DOCS] Includes source_branch in docs index 2023-02-22 07:45:48 -06:00
0b5d9ed3b1 Bumps 7.17 to 7.17.9 2023-01-03 08:34:58 -06:00
1b17df3d67 Bumps 7.17 to 7.17.8 2022-10-25 07:59:09 -05:00
b68d71a528 Fix docs URLs to use '7.17' instead of 'master' 2022-09-27 06:40:17 -05:00
64caf21ea2 Bumps to version 7.17.7 2022-08-30 07:43:48 -05:00
1ab2d86787 Bumps to version 7.17.6 2022-07-07 11:30:22 -05:00
c1c4ebc607 Bumps to version 7.17.5 (#1702) 2022-05-30 10:53:21 +02:00
10e15039f4 Add make.sh for v7 (#1701) 2022-05-25 08:40:47 +02:00
7f13a73bd3 More lenient parameter checks (v7) (#1663) 2022-03-30 13:34:46 +02:00
26 changed files with 634 additions and 164 deletions

@@ -1,4 +1,4 @@
ARG NODE_JS_VERSION=10
ARG NODE_JS_VERSION=16
FROM node:${NODE_JS_VERSION}
# Create app directory

.ci/make.mjs (new file)

@@ -0,0 +1,107 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* global $ argv */
'use strict'
import 'zx/globals'
import { readFile, writeFile } from 'fs/promises'
import assert from 'assert'
import { join } from 'desm'
import semver from 'semver'
assert(typeof argv.task === 'string', 'Missing task parameter')
switch (argv.task) {
case 'release':
release(argv._).catch(onError)
break
case 'bump':
bump(argv._).catch(onError)
break
case 'codegen':
codegen(argv._).catch(onError)
break
default:
console.log(`Unknown task: ${argv.task}`)
process.exit(1)
}
async function release (args) {
assert(args.length === 2, 'Release task expects two parameters')
let [version, outputFolder] = args
if (process.env.WORKFLOW === 'snapshot' && !version.endsWith('SNAPSHOT')) {
version = `${version}-SNAPSHOT`
}
await bump([version])
const packageJson = JSON.parse(await readFile(
join(import.meta.url, '..', 'package.json'),
'utf8'
))
await $`npm pack`
await $`zip elasticsearch-js-${version}.zip elastic-elasticsearch-${packageJson.version}.tgz`
await $`rm elastic-elasticsearch-${packageJson.version}.tgz`
await $`mv ${join(import.meta.url, '..', `elasticsearch-js-${version}.zip`)} ${join(import.meta.url, '..', outputFolder, `elasticsearch-js-${version}.zip`)}`
}
async function bump (args) {
assert(args.length === 1, 'Bump task expects one parameter')
const [version] = args
const packageJson = JSON.parse(await readFile(
join(import.meta.url, '..', 'package.json'),
'utf8'
))
const cleanVersion = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
assert(semver.valid(cleanVersion))
packageJson.version = cleanVersion
packageJson.versionCanary = `${cleanVersion}-canary.0`
await writeFile(
join(import.meta.url, '..', 'package.json'),
JSON.stringify(packageJson, null, 2),
'utf8'
)
const testMatrix = await readFile(join(import.meta.url, 'test-matrix.yml'), 'utf8')
await writeFile(
join(import.meta.url, 'test-matrix.yml'),
testMatrix.replace(/STACK_VERSION:\s+\- "[0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?"/, `STACK_VERSION:\n - "${cleanVersion}-SNAPSHOT"`), // eslint-disable-line
'utf8'
)
}
// this command can only be executed locally for now
async function codegen (args) {
assert(args.length === 1, 'Bump task expects one parameter')
const [version] = args
await $`node scripts/generate --version ${version}`
}
function onError (err) {
console.log(err)
process.exit(1)
}

.ci/make.sh (new executable file)

@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# ------------------------------------------------------- #
#
# Skeleton for common build entry script for all elastic
# clients. Needs to be adapted to individual client usage.
#
# Must be called: ./.ci/make.sh <target> <params>
#
# Version: 1.1.0
#
# Targets:
# ---------------------------
# assemble <VERSION> : build client artefacts with version
# bump <VERSION> : bump client internals to version
# codegen <VERSION> : generate endpoints
# docsgen <VERSION> : generate documentation
# examplegen : generate the doc examples
# clean : clean workspace
#
# ------------------------------------------------------- #
# ------------------------------------------------------- #
# Bootstrap
# ------------------------------------------------------- #
script_path=$(dirname "$(realpath -s "$0")")
repo=$(realpath "$script_path/../")
# shellcheck disable=SC1090
CMD=$1
TASK=$1
TASK_ARGS=()
VERSION=$2
STACK_VERSION=$VERSION
NODE_JS_VERSION=16
WORKFLOW=${WORKFLOW-staging}
set -euo pipefail
product="elastic/elasticsearch-js"
output_folder=".ci/output"
OUTPUT_DIR="$repo/${output_folder}"
REPO_BINDING="${OUTPUT_DIR}:/sln/${output_folder}"
mkdir -p "$OUTPUT_DIR"
echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"
# ------------------------------------------------------- #
# Parse Command
# ------------------------------------------------------- #
case $CMD in
clean)
echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
rm -rf "$output_folder"
echo -e "\033[32;1mdone.\033[0m"
exit 0
;;
assemble)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: assemble artefact $VERSION\033[0m"
TASK=release
TASK_ARGS=("$VERSION" "$output_folder")
;;
codegen)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: codegen -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
TASK=codegen
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION")
;;
docsgen)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
TASK=codegen
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
examplesgen)
echo -e "\033[36;1mTARGET: generate examples\033[0m"
TASK=codegen
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION" "$codegen_folder")
;;
bump)
if [ -v $VERSION ]; then
echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
exit 1
fi
echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
TASK=bump
# VERSION is BRANCH here for now
TASK_ARGS=("$VERSION")
;;
*)
echo -e "\nUsage:\n\t $CMD is not supported right now\n"
exit 1
esac
# ------------------------------------------------------- #
# Build Container
# ------------------------------------------------------- #
echo -e "\033[34;1mINFO: building $product container\033[0m"
docker build \
--file .ci/Dockerfile \
--tag ${product} \
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
--build-arg USER_ID="$(id -u)" \
--build-arg GROUP_ID="$(id -g)" \
.
# ------------------------------------------------------- #
# Run the Container
# ------------------------------------------------------- #
echo -e "\033[34;1mINFO: running $product container\033[0m"
docker run \
--volume $repo:/usr/src/app \
--volume /usr/src/app/node_modules \
--env "WORKFLOW=${WORKFLOW}" \
--name make-elasticsearch-js \
--rm \
$product \
node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}
# ------------------------------------------------------- #
# Post Command tasks & checks
# ------------------------------------------------------- #
if [[ "$CMD" == "assemble" ]]; then
if compgen -G ".ci/output/*" > /dev/null; then
echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "bump" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully bumped client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed bumped client v$VERSION\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "codegen" ]]; then
if [ -n "$(git status --porcelain)" ]; then
echo -e "\033[32;1mTARGET: successfully generated client v$VERSION\033[0m"
else
echo -e "\033[31;1mTARGET: failed generating client v$VERSION\033[0m"
exit 1
fi
fi
if [[ "$CMD" == "docsgen" ]]; then
echo "TODO"
fi
if [[ "$CMD" == "examplesgen" ]]; then
echo "TODO"
fi

@@ -1,6 +1,6 @@
---
STACK_VERSION:
- 7.16-SNAPSHOT
- "7.17.12-SNAPSHOT"
NODE_JS_VERSION:
- 16

@@ -9,36 +9,36 @@ jobs:
strategy:
matrix:
node-version: [12.x, 14.x, 16.x]
node-version: [16.x, 18.x, 20.x]
os: [ubuntu-latest, windows-latest, macOS-latest]
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Install
run: |
npm install
- name: Install
run: |
npm install
- name: Lint
run: |
npm run lint
- name: Lint
run: |
npm run lint
- name: Unit test
run: |
npm run test:unit
- name: Unit test
run: |
npm run test:unit
- name: Acceptance test
run: |
npm run test:acceptance
- name: Acceptance test
run: |
npm run test:acceptance
- name: Type Definitions
run: |
npm run test:types
- name: Type Definitions
run: |
npm run test:types
helpers-integration-test:
name: Helpers integration test
@@ -46,133 +46,99 @@ jobs:
strategy:
matrix:
node-version: [12.x, 14.x, 16.x]
node-version: [16.x, 18.x, 20.x]
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Configure sysctl limits
run: |
sudo swapoff -a
sudo sysctl -w vm.swappiness=1
sudo sysctl -w fs.file-max=262144
sudo sysctl -w vm.max_map_count=262144
- name: Configure sysctl limits
run: |
sudo swapoff -a
sudo sysctl -w vm.swappiness=1
sudo sysctl -w fs.file-max=262144
sudo sysctl -w vm.max_map_count=262144
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.16-SNAPSHOT
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.17-SNAPSHOT
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Install
run: |
npm install
- name: Install
run: |
npm install
- name: Integration test
run: |
npm run test:integration:helpers
- name: Integration test
run: |
npm run test:integration:helpers
bundler-support:
name: Bundler support
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Configure sysctl limits
run: |
sudo swapoff -a
sudo sysctl -w vm.swappiness=1
sudo sysctl -w fs.file-max=262144
sudo sysctl -w vm.max_map_count=262144
- name: Configure sysctl limits
run: |
sudo swapoff -a
sudo sysctl -w vm.swappiness=1
sudo sysctl -w fs.file-max=262144
sudo sysctl -w vm.max_map_count=262144
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.16-SNAPSHOT
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.17-SNAPSHOT
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- name: Use Node.js 16.x
uses: actions/setup-node@v4
with:
node-version: 16.x
- name: Install
run: |
npm install
npm install --prefix test/bundlers/parcel-test
npm install --prefix test/bundlers/rollup-test
npm install --prefix test/bundlers/webpack-test
- name: Install
run: |
npm install
npm install --prefix test/bundlers/parcel-test
npm install --prefix test/bundlers/rollup-test
npm install --prefix test/bundlers/webpack-test
- name: Build
run: |
npm run build --prefix test/bundlers/parcel-test
npm run build --prefix test/bundlers/rollup-test
npm run build --prefix test/bundlers/webpack-test
- name: Build
run: |
npm run build --prefix test/bundlers/parcel-test
npm run build --prefix test/bundlers/rollup-test
npm run build --prefix test/bundlers/webpack-test
- name: Run bundle
run: |
npm start --prefix test/bundlers/parcel-test
npm start --prefix test/bundlers/rollup-test
npm start --prefix test/bundlers/webpack-test
- name: Run bundle
run: |
npm start --prefix test/bundlers/parcel-test
npm start --prefix test/bundlers/rollup-test
npm start --prefix test/bundlers/webpack-test
mock-support:
name: Mock support
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- name: Use Node.js 14.x
uses: actions/setup-node@v4
with:
node-version: 14.x
- name: Install
run: |
npm install
npm install --prefix test/mock
- name: Install
run: |
npm install
npm install --prefix test/mock
- name: Run test
run: |
npm test --prefix test/mock
code-coverage:
name: Code coverage
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Install
run: |
npm install
- name: Code coverage report
run: |
npm run test:coverage-report
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
with:
file: ./coverage.lcov
fail_ci_if_error: true
- name: Code coverage 100%
run: |
npm run test:coverage-100
- name: Run test
run: |
npm test --prefix test/mock
license:
name: License check
@@ -183,17 +149,17 @@ jobs:
node-version: [14.x]
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Install
run: |
npm install
- name: Install
run: |
npm install
- name: License checker
run: |
npm run license-checker
- name: License checker
run: |
npm run license-checker

@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['master_timeout', 'timeout', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'wait_for_completion', 'verify', 'ignore_unavailable', 'index_details', 'include_repository', 'verbose', 'local', 'blob_count', 'concurrency', 'read_node_count', 'early_read_node_count', 'seed', 'rare_action_probability', 'max_blob_size', 'max_total_data_size', 'detailed', 'rarely_abort_writes']
const snakeCase = { masterTimeout: 'master_timeout', errorTrace: 'error_trace', filterPath: 'filter_path', waitForCompletion: 'wait_for_completion', ignoreUnavailable: 'ignore_unavailable', indexDetails: 'index_details', includeRepository: 'include_repository', blobCount: 'blob_count', readNodeCount: 'read_node_count', earlyReadNodeCount: 'early_read_node_count', rareActionProbability: 'rare_action_probability', maxBlobSize: 'max_blob_size', maxTotalDataSize: 'max_total_data_size', rarelyAbortWrites: 'rarely_abort_writes' }
const acceptedQuerystring = ['master_timeout', 'timeout', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'wait_for_completion', 'verify', 'ignore_unavailable', 'index_details', 'include_repository', 'sort', 'size', 'order', 'from_sort_value', 'after', 'offset', 'slm_policy_filter', 'verbose', 'local', 'blob_count', 'concurrency', 'read_node_count', 'early_read_node_count', 'seed', 'rare_action_probability', 'max_blob_size', 'max_total_data_size', 'detailed', 'rarely_abort_writes']
const snakeCase = { masterTimeout: 'master_timeout', errorTrace: 'error_trace', filterPath: 'filter_path', waitForCompletion: 'wait_for_completion', ignoreUnavailable: 'ignore_unavailable', indexDetails: 'index_details', includeRepository: 'include_repository', fromSortValue: 'from_sort_value', slmPolicyFilter: 'slm_policy_filter', blobCount: 'blob_count', readNodeCount: 'read_node_count', earlyReadNodeCount: 'early_read_node_count', rareActionProbability: 'rare_action_probability', maxBlobSize: 'max_blob_size', maxTotalDataSize: 'max_total_data_size', rarelyAbortWrites: 'rarely_abort_writes' }
function SnapshotApi (transport, ConfigurationError) {
this.transport = transport

@@ -2628,6 +2628,13 @@ export interface SnapshotGet extends Generic {
ignore_unavailable?: boolean;
index_details?: boolean;
include_repository?: boolean;
sort?: 'start_time' | 'duration' | 'name' | 'repository' | 'index_count' | 'shard_count' | 'failed_shard_count';
size?: number;
order?: 'asc' | 'desc';
from_sort_value?: string;
after?: string;
offset?: number;
slm_policy_filter?: string;
verbose?: boolean;
}
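
A hedged sketch of how these new `snapshot.get` querystring options might be used (repository name and values are placeholders; assumes an async context and an existing `client` instance):

[source,js]
----
// Sketch only: newest ten snapshots first, filtered by an SLM policy pattern.
const { body } = await client.snapshot.get({
  repository: 'my-repository',
  snapshot: '_all',
  sort: 'start_time',
  order: 'desc',
  size: 10,
  slm_policy_filter: 'nightly-*'
})
console.log(body.snapshots)
----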

@@ -1,6 +1,65 @@
[[changelog-client]]
== Release notes
[discrete]
=== 7.17.14
[discrete]
==== Fixes
Stops retrying timed-out requests by default.
Re-enabling the old behavior can be done by providing a `retryOnTimeout` option when instantiating the client.
https://github.com/elastic/elasticsearch-js/pull/2293[#2293]
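
For illustration, a minimal sketch of opting back into the old behavior (the node URL is a placeholder):

[source,js]
----
// Sketch only: re-enables retrying requests that time out.
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  maxRetries: 3,
  retryOnTimeout: true // defaults to false starting with 7.17.14
})
----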
[discrete]
=== 7.17.13
[discrete]
==== Fixes
Fixes a sniffing bug: newly-added nodes that don't have HTTP information yet are now skipped, preventing an unexpected `TypeError`. https://github.com/elastic/elasticsearch-js/issues/1230[#1230]
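
The fix only matters when sniffing is enabled; a hedged sketch of such a configuration (node URL and values are illustrative):

[source,js]
----
// Sketch only: with sniffing enabled, nodes the cluster reports without
// an `http` section are now skipped instead of causing a TypeError.
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200', // placeholder address
  sniffOnStart: true,
  sniffInterval: 60000 // re-sniff every 60 seconds
})
----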
[discrete]
=== 7.17.12
[discrete]
==== Notes
This is the first 7.x release where patch versions of the client will no longer (intentionally) align with patch versions of Elasticsearch. The latest patch release of the client will always be compatible with the corresponding minor release of Elasticsearch.
[discrete]
==== Fixes
[discrete]
===== TypeScript build failure
Fixes a type declaration bug that caused TypeScript builds to fail. https://github.com/elastic/elasticsearch-js/pull/1927[#1927]
[discrete]
===== Add TypeScript type declarations to exports
Adds the TypeScript type declarations file to the package.json `exports` map. https://github.com/elastic/elasticsearch-js/pull/1930[#1930]
[discrete]
=== 7.17.11
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v7.17.11`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/7.17/release-notes-7.17.11.html[here].
[discrete]
==== Fixes
[discrete]
===== Fix index drift bug in bulk helper https://github.com/elastic/elasticsearch-js/pull/1759[#1759]
Fixes a bug in the bulk helper: when an error occurred on a bulk HTTP request that contained a `delete` action, `onDrop` could send back the wrong JSON document or report an error for a nonexistent document.
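
A hedged sketch, modeled on the regression test added later in this changeset (index names, ids, and documents are placeholders):

[source,js]
----
// Sketch only: mixing `delete` and `index` actions no longer makes onDrop
// report the wrong source document.
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const result = await client.helpers.bulk({
    datasource: [{ user: 'arya', age: 18 }, { user: 'gimli', age: 139 }],
    retries: 0,
    onDocument (doc) {
      return doc.user === 'arya'
        ? { delete: { _index: 'test', _id: '1' } }
        : { index: { _index: 'test' } }
    },
    onDrop (doc) {
      // doc.document now matches the operation that actually failed
      console.log('dropped', doc.document, doc.operation)
    }
  })
  console.log(result)
}

run().catch(console.log)
----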
[discrete]
=== 7.17.0
@@ -34,7 +93,6 @@ https://www.elastic.co/guide/en/elasticsearch/reference/7.16/release-notes-7.16.
[discrete]
===== Fixed export field deprecation log https://github.com/elastic/elasticsearch-js/pull/1593#[#1593]
[discrete]
=== 7.15.0
@@ -50,7 +108,7 @@ https://www.elastic.co/guide/en/elasticsearch/reference/7.15/release-notes-7.15.
[discrete]
===== Support mapbox content type https://github.com/elastic/elasticsearch-js/pull/1500[#1500]
If you call an API that returns a mapbox conten type, the response body will be a buffer.
If you call an API that returns a mapbox content type, the response body will be a buffer.
[discrete]
===== Support CA fingerprint validation https://github.com/elastic/elasticsearch-js/pull/1499[#1499]

@@ -1,6 +1,6 @@
= Elasticsearch JavaScript Client
:branch: 7.16
include::{asciidoc-dir}/../../shared/versions/stack/{source_branch}.asciidoc[]
include::{asciidoc-dir}/../../shared/attributes.asciidoc[]
include::introduction.asciidoc[]

@@ -377,9 +377,9 @@ child.search({
To improve observability, the client offers an easy way to configure the
`X-Opaque-Id` header. If you set the `X-Opaque-Id` in a specific request, this
allows you to discover this identifier in the
https://www.elastic.co/guide/en/elasticsearch/reference/master/logging.html#deprecation-logging[deprecation logs],
helps you with https://www.elastic.co/guide/en/elasticsearch/reference/master/index-modules-slowlog.html#_identifying_search_slow_log_origin[identifying search slow log origin]
as well as https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html#_identifying_running_tasks[identifying running tasks].
https://www.elastic.co/guide/en/elasticsearch/reference/7.17/logging.html#deprecation-logging[deprecation logs],
helps you with https://www.elastic.co/guide/en/elasticsearch/reference/7.17/index-modules-slowlog.html#_identifying_search_slow_log_origin[identifying search slow log origin]
as well as https://www.elastic.co/guide/en/elasticsearch/reference/7.17/tasks.html#_identifying_running_tasks[identifying running tasks].
The `X-Opaque-Id` should be configured in each request, for doing that you can
use the `opaqueId` option, as you can see in the following example. The
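
A minimal sketch of the per-request `opaqueId` option (index name and identifier value are placeholders; assumes an async context and an existing `client` instance):

[source,js]
----
// Sketch only: opaqueId is sent to Elasticsearch as the X-Opaque-Id header.
const result = await client.search({
  index: 'my-index',
  body: { query: { match_all: {} } }
}, {
  opaqueId: 'my-search-request-01'
})
----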

@@ -3510,7 +3510,7 @@ link:{ref}/modules-scripting.html[Documentation] +
----
client.getScriptContext()
----
link:https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-contexts.html[Documentation] +
link:{painless}/painless-contexts.html[Documentation] +
[discrete]
@@ -9127,7 +9127,7 @@ client.scriptsPainlessExecute({
body: object
})
----
link:https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-execute-api.html[Documentation] +
link:{painless}/painless-execute-api.html[Documentation] +
[cols=2*]
|===
|`body`
@@ -10860,6 +10860,13 @@ client.snapshot.get({
ignore_unavailable: boolean,
index_details: boolean,
include_repository: boolean,
sort: 'start_time' | 'duration' | 'name' | 'repository' | 'index_count' | 'shard_count' | 'failed_shard_count',
size: integer,
order: 'asc' | 'desc',
from_sort_value: string,
after: string,
offset: integer,
slm_policy_filter: string,
verbose: boolean
})
----
@@ -10884,6 +10891,29 @@ link:{ref}/modules-snapshots.html[Documentation] +
|`include_repository` or `includeRepository`
|`boolean` - Whether to include the repository name in the snapshot info. Defaults to true.
|`sort`
|`'start_time' \| 'duration' \| 'name' \| 'repository' \| 'index_count' \| 'shard_count' \| 'failed_shard_count'` - Allows setting a sort order for the result. Defaults to start_time +
_Default:_ `start_time`
|`size`
|`integer` - Maximum number of snapshots to return. Defaults to 0 which means return all that match without limit.
|`order`
|`'asc' \| 'desc'` - Sort order +
_Default:_ `asc`
|`from_sort_value` or `fromSortValue`
|`string` - Value of the current sort column at which to start retrieval.
|`after`
|`string` - Offset identifier to start pagination from as returned by the 'next' field in the response body.
|`offset`
|`integer` - Numeric offset to start pagination based on the snapshots matching the request. Defaults to 0
|`slm_policy_filter` or `slmPolicyFilter`
|`string` - Filter snapshots by a comma-separated list of SLM policy names that snapshots belong to. Accepts wildcards. Use the special pattern '_none' to match snapshots without an SLM policy
|`verbose`
|`boolean` - Whether to show verbose snapshot info or only show the basic info found in the repository index blob

@@ -120,15 +120,15 @@ class Client extends ESAPI {
maxCompressedResponseSize: null
}, opts)
if (options.maxResponseSize !== null && options.maxResponseSize > buffer.constants.MAX_STRING_LENGTH) {
if (options.maxResponseSize != null && options.maxResponseSize > buffer.constants.MAX_STRING_LENGTH) {
throw new ConfigurationError(`The maxResponseSize cannot be bigger than ${buffer.constants.MAX_STRING_LENGTH}`)
}
if (options.maxCompressedResponseSize !== null && options.maxCompressedResponseSize > buffer.constants.MAX_LENGTH) {
if (options.maxCompressedResponseSize != null && options.maxCompressedResponseSize > buffer.constants.MAX_LENGTH) {
throw new ConfigurationError(`The maxCompressedResponseSize cannot be bigger than ${buffer.constants.MAX_LENGTH}`)
}
if (options.caFingerprint !== null && isHttpConnection(opts.node || opts.nodes)) {
if (options.caFingerprint != null && isHttpConnection(opts.node || opts.nodes)) {
throw new ConfigurationError('You can\'t configure the caFingerprint with a http connection')
}
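
These guards validate the optional response-size limits; a hedged sketch of setting them when creating a client (the node URL and limit values are arbitrary examples):

[source,js]
----
// Sketch only: explicit response-size caps; values are illustrative.
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  maxResponseSize: 100 * 1024 * 1024,          // must not exceed buffer.constants.MAX_STRING_LENGTH
  maxCompressedResponseSize: 50 * 1024 * 1024  // must not exceed buffer.constants.MAX_LENGTH
})
----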

@@ -705,11 +705,11 @@ class Helpers {
}
const retry = []
const { items } = body
let indexSlice = 0
for (let i = 0, len = items.length; i < len; i++) {
const action = items[i]
const operation = Object.keys(action)[0]
const { status } = action[operation]
const indexSlice = operation !== 'delete' ? i * 2 : i
if (status >= 400) {
// 429 is the only staus code where we might want to retry
@@ -736,6 +736,7 @@
} else {
stats.successful += 1
}
operation === 'delete' ? indexSlice += 1 : indexSlice += 2
}
callback(null, retry)
})
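
The change replaces the per-item `i * 2` offset with a running counter, because a `delete` action serializes to a single line of the bulk body while other actions serialize to two (action line plus document line). A minimal standalone sketch of that bookkeeping (the serialized lines are placeholders):

[source,js]
----
// Each index/create/update action occupies two slots in the serialized
// bulk body, a delete occupies one, so the offset advances by 1 or 2.
const bulkBody = [
  '{"delete":{"_index":"test","_id":"1"}}',          // item 0 -> slice 0
  '{"index":{"_index":"test"}}', '{"user":"arya"}',  // item 1 -> slice 1
  '{"index":{"_index":"test"}}', '{"user":"gimli"}'  // item 2 -> slice 3
]
const items = [{ delete: { status: 200 } }, { index: { status: 429 } }, { index: { status: 200 } }]

let indexSlice = 0
for (const item of items) {
  const operation = Object.keys(item)[0]
  console.log(operation, '->', bulkBody[indexSlice])
  operation === 'delete' ? indexSlice += 1 : indexSlice += 2
}
----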

lib/Transport.d.ts (vendored)

@@ -49,6 +49,7 @@ interface TransportOptions {
serializer: Serializer;
maxRetries: number;
requestTimeout: number | string;
retryOnTimeout?: boolean;
suggestCompression?: boolean;
compression?: 'gzip';
sniffInterval?: number;

@@ -57,6 +57,7 @@ class Transport {
this.serializer = opts.serializer
this.maxRetries = opts.maxRetries
this.requestTimeout = toMs(opts.requestTimeout)
this.retryOnTimeout = opts.retryOnTimeout != null ? opts.retryOnTimeout : false
this.suggestCompression = opts.suggestCompression === true
this.compression = opts.compression || false
this.context = opts.context || null
@@ -220,6 +221,13 @@
})
}
// do not retry timeout errors by default
if (err.name === 'TimeoutError' && this.retryOnTimeout !== true) {
err.meta = result
this.emit('response', err, result)
return callback(err, result)
}
// retry logic
if (meta.attempts < maxRetries) {
meta.attempts++

@@ -206,6 +206,10 @@ class BaseConnectionPool {
for (let i = 0, len = ids.length; i < len; i++) {
const node = nodes[ids[i]]
// newly-added nodes do not have http assigned yet, so skip
if (node.http === undefined) continue
// If there is no protocol in
// the `publish_address` new URL will throw
// the publish_address can have two forms:

@@ -6,13 +6,14 @@
"exports": {
".": {
"require": "./index.js",
"import": "./index.mjs"
"import": "./index.mjs",
"types": "./index.d.ts"
},
"./*": "./*.js"
},
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
"version": "7.17.0",
"versionCanary": "7.16.0-canary.7",
"version": "7.17.14",
"versionCanary": "7.17.14-canary.0",
"keywords": [
"elasticsearch",
"elastic",
@@ -53,6 +54,7 @@
"cross-zip": "^4.0.0",
"dedent": "^0.7.0",
"deepmerge": "^4.2.2",
"desm": "^1.2.0",
"dezalgo": "^1.0.3",
"fast-deep-equal": "^3.1.3",
"into-stream": "^6.0.0",
@@ -73,7 +75,8 @@
"tap": "^15.0.9",
"tsd": "^0.15.1",
"workq": "^3.0.0",
"xmlbuilder2": "^2.4.1"
"xmlbuilder2": "^2.4.1",
"zx": "^6.1.0"
},
"dependencies": {
"debug": "^4.3.1",

@@ -282,6 +282,7 @@ function fixLink (name, str) {
str = str.replace(/frozen\.html/, 'freeze-index-api.html')
str = str.replace(/ml-file-structure\.html/, 'ml-find-file-structure.html')
str = str.replace(/security-api-get-user-privileges\.html/, 'security-api-get-privileges.html')
str = str.replace(/^.+guide\/en\/elasticsearch\/painless\/[^/]+\/([^./]*\.html(?:#.+)?)$/, '{painless}/$1')
return str
}

@@ -169,6 +169,7 @@ export interface ${toPascalCase(name)}${body ? `<T = ${bodyGeneric}>` : ''} exte
case 'int':
case 'double':
case 'long':
case 'integer':
return 'number'
case 'boolean|long':
return 'boolean | number'

@@ -88,7 +88,8 @@ test('Connection error', t => {
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnectionError,
maxRetries: 1
maxRetries: 1,
retryOnTimeout: true
})
const order = [
@@ -124,18 +125,18 @@
})
test('TimeoutError error', t => {
t.plan(10)
t.plan(8)
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnectionTimeout,
maxRetries: 1
maxRetries: 1,
retryOnTimeout: true
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.REQUEST,
events.RESPONSE
]
@@ -170,7 +171,8 @@ test('RequestAbortedError error', t => {
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnectionTimeout,
maxRetries: 1
maxRetries: 1,
retryOnTimeout: true
})
const order = [
@@ -221,7 +223,8 @@ test('ResponseError error (no retry)', t => {
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxRetries: 1
maxRetries: 1,
retryOnTimeout: true
})
const order = [
@@ -373,7 +376,8 @@ test('Deserialization Error', t => {
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxRetries: 1
maxRetries: 1,
retryOnTimeout: true
})
const order = [
@@ -423,7 +427,11 @@ test('Socket destroyed while reading the body', t => {
}
buildServer(handler, ({ port }, server) => {
const client = new Client({ node: `http://localhost:${port}`, maxRetries: 1 })
const client = new Client({
node: `http://localhost:${port}`,
maxRetries: 1,
retryOnTimeout: true
})
const order = [
events.SERIALIZATION,

@@ -680,7 +680,8 @@ test('TimeoutError', t => {
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnectionTimeout,
maxRetries: 0
maxRetries: 0,
retryOnTimeout: true
})
client.on('request', (err, event) => {

@@ -313,6 +313,36 @@ test('API', t => {
t.end()
})
t.test('Should skip nodes that do not have an http property yet', t => {
const pool = new BaseConnectionPool({ Connection })
const nodes = {
a1: {
http: {
publish_address: '127.0.0.1:9200'
},
roles: ['master', 'data', 'ingest']
},
a2: {
roles: ['master', 'data', 'ingest']
}
}
t.same(pool.nodesToHost(nodes, 'http:'), [{
url: new URL('http://127.0.0.1:9200'),
id: 'a1',
roles: {
master: true,
data: true,
ingest: true,
ml: false
}
}])
t.equal(pool.nodesToHost(nodes, 'http:').length, 1)
t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, '127.0.0.1:9200')
t.end()
})
t.end()
})

@@ -232,7 +232,7 @@ test('Authentication', t => {
server.stop()
})
})
})
}, { skip: true })
t.test('Node with basic auth data in the url (array of nodes)', t => {
t.plan(3)
@ -1176,7 +1176,7 @@ test('Disable keep alive agent', t => {
server.stop()
})
})
})
}, { skip: true })
test('name property as string', t => {
t.plan(1)

@@ -237,7 +237,7 @@ test('Disable keep alive', t => {
server.stop()
})
})
})
}, { skip: true })
test('Timeout support', t => {
t.plan(1)
@@ -1101,7 +1101,7 @@ test('Should show local/remote socket address in case of ECONNRESET', t => {
method: 'GET'
}, (err, res) => {
t.ok(err instanceof ConnectionError)
t.match(err.message, /socket\shang\sup\s-\sLocal:\s127.0.0.1:\d+,\sRemote:\s127.0.0.1:\d+/)
t.match(err.message, /socket\shang\sup\s-\sLocal:\s(127.0.0.1|::1):\d+,\sRemote:\s(127.0.0.1|::1):\d+/)
server.stop()
})
})

@@ -1082,6 +1082,70 @@ test('bulk delete', t => {
server.stop()
})
t.test('Should call onDrop on the correct document when doing a mix of operations that includes deletes', async t => {
// checks to ensure onDrop doesn't provide the wrong document when some operations are deletes
// see https://github.com/elastic/elasticsearch-js/issues/1751
async function handler (req, res) {
res.setHeader('content-type', 'application/json')
res.end(JSON.stringify({
took: 0,
errors: true,
items: [
{ delete: { status: 200 } },
{ index: { status: 429 } },
{ index: { status: 200 } }
]
}))
}
const [{ port }, server] = await buildServer(handler)
const client = new Client({ node: `http://localhost:${port}` })
let counter = 0
const result = await client.helpers.bulk({
datasource: dataset.slice(),
concurrency: 1,
wait: 10,
retries: 0,
onDocument (doc) {
counter++
if (counter === 1) {
return {
delete: {
_index: 'test',
_id: String(counter)
}
}
} else {
return {
index: {
_index: 'test'
}
}
}
},
onDrop (doc) {
t.same(doc, {
status: 429,
error: null,
operation: { index: { _index: 'test' } },
document: { user: 'arya', age: 18 },
retried: false
})
}
})
t.type(result.time, 'number')
t.type(result.bytes, 'number')
t.match(result, {
total: 3,
successful: 2,
retry: 0,
failed: 1,
aborted: false
})
server.stop()
})
t.end()
})

@@ -1025,6 +1025,7 @@ test('Retry mechanism and abort', t => {
serializer: new Serializer(),
maxRetries: 2,
requestTimeout: 100,
retryOnTimeout: true,
sniffInterval: false,
sniffOnStart: false
})
@@ -2203,6 +2204,7 @@ test('Compress request', t => {
serializer: new Serializer(),
maxRetries: 3,
requestTimeout: 250,
retryOnTimeout: true,
sniffInterval: false,
sniffOnStart: false
})