Compare commits


10 Commits
7.11 ... 7.10

SHA1 Message Date
4def742e47 Bumped v7.10.0 2020-11-12 07:46:09 +01:00
1b7402d06c API generation 2020-11-12 07:45:50 +01:00
3d728bcad7 Handle connectivity issues while reading the body (#1343) 2020-11-10 18:25:22 +01:00
2d51ef429f [Backport 7.x] Add warning log about nodejs version support (#1350)
Co-authored-by: Tomas Della Vedova <delvedor@users.noreply.github.com>
2020-11-10 18:14:58 +01:00
39789031c3 Added node.js support doc (#1346)
Co-authored-by: István Zoltán Szabó <istvan.szabo@elastic.co>
2020-11-10 18:14:55 +01:00
dfb09b4827 [7.x][DOCS] Adds Connecting section to Node.JS docs (#1344) 2020-11-10 18:13:53 +01:00
11c11e4568 API generation 2020-10-21 11:08:30 +02:00
27a4e908c9 Bumped v7.10.0-rc.1 2020-10-19 16:49:39 +02:00
c20109ec78 Fix integration test (#1335) 2020-10-15 08:56:44 +02:00
d6270b17c4 Updated ci configuration 2020-10-12 16:18:34 +02:00
47 changed files with 270 additions and 2160 deletions

View File

@@ -18,7 +18,7 @@ require_stack_version
if [[ -z $es_node_name ]]; then
# only set these once
set -euo pipefail
export TEST_SUITE=${TEST_SUITE-free}
export TEST_SUITE=${TEST_SUITE-oss}
export RUNSCRIPTS=${RUNSCRIPTS-}
export DETACH=${DETACH-false}
export CLEANUP=${CLEANUP-false}
@@ -27,7 +27,8 @@ if [[ -z $es_node_name ]]; then
export elastic_password=changeme
export elasticsearch_image=elasticsearch
export elasticsearch_url=https://elastic:${elastic_password}@${es_node_name}:9200
if [[ $TEST_SUITE != "platinum" ]]; then
if [[ $TEST_SUITE != "xpack" ]]; then
export elasticsearch_image=elasticsearch-${TEST_SUITE}
export elasticsearch_url=http://${es_node_name}:9200
fi
export external_elasticsearch_url=${elasticsearch_url/$es_node_name/localhost}

View File

@@ -4,7 +4,7 @@
# to form a cluster suitable for running the REST API tests.
#
# Export the STACK_VERSION variable, eg. '8.0.0-SNAPSHOT'.
# Export the TEST_SUITE variable, eg. 'free' or 'platinum' defaults to 'free'.
# Export the TEST_SUITE variable, eg. 'oss' or 'xpack' defaults to 'oss'.
# Export the NUMBER_OF_NODES variable to start more than 1 node
# Version 1.2.0
@@ -39,7 +39,7 @@ environment=($(cat <<-END
--env repositories.url.allowed_urls=http://snapshot.test*
END
))
if [[ "$TEST_SUITE" == "platinum" ]]; then
if [[ "$TEST_SUITE" == "xpack" ]]; then
environment+=($(cat <<-END
--env ELASTIC_PASSWORD=$elastic_password
--env xpack.license.self_generated.type=trial
@@ -64,7 +64,7 @@ END
fi
cert_validation_flags=""
if [[ "$TEST_SUITE" == "platinum" ]]; then
if [[ "$TEST_SUITE" == "xpack" ]]; then
cert_validation_flags="--insecure --cacert /usr/share/elasticsearch/config/certs/ca.crt --resolve ${es_node_name}:443:127.0.0.1"
fi

View File

@@ -2,7 +2,7 @@
# parameters are available to this script
# STACK_VERSION -- version e.g Major.Minor.Patch(-Prelease)
# TEST_SUITE -- which test suite to run: free or platinum
# TEST_SUITE -- which test suite to run: oss or xpack
# ELASTICSEARCH_URL -- The url at which elasticsearch is reachable, a default is composed based on STACK_VERSION and TEST_SUITE
# NODE_JS_VERSION -- node js version (defined in test-matrix.yml, a default is hardcoded here)
script_path=$(dirname $(realpath -s $0))

View File

@@ -1,6 +1,6 @@
---
STACK_VERSION:
- 7.11.0-SNAPSHOT
- 7.10.0-SNAPSHOT
NODE_JS_VERSION:
- 14
@@ -9,7 +9,7 @@ NODE_JS_VERSION:
- 8
TEST_SUITE:
- free
- platinum
- oss
- xpack
exclude: ~

View File

@@ -86,7 +86,7 @@ jobs:
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.11.0-SNAPSHOT
stack-version: 7.10.0-SNAPSHOT
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1

View File

@@ -71,7 +71,7 @@ npm install @elastic/elasticsearch@<major>
#### Browser
WARNING: There is no official support for the browser environment. It exposes your Elasticsearch instance to everyone, which could lead to security issues.
We recommend that you write a lightweight proxy that uses this client instead, you can see a proxy example [here](./docs/examples/proxy).
We recommend that you write a lightweight proxy that uses this client instead.
## Documentation

View File

@@ -85,33 +85,6 @@ AsyncSearchApi.prototype.get = function asyncSearchGetApi (params, options, callback) {
return this.transport.request(request, options, callback)
}
AsyncSearchApi.prototype.status = function asyncSearchStatusApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params['id'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: id')
return handleError(err, callback)
}
var { method, body, id, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'GET'
path = '/' + '_async_search' + '/' + 'status' + '/' + encodeURIComponent(id)
// build request object
const request = {
method,
path,
body: null,
querystring
}
return this.transport.request(request, options, callback)
}
AsyncSearchApi.prototype.submit = function asyncSearchSubmitApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)

View File

@@ -58,7 +58,7 @@ AutoscalingApi.prototype.deleteAutoscalingPolicy = function autoscalingDeleteAut
return this.transport.request(request, options, callback)
}
AutoscalingApi.prototype.getAutoscalingCapacity = function autoscalingGetAutoscalingCapacityApi (params, options, callback) {
AutoscalingApi.prototype.getAutoscalingDecision = function autoscalingGetAutoscalingDecisionApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
var { method, body, ...querystring } = params
@@ -66,7 +66,7 @@ AutoscalingApi.prototype.getAutoscalingCapacity = function autoscalingGetAutosca
var path = ''
if (method == null) method = 'GET'
path = '/' + '_autoscaling' + '/' + 'capacity'
path = '/' + '_autoscaling' + '/' + 'decision'
// build request object
const request = {
@@ -139,7 +139,7 @@ AutoscalingApi.prototype.putAutoscalingPolicy = function autoscalingPutAutoscali
Object.defineProperties(AutoscalingApi.prototype, {
delete_autoscaling_policy: { get () { return this.deleteAutoscalingPolicy } },
get_autoscaling_capacity: { get () { return this.getAutoscalingCapacity } },
get_autoscaling_decision: { get () { return this.getAutoscalingDecision } },
get_autoscaling_policy: { get () { return this.getAutoscalingPolicy } },
put_autoscaling_policy: { get () { return this.putAutoscalingPolicy } }
})

View File

@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['format', 'local', 'h', 'help', 's', 'v', 'expand_wildcards', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'bytes', 'master_timeout', 'fields', 'time', 'ts', 'health', 'pri', 'include_unloaded_segments', 'full_id', 'active_only', 'detailed', 'index', 'ignore_unavailable', 'nodes', 'actions', 'parent_task_id', 'size', 'allow_no_match', 'allow_no_datafeeds', 'allow_no_jobs', 'from']
const snakeCase = { expandWildcards: 'expand_wildcards', errorTrace: 'error_trace', filterPath: 'filter_path', masterTimeout: 'master_timeout', includeUnloadedSegments: 'include_unloaded_segments', fullId: 'full_id', activeOnly: 'active_only', ignoreUnavailable: 'ignore_unavailable', parentTaskId: 'parent_task_id', allowNoMatch: 'allow_no_match', allowNoDatafeeds: 'allow_no_datafeeds', allowNoJobs: 'allow_no_jobs' }
const acceptedQuerystring = ['format', 'local', 'h', 'help', 's', 'v', 'expand_wildcards', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'bytes', 'master_timeout', 'fields', 'time', 'ts', 'health', 'pri', 'include_unloaded_segments', 'full_id', 'active_only', 'detailed', 'index', 'ignore_unavailable', 'node_id', 'actions', 'parent_task', 'size', 'allow_no_match', 'allow_no_datafeeds', 'allow_no_jobs', 'from']
const snakeCase = { expandWildcards: 'expand_wildcards', errorTrace: 'error_trace', filterPath: 'filter_path', masterTimeout: 'master_timeout', includeUnloadedSegments: 'include_unloaded_segments', fullId: 'full_id', activeOnly: 'active_only', ignoreUnavailable: 'ignore_unavailable', nodeId: 'node_id', parentTask: 'parent_task', allowNoMatch: 'allow_no_match', allowNoDatafeeds: 'allow_no_datafeeds', allowNoJobs: 'allow_no_jobs' }
function CatApi (transport, ConfigurationError) {
this.transport = transport

View File

@@ -777,8 +777,13 @@ IndicesApi.prototype.getUpgrade = function indicesGetUpgradeApi (params, options
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'GET'
path = '/' + encodeURIComponent(index) + '/' + '_upgrade'
if ((index) != null) {
if (method == null) method = 'GET'
path = '/' + encodeURIComponent(index) + '/' + '_upgrade'
} else {
if (method == null) method = 'GET'
path = '/' + '_upgrade'
}
// build request object
const request = {
@@ -1366,8 +1371,13 @@ IndicesApi.prototype.upgrade = function indicesUpgradeApi (params, options, call
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + encodeURIComponent(index) + '/' + '_upgrade'
if ((index) != null) {
if (method == null) method = 'POST'
path = '/' + encodeURIComponent(index) + '/' + '_upgrade'
} else {
if (method == null) method = 'POST'
path = '/' + '_upgrade'
}
// build request object
const request = {
@@ -1548,60 +1558,6 @@ IndicesApi.prototype.getDataStream = function indicesGetDataStreamApi (params, o
return this.transport.request(request, options, callback)
}
IndicesApi.prototype.migrateToDataStream = function indicesMigrateToDataStreamApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params['name'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: name')
return handleError(err, callback)
}
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + '_data_stream' + '/' + '_migrate' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
return this.transport.request(request, options, callback)
}
IndicesApi.prototype.promoteDataStream = function indicesPromoteDataStreamApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params['name'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: name')
return handleError(err, callback)
}
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + '_data_stream' + '/' + '_promote' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
return this.transport.request(request, options, callback)
}
IndicesApi.prototype.reloadSearchAnalyzers = function indicesReloadSearchAnalyzersApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -1689,8 +1645,6 @@ Object.defineProperties(IndicesApi.prototype, {
data_streams_stats: { get () { return this.dataStreamsStats } },
delete_data_stream: { get () { return this.deleteDataStream } },
get_data_stream: { get () { return this.getDataStream } },
migrate_to_data_stream: { get () { return this.migrateToDataStream } },
promote_data_stream: { get () { return this.promoteDataStream } },
reload_search_analyzers: { get () { return this.reloadSearchAnalyzers } }
})

View File

@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['allow_no_match', 'allow_no_jobs', 'force', 'timeout', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'requests_per_second', 'allow_no_forecasts', 'wait_for_completion', 'lines_to_sample', 'line_merge_size_limit', 'charset', 'format', 'has_header_row', 'column_names', 'delimiter', 'quote', 'should_trim_fields', 'grok_pattern', 'timestamp_field', 'timestamp_format', 'explain', 'calc_interim', 'start', 'end', 'advance_time', 'skip_time', 'duration', 'expires_in', 'max_model_memory', 'expand', 'exclude_interim', 'from', 'size', 'anomaly_score', 'sort', 'desc', 'job_id', 'partition_field_value', 'exclude_generated', 'verbose', 'allow_no_datafeeds', 'influencer_score', 'top_n', 'bucket_span', 'overall_score', 'record_score', 'include', 'include_model_definition', 'decompress_definition', 'tags', 'reset_start', 'reset_end', 'ignore_unavailable', 'allow_no_indices', 'ignore_throttled', 'expand_wildcards', 'delete_intervening_results', 'enabled']
const snakeCase = { allowNoMatch: 'allow_no_match', allowNoJobs: 'allow_no_jobs', errorTrace: 'error_trace', filterPath: 'filter_path', requestsPerSecond: 'requests_per_second', allowNoForecasts: 'allow_no_forecasts', waitForCompletion: 'wait_for_completion', linesToSample: 'lines_to_sample', lineMergeSizeLimit: 'line_merge_size_limit', hasHeaderRow: 'has_header_row', columnNames: 'column_names', shouldTrimFields: 'should_trim_fields', grokPattern: 'grok_pattern', timestampField: 'timestamp_field', timestampFormat: 'timestamp_format', calcInterim: 'calc_interim', advanceTime: 'advance_time', skipTime: 'skip_time', expiresIn: 'expires_in', maxModelMemory: 'max_model_memory', excludeInterim: 'exclude_interim', anomalyScore: 'anomaly_score', jobId: 'job_id', partitionFieldValue: 'partition_field_value', excludeGenerated: 'exclude_generated', allowNoDatafeeds: 'allow_no_datafeeds', influencerScore: 'influencer_score', topN: 'top_n', bucketSpan: 'bucket_span', overallScore: 'overall_score', recordScore: 'record_score', includeModelDefinition: 'include_model_definition', decompressDefinition: 'decompress_definition', resetStart: 'reset_start', resetEnd: 'reset_end', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', ignoreThrottled: 'ignore_throttled', expandWildcards: 'expand_wildcards', deleteInterveningResults: 'delete_intervening_results' }
const acceptedQuerystring = ['allow_no_match', 'allow_no_jobs', 'force', 'timeout', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'requests_per_second', 'allow_no_forecasts', 'wait_for_completion', 'lines_to_sample', 'line_merge_size_limit', 'charset', 'format', 'has_header_row', 'column_names', 'delimiter', 'quote', 'should_trim_fields', 'grok_pattern', 'timestamp_field', 'timestamp_format', 'explain', 'calc_interim', 'start', 'end', 'advance_time', 'skip_time', 'duration', 'expires_in', 'max_model_memory', 'expand', 'exclude_interim', 'from', 'size', 'anomaly_score', 'sort', 'desc', 'job_id', 'partition_field_value', 'verbose', 'allow_no_datafeeds', 'influencer_score', 'top_n', 'bucket_span', 'overall_score', 'record_score', 'include', 'include_model_definition', 'decompress_definition', 'tags', 'for_export', 'reset_start', 'reset_end', 'ignore_unavailable', 'allow_no_indices', 'ignore_throttled', 'expand_wildcards', 'delete_intervening_results', 'enabled']
const snakeCase = { allowNoMatch: 'allow_no_match', allowNoJobs: 'allow_no_jobs', errorTrace: 'error_trace', filterPath: 'filter_path', requestsPerSecond: 'requests_per_second', allowNoForecasts: 'allow_no_forecasts', waitForCompletion: 'wait_for_completion', linesToSample: 'lines_to_sample', lineMergeSizeLimit: 'line_merge_size_limit', hasHeaderRow: 'has_header_row', columnNames: 'column_names', shouldTrimFields: 'should_trim_fields', grokPattern: 'grok_pattern', timestampField: 'timestamp_field', timestampFormat: 'timestamp_format', calcInterim: 'calc_interim', advanceTime: 'advance_time', skipTime: 'skip_time', expiresIn: 'expires_in', maxModelMemory: 'max_model_memory', excludeInterim: 'exclude_interim', anomalyScore: 'anomaly_score', jobId: 'job_id', partitionFieldValue: 'partition_field_value', allowNoDatafeeds: 'allow_no_datafeeds', influencerScore: 'influencer_score', topN: 'top_n', bucketSpan: 'bucket_span', overallScore: 'overall_score', recordScore: 'record_score', includeModelDefinition: 'include_model_definition', decompressDefinition: 'decompress_definition', forExport: 'for_export', resetStart: 'reset_start', resetEnd: 'reset_end', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', ignoreThrottled: 'ignore_throttled', expandWildcards: 'expand_wildcards', deleteInterveningResults: 'delete_intervening_results' }
function MlApi (transport, ConfigurationError) {
this.transport = transport
@@ -1725,43 +1725,6 @@ MlApi.prototype.updateModelSnapshot = function mlUpdateModelSnapshotApi (params,
return this.transport.request(request, options, callback)
}
MlApi.prototype.upgradeJobSnapshot = function mlUpgradeJobSnapshotApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params['job_id'] == null && params['jobId'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: job_id or jobId')
return handleError(err, callback)
}
if (params['snapshot_id'] == null && params['snapshotId'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: snapshot_id or snapshotId')
return handleError(err, callback)
}
// check required url components
if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null && params['jobId'] == null))) {
const err = new this[kConfigurationError]('Missing required parameter of the url: job_id')
return handleError(err, callback)
}
var { method, body, jobId, job_id, snapshotId, snapshot_id, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + '_ml' + '/' + 'anomaly_detectors' + '/' + encodeURIComponent(job_id || jobId) + '/' + 'model_snapshots' + '/' + encodeURIComponent(snapshot_id || snapshotId) + '/' + '_upgrade'
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
return this.transport.request(request, options, callback)
}
MlApi.prototype.validate = function mlValidateApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -1873,7 +1836,6 @@ Object.defineProperties(MlApi.prototype, {
update_filter: { get () { return this.updateFilter } },
update_job: { get () { return this.updateJob } },
update_model_snapshot: { get () { return this.updateModelSnapshot } },
upgrade_job_snapshot: { get () { return this.upgradeJobSnapshot } },
validate_detector: { get () { return this.validateDetector } }
})

View File

@@ -168,47 +168,6 @@ RollupApi.prototype.putJob = function rollupPutJobApi (params, options, callback
return this.transport.request(request, options, callback)
}
RollupApi.prototype.rollup = function rollupRollupApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params['index'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: index')
return handleError(err, callback)
}
if (params['rollup_index'] == null && params['rollupIndex'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: rollup_index or rollupIndex')
return handleError(err, callback)
}
if (params['body'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: body')
return handleError(err, callback)
}
// check required url components
if ((params['rollup_index'] != null || params['rollupIndex'] != null) && (params['index'] == null)) {
const err = new this[kConfigurationError]('Missing required parameter of the url: index')
return handleError(err, callback)
}
var { method, body, index, rollupIndex, rollup_index, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + encodeURIComponent(index) + '/' + '_rollup' + '/' + encodeURIComponent(rollup_index || rollupIndex)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
return this.transport.request(request, options, callback)
}
RollupApi.prototype.rollupSearch = function rollupRollupSearchApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)

View File

@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['force', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'from', 'size', 'allow_no_match', 'exclude_generated', 'defer_validation', 'timeout', 'wait_for_completion', 'wait_for_checkpoint']
const snakeCase = { errorTrace: 'error_trace', filterPath: 'filter_path', allowNoMatch: 'allow_no_match', excludeGenerated: 'exclude_generated', deferValidation: 'defer_validation', waitForCompletion: 'wait_for_completion', waitForCheckpoint: 'wait_for_checkpoint' }
const acceptedQuerystring = ['force', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'from', 'size', 'allow_no_match', 'defer_validation', 'timeout', 'wait_for_completion', 'wait_for_checkpoint']
const snakeCase = { errorTrace: 'error_trace', filterPath: 'filter_path', allowNoMatch: 'allow_no_match', deferValidation: 'defer_validation', waitForCompletion: 'wait_for_completion', waitForCheckpoint: 'wait_for_checkpoint' }
function TransformApi (transport, ConfigurationError) {
this.transport = transport

View File

@@ -230,27 +230,6 @@ WatcherApi.prototype.putWatch = function watcherPutWatchApi (params, options, ca
return this.transport.request(request, options, callback)
}
WatcherApi.prototype.queryWatches = function watcherQueryWatchesApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
var { method, body, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = body == null ? 'GET' : 'POST'
path = '/' + '_watcher' + '/' + '_query' + '/' + 'watches'
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
return this.transport.request(request, options, callback)
}
WatcherApi.prototype.start = function watcherStartApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -326,8 +305,7 @@ Object.defineProperties(WatcherApi.prototype, {
delete_watch: { get () { return this.deleteWatch } },
execute_watch: { get () { return this.executeWatch } },
get_watch: { get () { return this.getWatch } },
put_watch: { get () { return this.putWatch } },
query_watches: { get () { return this.queryWatches } }
put_watch: { get () { return this.putWatch } }
})
module.exports = WatcherApi

api/kibana.d.ts
View File

@@ -78,12 +78,11 @@ interface KibanaClient {
asyncSearch: {
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchDelete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchGet, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
status<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchSubmit<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
autoscaling: {
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingDeleteAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getAutoscalingCapacity<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingCapacity, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getAutoscalingDecision<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingDecision, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getAutoscalingPolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putAutoscalingPolicy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
@@ -229,9 +228,7 @@ interface KibanaClient {
getSettings<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetSettings, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getTemplate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getUpgrade<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetUpgrade, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
migrateToDataStream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesMigrateToDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
open<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesOpen, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
promoteDataStream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPromoteDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putAlias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutAlias<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutIndexTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putMapping<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutMapping<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
@@ -334,7 +331,6 @@ interface KibanaClient {
updateFilter<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateFilter<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateModelSnapshot<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateModelSnapshot<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
upgradeJobSnapshot<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpgradeJobSnapshot, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
validate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlValidate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
validateDetector<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlValidateDetector<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
@@ -364,7 +360,6 @@ interface KibanaClient {
getRollupCaps<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupGetRollupCaps, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getRollupIndexCaps<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupGetRollupIndexCaps, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RollupPutJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rollup<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RollupRollup<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rollupSearch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RollupRollupSearch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
startJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupStartJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stopJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupStopJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
@@ -470,7 +465,6 @@ interface KibanaClient {
executeWatch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherExecuteWatch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getWatch<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherGetWatch, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putWatch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherPutWatch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
queryWatches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherQueryWatches<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
start<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherStart, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stop<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherStop, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>

View File

@@ -39,10 +39,6 @@ export interface AsyncSearchGet extends Generic {
typed_keys?: boolean;
}
export interface AsyncSearchStatus extends Generic {
id: string;
}
export interface AsyncSearchSubmit<T = RequestBody> extends Generic {
index?: string | string[];
_source_exclude?: string | string[];
@@ -95,7 +91,7 @@ export interface AutoscalingDeleteAutoscalingPolicy extends Generic {
name: string;
}
export interface AutoscalingGetAutoscalingCapacity extends Generic {
export interface AutoscalingGetAutoscalingDecision extends Generic {
}
export interface AutoscalingGetAutoscalingPolicy extends Generic {
@@ -363,10 +359,10 @@ export interface CatSnapshots extends Generic {
export interface CatTasks extends Generic {
format?: string;
nodes?: string | string[];
node_id?: string | string[];
actions?: string | string[];
detailed?: boolean;
parent_task_id?: string;
parent_task?: number;
h?: string | string[];
help?: boolean;
s?: string | string[];
@@ -998,7 +994,6 @@ export interface IndicesDeleteAlias extends Generic {
export interface IndicesDeleteDataStream extends Generic {
name: string | string[];
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
}
export interface IndicesDeleteIndexTemplate extends Generic {
@@ -1114,7 +1109,6 @@ export interface IndicesGetAlias extends Generic {
export interface IndicesGetDataStream extends Generic {
name?: string | string[];
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
}
export interface IndicesGetFieldMapping extends Generic {
@@ -1174,10 +1168,6 @@ export interface IndicesGetUpgrade extends Generic {
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
}
export interface IndicesMigrateToDataStream extends Generic {
name: string;
}
export interface IndicesOpen extends Generic {
index: string | string[];
timeout?: string;
@@ -1188,10 +1178,6 @@ export interface IndicesOpen extends Generic {
wait_for_active_shards?: string;
}
export interface IndicesPromoteDataStream extends Generic {
name: string;
}
export interface IndicesPutAlias<T = RequestBody> extends Generic {
index: string | string[];
name: string;
@@ -1626,7 +1612,6 @@ export interface MlGetDataFrameAnalytics extends Generic {
allow_no_match?: boolean;
from?: number;
size?: number;
exclude_generated?: boolean;
}
export interface MlGetDataFrameAnalyticsStats extends Generic {
@@ -1647,7 +1632,6 @@ export interface MlGetDatafeeds extends Generic {
datafeed_id?: string;
allow_no_match?: boolean;
allow_no_datafeeds?: boolean;
exclude_generated?: boolean;
}
export interface MlGetFilters extends Generic {
@@ -1679,7 +1663,6 @@ export interface MlGetJobs extends Generic {
job_id?: string;
allow_no_match?: boolean;
allow_no_jobs?: boolean;
exclude_generated?: boolean;
}
export interface MlGetModelSnapshots<T = RequestBody> extends Generic {
@@ -1729,7 +1712,7 @@ export interface MlGetTrainedModels extends Generic {
from?: number;
size?: number;
tags?: string | string[];
exclude_generated?: boolean;
for_export?: boolean;
}
export interface MlGetTrainedModelsStats extends Generic {
@@ -1874,13 +1857,6 @@ export interface MlUpdateModelSnapshot<T = RequestBody> extends Generic {
body: T;
}
export interface MlUpgradeJobSnapshot extends Generic {
job_id: string;
snapshot_id: string;
timeout?: string;
wait_for_completion?: boolean;
}
export interface MlValidate<T = RequestBody> extends Generic {
body: T;
}
@@ -2054,12 +2030,6 @@ export interface RollupPutJob<T = RequestBody> extends Generic {
body: T;
}
export interface RollupRollup<T = RequestBody> extends Generic {
index: string;
rollup_index: string;
body: T;
}
export interface RollupRollupSearch<T = RequestBody> extends Generic {
index: string | string[];
type?: string;
@@ -2506,7 +2476,6 @@ export interface TransformGetTransform extends Generic {
from?: number;
size?: number;
allow_no_match?: boolean;
exclude_generated?: boolean;
}
export interface TransformGetTransformStats extends Generic {
@@ -2651,10 +2620,6 @@ export interface WatcherPutWatch<T = RequestBody> extends Generic {
body?: T;
}
export interface WatcherQueryWatches<T = RequestBody> extends Generic {
body?: T;
}
export interface WatcherStart extends Generic {
}

View File

@@ -1,77 +1,10 @@
[[changelog-client]]
== Changelog
[discrete]
=== 7.10.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v7.10`.
You can find all the API changes https://www.elastic.co/guide/en/elasticsearch/reference/7.10/release-notes-7.10.0.html[here].
[discrete]
===== Added proxy support https://github.com/elastic/elasticsearch-js/pull/1260[#1260]
If you need to connect to Elasticsearch through an http(s) proxy, the client now offers
a handy configuration option out of the box. Under the hood it
uses the https://github.com/delvedor/hpagent[`hpagent`] module.
[source,js]
----
const client = new Client({
node: 'http://localhost:9200',
proxy: 'http://localhost:8080'
})
----
Basic authentication is supported as well:
[source,js]
----
const client = new Client({
node: 'http://localhost:9200',
proxy: 'http://user:pwd@localhost:8080'
})
----
[discrete]
==== Fixes
[discrete]
===== Scroll search should clear the scroll at the end https://github.com/elastic/elasticsearch-js/pull/1331[#1331]
From now on, the scroll search helper automatically clears the scroll on Elasticsearch;
by doing so, Elasticsearch can free its resources faster.
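A minimal sketch of the helper; once the iteration ends, the scroll is now cleared without any extra code:
[source,js]
----
async function run () {
  const scrollSearch = client.helpers.scrollSearch({
    index: 'my-index',
    body: { query: { match_all: {} } }
  })
  for await (const result of scrollSearch) {
    console.log(result.body.hits.hits)
  }
  // no manual clear_scroll request is needed at this point anymore
}
run().catch(console.log)
----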
[discrete]
===== Handle connectivity issues while reading the body https://github.com/elastic/elasticsearch-js/pull/1343[#1343]
It might happen that the underlying socket stops working due to an external cause while reading the body.
This could lead to an unwanted `DeserializationError`. From now on, this will be handled as a generic `ConnectionError`.
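In practice, a body-read failure can now be handled through the same code path as any other connection problem. A minimal sketch, using the client's `errors` export:
[source,js]
----
const { Client, errors } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  try {
    await client.search({ index: 'my-index', body: { query: { match_all: {} } } })
  } catch (err) {
    if (err instanceof errors.ConnectionError) {
      // socket failures while reading the body now land here,
      // instead of surfacing as a DeserializationError
      console.error('connection problem:', err.message)
    }
  }
}
run()
----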
[discrete]
==== Warnings
[discrete]
===== Add warning log about nodejs version support https://github.com/elastic/elasticsearch-js/pull/1349[#1349]
`7.11` will be the last version of the client that will support Node.js v8, while `7.12` will be
the last one that supports Node.js v10. If you are using one of these versions you will see a
`DeprecationWarning` in your logs. We strongly recommend upgrading to a newer version of Node.js,
as using an EOL version will expose you to security risks.
Please refer to https://ela.st/nodejs-support[ela.st/nodejs-support] for additional information.
[discrete]
=== 7.9.1
[discrete]
==== Fixes
[discrete]
===== Improve child performances https://github.com/elastic/elasticsearch-js/pull/1314[#1314]
The client code has been refactored to speed up the performance of the `child` method.
@@ -82,35 +15,29 @@ This change should not cause any breaking change unless you were mocking the cli
Finally, this change should also fix, once and for all, support for bundlers.
[discrete]
===== Throw all errors asynchronously https://github.com/elastic/elasticsearch-js/pull/1295[#1295]
Some validation errors were thrown synchronously, causing the callback to be called in the same tick.
This issue is known as _"The release of Zalgo"_ (see https://blog.izs.me/2013/08/designing-apis-for-asynchrony[here]).
[discrete]
===== Fix `maxRetries` request option handling https://github.com/elastic/elasticsearch-js/pull/1296[#1296]
The `maxRetries` parameter can be configured on a per-request basis; if set to zero, it used to fall back to the client default. Now the client honors the request-specific configuration.
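For example, a request that must not be retried can now state this explicitly; a minimal sketch:
[source,js]
----
// per-request options go in the second argument; zero now really means zero retries
client.search({
  index: 'my-index',
  body: { query: { match_all: {} } }
}, { maxRetries: 0 })
  .then(console.log)
  .catch(console.log)
----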
[discrete]
===== Fix RequestOptions.body type to include null https://github.com/elastic/elasticsearch-js/pull/1300[#1300]
The Connection request option types were not accepting `null` as a valid value.
[discrete]
===== Fixed `size` and `maxRetries` parameters in helpers https://github.com/elastic/elasticsearch-js/pull/1284[#1284]
The `size` parameter was being passed to the scroll request, which was causing an error.
`maxRetries` set to 0 was resulting in no request at all.
[discrete]
=== 7.9.0
[discrete]
==== Features
[discrete]
===== Add ability to disable the http agent https://github.com/elastic/elasticsearch-js/pull/1251[#1251]
If needed, the http agent can be disabled by setting it to `false`.
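A minimal sketch of the option:
[source,js]
----
const { Client } = require('@elastic/elasticsearch')

// `agent: false` disables the HTTP agent entirely
const client = new Client({
  node: 'http://localhost:9200',
  agent: false
})
----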
@@ -124,7 +51,6 @@ const client = new Client({
})
----
[discrete]
===== Add support for a global context option https://github.com/elastic/elasticsearch-js/pull/1256[#1256]
Before this, you could set a `context` option in each request, but there was no way of setting it globally.
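A minimal sketch of the global option, which is merged with the per-request `context`:
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // merged with the `context` option of every request
  context: { winter: 'is coming' }
})
----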
@@ -139,7 +65,6 @@ const client = new Client({
})
----
[discrete]
===== ESM support https://github.com/elastic/elasticsearch-js/pull/1235[#1235]
If you are using ES Modules, now you can easily import the client!
@@ -149,10 +74,8 @@ If you are using ES Modules, now you can easily import the client!
import { Client } from '@elastic/elasticsearch'
----
[discrete]
==== Fixes
[discrete]
===== Allow the client name to be a symbol https://github.com/elastic/elasticsearch-js/pull/1254[#1254]
It was possible in plain JavaScript, but not in TypeScript; now you can do it in TypeScript as well.
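A minimal sketch:
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // a plain string is still accepted as well
  name: Symbol('parent-client')
})
----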
@@ -166,18 +89,15 @@ const client = new Client({
})
----
[discrete]
===== Fixed transport.request querystring type https://github.com/elastic/elasticsearch-js/pull/1240[#1240]
Only `Record<string, any>` was allowed. Now `string` is allowed as well.
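Both forms now type-check; a minimal sketch using `transport.request` directly:
[source,js]
----
// equivalent requests: querystring as a preformatted string or as an object
client.transport.request({
  method: 'GET',
  path: '/_cluster/health',
  querystring: 'pretty=true'
}).then(console.log)

client.transport.request({
  method: 'GET',
  path: '/_cluster/health',
  querystring: { pretty: true }
}).then(console.log)
----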
[discrete]
===== Fixed type definitions https://github.com/elastic/elasticsearch-js/pull/1263[#1263]
* The `transport.request` definition was incorrect: it was returning a `Promise<T>` instead of `TransportRequestPromise<T>`.
* The `refresh` parameter of most APIs was declared as `'true' | 'false' | 'wait_for'`, which was clunky. Now it is `'wait_for' | boolean`.
[discrete]
===== Generate response type as boolean if the request is HEAD only https://github.com/elastic/elasticsearch-js/pull/1275[#1275]
All HEAD requests will have the body cast to a boolean value, `true` in case of a 200 response, `false` in case of
@@ -194,17 +114,14 @@ const { body } = await client.exists({ index: 'my-index', id: 'my-id' })
console.log(body) // either `true` or `false`
----
[discrete]
==== Internals
[discrete]
===== Updated default http agent configuration https://github.com/elastic/elasticsearch-js/pull/1242[#1242]
Added the `scheduling: 'lifo'` option to the default HTTP agent configuration, to avoid maxing out the number of open sockets
against Elasticsearch and to lower the risk of encountering socket timeouts.
This feature is only available from Node v14.5+, but it should be backported to v10 and v12 (https://github.com/nodejs/node/pull/33278[nodejs/node#33278]).
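If a different trade-off is needed, the agent configuration can still be overridden. A sketch, assuming the standard Node.js agent options are passed through:
[source,js]
----
const client = new Client({
  node: 'http://localhost:9200',
  // replaces the default agent options, including the new 'lifo' scheduling
  agent: {
    keepAlive: true,
    scheduling: 'fifo'
  }
})
----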
[discrete]
===== Improve child API https://github.com/elastic/elasticsearch-js/pull/1245[#1245]
This PR introduces two changes, which should not impact the surface API:
@@ -214,18 +131,14 @@ and improves the child creation performances by ~12%.
* The client no longer inherits from the EventEmitter class, but instead has an internal event emitter and exposes
only the API useful for the users, namely `emit`, `on`, `once`, and `off`. The type definitions have been updated accordingly.
[discrete]
=== 7.8.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v7.8`.
You can find all the API changes https://www.elastic.co/guide/en/elasticsearch/reference/7.8/release-notes-7.8.0.html[here].
[discrete]
===== Added multi search helper https://github.com/elastic/elasticsearch-js/pull/1186[#1186]
If you are sending search requests at a high rate, this helper might be useful for you.
@@ -258,7 +171,6 @@ m.search(
)
----
[discrete]
===== Added timeout support in bulk and msearch helpers https://github.com/elastic/elasticsearch-js/pull/1206[#1206]
If there is a slow producer, the bulk helper might wait a very long time before sending data, and if the process crashes for any reason, that data would be lost.
@@ -280,48 +192,38 @@ const m = client.helpers.msearch({
})
----
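The relevant knob is the flush interval. A minimal sketch, assuming the `flushInterval` option (in milliseconds) documented for the bulk and msearch helpers:
[source,js]
----
const m = client.helpers.msearch({
  operations: 5, // how many searches are batched in a single msearch request
  flushInterval: 500 // flush a partial batch after 500 ms instead of waiting indefinitely
})
----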
[discrete]
==== Internals
[discrete]
===== Use filter_path for improving the search helpers performances https://github.com/elastic/elasticsearch-js/pull/1199[#1199]
From now on, all the search helpers will use the `filter_path` option automatically when needed to retrieve only the hits source. This change results in less network traffic and improved deserialization performance.
[discrete]
===== Search helpers documents getter https://github.com/elastic/elasticsearch-js/pull/1186[#1186]
Before this, the `documents` key that you can access in any search helper was computed as soon as we got the search result from Elasticsearch. With this change the `documents` key is now a getter, which makes this process lazy, resulting in better performance and a lower memory impact.
[discrete]
=== 7.7.1
[discrete]
==== Fixes
[discrete]
===== Disable client Helpers in Node.js < 10 - https://github.com/elastic/elasticsearch-js/pull/1194[#1194]
The client helpers can't be used in Node.js < 10 because they need a custom flag to be enabled.
Given that not every provider allows the user to specify custom Node.js flags, the helpers have been disabled completely in Node.js < 10.
[discrete]
===== Force lowercase in all headers - https://github.com/elastic/elasticsearch-js/pull/1187[#1187]
Now all the user-provided header names will be lowercased by default, so there will be no conflicts in case of the same header with different casing.
[discrete]
=== 7.7.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v7.7`.
You can find all the API changes https://www.elastic.co/guide/en/elasticsearch/reference/7.7/release-notes-7.7.0.html[here].
[discrete]
===== Introduced client helpers - https://github.com/elastic/elasticsearch-js/pull/1107[#1107]
From now on, the client comes with a handy collection of helpers to give you a more comfortable experience with some APIs.
@@ -335,13 +237,11 @@ The following helpers has been introduced:
- `client.helpers.scrollSearch`
- `client.helpers.scrollDocuments`
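As an example, `scrollDocuments` iterates directly over the hit sources; a minimal sketch:
[source,js]
----
async function run () {
  const docs = client.helpers.scrollDocuments({
    index: 'my-index',
    body: { query: { match_all: {} } }
  })
  // yields each document's `_source`, not the whole result object
  for await (const doc of docs) {
    console.log(doc)
  }
}
run().catch(console.log)
----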
[discrete]
===== The `ConnectionPool.getConnection` now always returns a `Connection` - https://github.com/elastic/elasticsearch-js/pull/1127[#1127]
What does this mean? It means that you will see fewer `NoLivingConnectionsError`s, which can now only be caused by a selector/filter that is too strict.
To improve the debugging experience, the `NoLivingConnectionsError` error message has been updated.
[discrete]
===== Abortable promises - https://github.com/elastic/elasticsearch-js/pull/1141[#1141]
From now on, it is possible to abort a request generated with the promise-style API. If you abort a request generated from a promise, the promise will be rejected with a `RequestAbortedError`.
@@ -362,7 +262,6 @@ promise
promise.abort()
----
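A complete sketch of the pattern:
[source,js]
----
const promise = client.search({
  index: 'my-index',
  body: { query: { match_all: {} } }
})

promise
  .then(console.log)
  .catch(console.log) // rejected with RequestAbortedError once aborted

// every promise returned by an API call exposes an abort method
promise.abort()
----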
[discrete]
===== Major refactor of the Type Definitions - https://github.com/elastic/elasticsearch-js/pull/1119[#1119] https://github.com/elastic/elasticsearch-js/issues/1130[#1130] https://github.com/elastic/elasticsearch-js/pull/1132[#1132]
Now every API makes better use of generics and overloading, so you can define the request/response bodies via the generics if you wish (by default they are `Record<string, any>`).
@@ -378,31 +277,25 @@ client.search<SearchResponse, SearchBody>(...)
This *should* not be a breaking change, as every generic defaults to `any`. Some users' code might still break, but our tests didn't detect any such case, probably because they were not robust enough. However, given the gigantic improvement in the developer experience, we have decided to release this change in the 7.x line.
[discrete]
==== Fixes
[discrete]
===== The `ConnectionPool.update` method now cleans the `dead` list - https://github.com/elastic/elasticsearch-js/issues/1122[#1122] https://github.com/elastic/elasticsearch-js/pull/1127[#1127]
It could happen that updating the connections list while running sniff left the `dead` list in a dirty state. Now `ConnectionPool.update` cleans up the `dead` list every time, which makes far more sense given that all the new connections are alive.
[discrete]
===== `ConnectionPool.markDead` should ignore connections that no longer exist - https://github.com/elastic/elasticsearch-js/pull/1159[#1159]
It might happen that `markDead` is called just after a pool update; in such a case, the client was adding to the dead list a node that no longer exists, causing unhandled exceptions later.
[discrete]
===== Do not retry a request if the body is a stream - https://github.com/elastic/elasticsearch-js/pull/1143[#1143]
The client should not retry if it's sending a stream body, because it would need to keep an in-memory copy of the stream to be able to send it again; since it doesn't know the size of the stream in advance, it risks using too much memory.
Furthermore, copying the stream every time is a very expensive operation.
[discrete]
===== Return an error if the request has been aborted - https://github.com/elastic/elasticsearch-js/pull/1141[#1141]
Until now, aborting a request was blocking the HTTP request but never calling the callback or resolving the promise to notify the user. This was a bug, because it could lead to dangerous memory leaks. From now on, if the user calls the `request.abort()` method, the callback-style API will be called with a `RequestAbortedError`, and the promise will be rejected with a `RequestAbortedError` as well.
[discrete]
=== 7.6.1
**Fixes:**
@@ -414,12 +307,10 @@ Until now, aborting a request was blocking the HTTP request, but never calling t
- Fix typo in api reference - https://github.com/elastic/elasticsearch-js/pull/1109[#1109]
[discrete]
=== 7.6.0
Support for Elasticsearch `v7.6`.
[discrete]
=== 7.5.1
**Fixes:**
@@ -435,7 +326,6 @@ Support for Elasticsearch `v7.6`.
- Add examples to reference - https://github.com/elastic/elasticsearch-js/pull/1076[#1076]
- Added new examples - https://github.com/elastic/elasticsearch-js/pull/1031[#1031]
[discrete]
=== 7.5.0
Support for Elasticsearch `v7.5`.
@@ -444,7 +334,6 @@ Support for Elasticsearch `v7.5`.
- X-Opaque-Id support https://github.com/elastic/elasticsearch-js/pull/997[#997]
[discrete]
=== 7.4.0
Support for Elasticsearch `v7.4`.
@@ -462,7 +351,6 @@ Support for Elasticsearch `v7.4`.
- Update code generation https://github.com/elastic/elasticsearch-js/pull/969[#969]
[discrete]
=== 7.3.0
Support for Elasticsearch `v7.3`.
@@ -485,7 +373,6 @@ Support for Elasticsearch `v7.3`.
- Better reference code examples - https://github.com/elastic/elasticsearch-js/pull/920[#920]
- Improve README - https://github.com/elastic/elasticsearch-js/pull/909[#909]
[discrete]
=== 7.2.0
Support for Elasticsearch `v7.2`
@@ -494,7 +381,6 @@ Support for Elasticsearch `v7.2`
- Remove auth data from inspect and toJSON in connection class - https://github.com/elastic/elasticsearch-js/pull/887[#887]
[discrete]
=== 7.1.0
Support for Elasticsearch `v7.1`
@@ -504,7 +390,6 @@ Support for Elasticsearch `v7.1`
- Support for non-friendly chars in url username and password - https://github.com/elastic/elasticsearch-js/pull/858[#858]
- Patch deprecated parameters - https://github.com/elastic/elasticsearch-js/pull/851[#851]
[discrete]
=== 7.0.1
**Fixes:**
@@ -514,9 +399,8 @@ Support for Elasticsearch `v7.1`
- Fix TypeScript definition *(issue https://github.com/elastic/elasticsearch-js/pull/803[#803])* - https://github.com/elastic/elasticsearch-js/pull/846[#846]
- Added toJSON method to Connection class *(issue https://github.com/elastic/elasticsearch-js/pull/848[#848])* - https://github.com/elastic/elasticsearch-js/pull/849[#849]
[discrete]
=== 7.0.0
Support for Elasticsearch `v7.0`
- Stable release.

View File

@@ -218,15 +218,10 @@ _Default:_ `null`
_Default:_ `{}`
|`context`
|`object` - A custom object that you can use for observability in your events.
|`object` - A custom object that you can use for observability in yoru events.
It will be merged with the API level context option. +
_Default:_ `null`
|`enableMetaHeader`
|`boolean` - If true, adds a header named `'x-elastic-client-meta'`, containing some minimal telemetry data,
such as the client and platform version. +
_Default:_ `true`
|`cloud`
a|`object` - Custom configuration for connecting to
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]

View File

@@ -38,7 +38,7 @@ async function run () {
date: new Date()
}, {
id: 2,
text: 'Winter is coming',
text: 'Witer is coming',
user: 'ned',
date: new Date()
}, {

View File

@@ -1,51 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# coverage output
coverage.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules
jspm_packages
# Optional npm cache directory
.npm
# Optional REPL history
.node_repl_history
# mac files
.DS_Store
# vim swap files
*.swp
#Jetbrains editor folder
.idea
.vercel

View File

@@ -1,65 +0,0 @@
# Elasticsearch proxy example
This folder contains an example of how to build a lightweight proxy
between your frontend code and Elasticsearch if you don't
have a more sophisticated backend in place yet.
> **IMPORTANT:** This is not production-ready code and it is only for demonstration purposes;
> we make no guarantees on its security and stability.
This project is designed to be deployed on [Vercel](https://vercel.com/), a cloud platform
for static sites and Serverless Functions. You can use other functions providers,
such as [Google Cloud functions](https://cloud.google.com/functions).
## Project structure
The project comes with four endpoints:
- `/api/search`: runs a search, requires `'read'` permission
- `/api/autocomplete`: runs an autocomplete suggestion, requires `'read'` permission
- `/api/index`: indexes or updates a document, requires `'write'` permission
- `/api/delete`: deletes a document, requires `'write'` permission
Inside `utils/authorize.js` you can find the authorization logic for the endpoints.
In each endpoint you should configure the `INDEX` variable.
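For instance, a frontend call to the search endpoint might look like the sketch below. The `q` query parameter and the Bearer header follow the conventions used in the autocomplete endpoint's implementation; treat the exact request shape as an assumption.

```js
// Hypothetical browser-side call to the proxy; `apiKey` comes from your auth flow.
const response = await fetch('/api/search?q=javascript', {
  headers: { Authorization: `Bearer ${apiKey}` }
})
const results = await response.json()
console.log(results)
```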
## How to use
Create an account on Vercel, then create a deployment on Elastic Cloud. If you
don't have an account on Elastic Cloud, you can create one with a free 14-day trial
of the [Elasticsearch Service](https://www.elastic.co/elasticsearch/service).
### Configure Elasticsearch
Once you have created a deployment on Elastic Cloud, copy the generated Cloud Id and the credentials.
Then open `utils/prepare-elasticsearch.js` and fill in your credentials. The script generates
an [Api Key](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html)
that you can use for authenticating your requests. Based on the configuration of the Api Key, you will be able
to perform different operations on the specified indices or index pattern.
### Configure Vercel
Install the [Vercel CLI](https://vercel.com/docs/cli) to bootstrap the project,
or read the [quickstart](https://vercel.com/docs) documentation.
If you are using the CLI, bootstrap the project by running `vercel`. Test the project locally
with `vercel dev`, and deploy it with `vercel deploy`.
Configure the `ELASTIC_CLOUD_ID` [environment variable](https://vercel.com/docs/environment-variables) as well.
The Api Key is passed from the frontend app via an `Authorization` header as a `Bearer` token and is
used to authorize the API calls to the endpoints as well.
Additional configuration, such as CORS, can be added to [`vercel.json`](https://vercel.com/docs/configuration).
## Authentication
If you are using Elasticsearch only for search purposes, such as a search box, you can create
an Api Key with `read` permissions and store it in your frontend app. Then you can send it
via the `Authorization` header to the proxy and run your searches.
If you need to ingest data as well, it's more secure to have strong authentication in your application.
For such cases, use an external authentication service, such as [Auth0](https://auth0.com/)
or [Magic Link](https://magic.link/). Then create a different Api Key with `read` and `write`
permissions for authenticated users, which will not be stored in the frontend app.
## License
This software is licensed under the [Apache 2 license](../../LICENSE).

View File

@@ -1,105 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// IMPORTANT: this is not production-ready code and is purely for demonstration purposes,
// we make no guarantees about its security and stability
// NOTE: to make this endpoint work, you should create an ApiKey with 'read' permissions
'use strict'
const { Client } = require('@elastic/elasticsearch')
const authorize = require('../utils/authorize')
const INDEX = '<index-name>'
const client = new Client({
cloud: {
id: process.env.ELASTIC_CLOUD_ID
}
})
module.exports = async (req, res) => {
const [err, token] = authorize(req)
if (err) {
res.status(401)
res.json(err)
return
}
if (typeof req.query.q !== 'string') {
res.status(400)
res.json({
error: 'Bad Request',
message: 'Missing parameter "query.q"',
statusCode: 400
})
return
}
if (req.query.q.length < 3) {
res.status(400)
res.json({
error: 'Bad Request',
message: 'The length of "query.q" should be at least 3',
statusCode: 400
})
return
}
try {
const response = await client.search({
index: INDEX,
// You could send Elasticsearch's query DSL directly
// from the browser, but that would expose you
// to the risk that a malicious user
// could overload your cluster by crafting
// expensive queries.
body: {
_source: ['id', 'url', 'name'], // the fields you want to show in the autocompletion
size: 0,
// https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-completion.html
suggest: {
suggestions: {
prefix: req.query.q,
completion: {
field: 'suggest',
size: 5
}
}
}
}
}, {
headers: {
Authorization: `ApiKey ${token}`
}
})
// It might be useful to configure HTTP cache control headers
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
// res.setHeader('stale-while-revalidate', '30')
res.json(response.body)
} catch (err) {
res.status(err.statusCode || 500)
res.json({
error: err.name,
message: err.message,
statusCode: err.statusCode || 500
})
}
}
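// Usage sketch: this endpoint expects a query-string parameter `q` of at
// least three characters and the Api Key as a Bearer token. For example,
// from a frontend (the deployment URL and apiKey are placeholders):
//
//   const res = await fetch(
//     'https://your-deployment.vercel.app/api/autocomplete?q=ela',
//     { headers: { Authorization: `Bearer ${apiKey}` } }
//   )
//   const suggestions = await res.json()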

View File

@ -1,74 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// IMPORTANT: this is not production-ready code and is purely for demonstration purposes,
// we make no guarantees about its security and stability
// NOTE: to make this endpoint work, you should create an ApiKey with 'write' permissions
'use strict'
const { Client } = require('@elastic/elasticsearch')
const authorize = require('../utils/authorize')
const INDEX = '<index-name>'
const client = new Client({
cloud: {
id: process.env.ELASTIC_CLOUD_ID
}
})
module.exports = async (req, res) => {
const [err, token] = authorize(req)
if (err) {
res.status(401)
res.json(err)
return
}
if (typeof req.query.id !== 'string' || req.query.id.length === 0) {
res.status(400)
res.json({
error: 'Bad Request',
message: 'Missing document id',
statusCode: 400
})
return
}
try {
const response = await client.delete({
index: INDEX,
id: req.query.id
}, {
headers: {
Authorization: `ApiKey ${token}`
}
})
res.json(response.body)
} catch (err) {
res.status(err.statusCode || 500)
res.json({
error: err.name,
message: err.message,
statusCode: err.statusCode || 500
})
}
}

View File

@ -1,76 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// IMPORTANT: this is not production-ready code and is purely for demonstration purposes,
// we make no guarantees about its security and stability
// NOTE: to make this endpoint work, you should create an ApiKey with 'write' permissions
'use strict'
const { Client } = require('@elastic/elasticsearch')
const authorize = require('../utils/authorize')
const INDEX = '<index-name>'
const client = new Client({
cloud: {
id: process.env.ELASTIC_CLOUD_ID
}
})
module.exports = async (req, res) => {
const [err, token] = authorize(req)
if (err) {
res.status(401)
res.json(err)
return
}
if (typeof req.body !== 'object') {
res.status(400)
res.json({
error: 'Bad Request',
message: 'The document should be an object',
statusCode: 400
})
return
}
try {
const response = await client.index({
index: INDEX,
id: req.query.id,
body: req.body
}, {
headers: {
Authorization: `ApiKey ${token}`
}
})
res.status(response.statusCode)
res.json(response.body)
} catch (err) {
res.status(err.statusCode || 500)
res.json({
error: err.name,
message: err.message,
statusCode: err.statusCode || 500
})
}
}

View File

@ -1,86 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// IMPORTANT: this is not production-ready code and is purely for demonstration purposes,
// we make no guarantees about its security and stability
// NOTE: to make this endpoint work, you should create an ApiKey with 'read' permissions
'use strict'
const { Client } = require('@elastic/elasticsearch')
const authorize = require('../utils/authorize')
const INDEX = '<index-name>'
const client = new Client({
cloud: {
id: process.env.ELASTIC_CLOUD_ID
}
})
module.exports = async (req, res) => {
const [err, token] = authorize(req)
if (err) {
res.status(401)
res.json(err)
return
}
if (req.body == null || typeof req.body.text !== 'string') {
res.status(400)
res.json({
error: 'Bad Request',
message: 'Missing parameter "body.text"',
statusCode: 400
})
return
}
try {
const response = await client.search({
index: INDEX,
// You could send Elasticsearch's query DSL directly
// from the browser, but that would expose you
// to the risk that a malicious user
// could overload your cluster by crafting
// expensive queries.
body: {
query: {
match: { field: req.body.text }
}
}
}, {
headers: {
Authorization: `ApiKey ${token}`
}
})
// It might be useful to configure HTTP cache control headers
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
// res.setHeader('stale-while-revalidate', '30')
res.json(response.body)
} catch (err) {
res.status(err.statusCode || 500)
res.json({
error: err.name,
message: err.message,
statusCode: err.statusCode || 500
})
}
}

View File

@ -1,19 +0,0 @@
{
"name": "proxy-example",
"version": "1.0.0",
"private": true,
"description": "",
"main": "index.js",
"scripts": {
"test": "standard"
},
"keywords": [],
"author": "Tomas Della Vedova",
"license": "Apache-2.0",
"dependencies": {
"@elastic/elasticsearch": "^7.10.0"
},
"devDependencies": {
"standard": "^16.0.3"
}
}

View File

@ -1,54 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// IMPORTANT: this is not production-ready code and is purely for demonstration purposes,
// we make no guarantees about its security and stability
'use strict'
module.exports = (req) => {
const auth = req.headers.authorization
if (typeof auth !== 'string') {
return [{
error: 'Unauthorized',
message: 'Missing authorization header',
statusCode: 401
}, null]
}
const [type, token] = req.headers.authorization.split(' ')
if (type !== 'Bearer') {
return [{
error: 'Unauthorized',
message: 'Bad authorization type',
statusCode: 401
}, null]
}
if (typeof token !== 'string' || token.length === 0) {
return [{
error: 'Unauthorized',
message: 'Bad authorization token',
statusCode: 401
}, null]
}
return [null, token]
}

View File

@ -1,68 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
const { Client } = require('@elastic/elasticsearch')
// Your Cloud Id
const cloudId = ''
// Your admin username
const username = ''
// Your admin password
const password = ''
// The indices or index patterns you will need to access
const indexNames = ['my-index-name-or-pattern']
// see https://www.elastic.co/guide/en/elasticsearch/reference/current/security-privileges.html#privileges-list-indices
const privileges = ['read']
async function generateApiKeys (opts) {
const client = new Client({
cloud: {
id: cloudId
},
auth: {
username,
password
}
})
const { body } = await client.security.createApiKey({
body: {
name: 'elasticsearch-proxy',
role_descriptors: {
'elasticsearch-proxy-users': {
index: [{
names: indexNames,
privileges
}]
}
}
}
})
return Buffer.from(`${body.id}:${body.api_key}`).toString('base64')
}
generateApiKeys()
.then(console.log)
.catch(err => {
console.error(err)
process.exit(1)
})
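// For reference, the value printed above is the base64 encoding of
// `id:api_key`, which is the exact format the `ApiKey` authorization
// scheme expects. A minimal sketch (id and apiKey are placeholders):
//
//   const token = Buffer.from(`${id}:${apiKey}`).toString('base64')
//   // the proxy endpoints then send it as: `ApiKey ${token}`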

View File

@ -1,13 +0,0 @@
{
"headers": [
{
"source": "/api/(.*)",
"headers": [
{ "key": "Access-Control-Allow-Credentials", "value": "true" },
{ "key": "Access-Control-Allow-Origin", "value": "*" },
{ "key": "Access-Control-Allow-Methods", "value": "GET,OPTIONS,PATCH,DELETE,POST,PUT" },
{ "key": "Access-Control-Allow-Headers", "value": "X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version" }
]
}
]
}

View File

@ -90,5 +90,4 @@ using.
WARNING: There is no official support for the browser environment. It exposes
your {es} instance to everyone, which could lead to security issues. We
recommend that you write a lightweight proxy that uses this client instead,
you can see a proxy example https://github.com/elastic/elasticsearch-js/tree/master/docs/examples/proxy[here].
recommend that you write a lightweight proxy that uses this client instead.

View File

@ -49,15 +49,6 @@ client.on('response', (err, result) => {
The client emits the following events:
[cols=2*]
|===
|`serialization`
a|Emitted before starting serialization and compression. If you want to measure this phase duration, you should measure the time elapsed between this event and `request`.
[source,js]
----
client.on('serialization', (err, result) => {
console.log(err, result)
})
----
|`request`
a|Emitted before sending the actual request to {es} _(emitted multiple times in case of retries)_.
[source,js]
@ -67,15 +58,6 @@ client.on('request', (err, result) => {
})
----
|`deserialization`
a|Emitted before starting deserialization and decompression. If you want to measure this phase duration, you should measure the time elapsed between this event and `response`. _(This event might not be emitted in certain situations)_.
[source,js]
----
client.on('deserialization', (err, result) => {
console.log(err, result)
})
----
|`response`
a|Emitted once {es} response has been received and parsed.
[source,js]
@ -105,7 +87,7 @@ client.on('resurrect', (err, result) => {
|===
The values of `result` in `serialization`, `request`, `deserialization`, `response` and `sniff` will be:
The values of `result` in `request`, `response` and `sniff` will be:
[source,ts]
----
@ -145,29 +127,6 @@ request: {
};
----
[discrete]
==== Events order
The event order is described in the following graph; in some edge cases, the order is not guaranteed.
You can find in https://github.com/elastic/elasticsearch-js/blob/master/test/acceptance/events-order.test.js[`test/acceptance/events-order.test.js`] how the order changes based on the situation.
[source]
----
serialization
│ (serialization and compression happens between those two events)
└─▶ request
│ (actual time spent over the wire)
└─▶ deserialization
│ (deserialization and decompression happens between those two events)
└─▶ response
----
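As a rough sketch, you can combine the `request` and `response` events to measure the time spent over the wire (keep in mind that `request` can be emitted multiple times in case of retries; this example assumes a single attempt):
[source,js]
----
let start = 0
client.on('request', (err, result) => {
  if (err == null) start = Date.now()
})
client.on('response', (err, result) => {
  if (err == null) console.log('time over the wire:', Date.now() - start, 'ms')
})
----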
[discrete]
=== Correlation id

View File

@ -142,23 +142,6 @@ link:{ref}/async-search.html[Documentation] +
|===
[discrete]
=== asyncSearch.status
[source,ts]
----
client.asyncSearch.status({
id: string
})
----
link:{ref}/async-search.html[Documentation] +
[cols=2*]
|===
|`id`
|`string` - The async search ID
|===
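A minimal usage sketch (the id is a placeholder for the value returned by `asyncSearch.submit`):
[source,js]
----
const { body } = await client.asyncSearch.status({ id: '<async-search-id>' })
console.log(body)
----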
[discrete]
=== asyncSearch.submit
@ -354,7 +337,7 @@ _Default:_ `5`
[discrete]
=== autoscaling.deleteAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.deleteAutoscalingPolicy({
@ -370,18 +353,18 @@ link:{ref}/autoscaling-delete-autoscaling-policy.html[Documentation] +
|===
[discrete]
=== autoscaling.getAutoscalingCapacity
=== autoscaling.getAutoscalingDecision
*Stability:* experimental
[source,ts]
----
client.autoscaling.getAutoscalingCapacity()
client.autoscaling.getAutoscalingDecision()
----
link:{ref}/autoscaling-get-autoscaling-capacity.html[Documentation] +
link:{ref}/autoscaling-get-autoscaling-decision.html[Documentation] +
[discrete]
=== autoscaling.getAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.getAutoscalingPolicy({
@ -398,7 +381,7 @@ link:{ref}/autoscaling-get-autoscaling-policy.html[Documentation] +
[discrete]
=== autoscaling.putAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.putAutoscalingPolicy({
@ -750,9 +733,7 @@ link:{ref}/cat-indices.html[Documentation] +
|`'b' \| 'k' \| 'kb' \| 'm' \| 'mb' \| 'g' \| 'gb' \| 't' \| 'tb' \| 'p' \| 'pb'` - The unit in which to display byte values
|`local`
|`boolean` - Return local information, do not retrieve the state from master node (default: false) +
WARNING: This parameter has been deprecated.
|`boolean` - Return local information, do not retrieve the state from master node (default: false)
|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node
@ -1390,9 +1371,7 @@ link:{ref}/cat-shards.html[Documentation] +
|`'b' \| 'k' \| 'kb' \| 'm' \| 'mb' \| 'g' \| 'gb' \| 't' \| 'tb' \| 'p' \| 'pb'` - The unit in which to display byte values
|`local`
|`boolean` - Return local information, do not retrieve the state from master node (default: false) +
WARNING: This parameter has been deprecated.
|`boolean` - Return local information, do not retrieve the state from master node (default: false)
|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node
@ -1470,10 +1449,10 @@ link:{ref}/cat-snapshots.html[Documentation] +
----
client.cat.tasks({
format: string,
nodes: string | string[],
node_id: string | string[],
actions: string | string[],
detailed: boolean,
parent_task_id: string,
parent_task: number,
h: string | string[],
help: boolean,
s: string | string[],
@ -1487,7 +1466,7 @@ link:{ref}/tasks.html[Documentation] +
|`format`
|`string` - a short version of the Accept header, e.g. json, yaml
|`nodes`
|`node_id` or `nodeId`
|`string \| string[]` - A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes
|`actions`
@ -1496,8 +1475,8 @@ link:{ref}/tasks.html[Documentation] +
|`detailed`
|`boolean` - Return detailed task information (default: false)
|`parent_task_id` or `parentTaskId`
|`string` - Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all.
|`parent_task` or `parentTask`
|`number` - Return tasks with specified parent task id. Set to -1 to return all.
|`h`
|`string \| string[]` - Comma-separated list of column names to display
@ -1967,7 +1946,7 @@ link:{ref}/cluster-allocation-explain.html[Documentation] +
[discrete]
=== cluster.deleteComponentTemplate
*Stability:* experimental
[source,ts]
----
client.cluster.deleteComponentTemplate({
@ -2010,7 +1989,7 @@ _Default:_ `true`
[discrete]
=== cluster.existsComponentTemplate
*Stability:* experimental
[source,ts]
----
client.cluster.existsComponentTemplate({
@ -2035,7 +2014,7 @@ link:{ref}/indices-component-template.html[Documentation] +
[discrete]
=== cluster.getComponentTemplate
*Stability:* experimental
[source,ts]
----
client.cluster.getComponentTemplate({
@ -2199,7 +2178,7 @@ _Default:_ `30s`
[discrete]
=== cluster.putComponentTemplate
*Stability:* experimental
[source,ts]
----
client.cluster.putComponentTemplate({
@ -2957,7 +2936,7 @@ link:{ref}/enrich-stats-api.html[Documentation] +
[discrete]
=== eql.delete
*Stability:* beta
[source,ts]
----
client.eql.delete({
@ -2974,7 +2953,7 @@ link:{ref}/eql-search-api.html[Documentation] +
[discrete]
=== eql.get
*Stability:* beta
[source,ts]
----
client.eql.get({
@ -3000,7 +2979,7 @@ _Default:_ `5d`
[discrete]
=== eql.search
*Stability:* beta
[source,ts]
----
client.eql.search({
@ -4064,8 +4043,7 @@ link:{ref}/indices-aliases.html[Documentation] +
[source,ts]
----
client.indices.deleteDataStream({
name: string | string[],
expand_wildcards: 'open' | 'closed' | 'hidden' | 'none' | 'all'
name: string | string[]
})
----
link:{ref}/data-streams.html[Documentation] +
@ -4074,15 +4052,11 @@ link:{ref}/data-streams.html[Documentation] +
|`name`
|`string \| string[]` - A comma-separated list of data streams to delete; use `*` to delete all data streams
|`expand_wildcards` or `expandWildcards`
|`'open' \| 'closed' \| 'hidden' \| 'none' \| 'all'` - Whether wildcard expressions should get expanded to open or closed indices (default: open) +
_Default:_ `open`
|===
[discrete]
=== indices.deleteIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.deleteIndexTemplate({
@ -4212,7 +4186,7 @@ _Default:_ `all`
[discrete]
=== indices.existsIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.existsIndexTemplate({
@ -4552,8 +4526,7 @@ _Default:_ `all`
[source,ts]
----
client.indices.getDataStream({
name: string | string[],
expand_wildcards: 'open' | 'closed' | 'hidden' | 'none' | 'all'
name: string | string[]
})
----
link:{ref}/data-streams.html[Documentation] +
@ -4562,10 +4535,6 @@ link:{ref}/data-streams.html[Documentation] +
|`name`
|`string \| string[]` - A comma-separated list of data streams to get; use `*` to get all data streams
|`expand_wildcards` or `expandWildcards`
|`'open' \| 'closed' \| 'hidden' \| 'none' \| 'all'` - Whether wildcard expressions should get expanded to open or closed indices (default: open) +
_Default:_ `open`
|===
[discrete]
@ -4622,7 +4591,7 @@ _Default:_ `open`
[discrete]
=== indices.getIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.getIndexTemplate({
@ -4812,23 +4781,6 @@ _Default:_ `open`
|===
[discrete]
=== indices.migrateToDataStream
[source,ts]
----
client.indices.migrateToDataStream({
name: string
})
----
link:{ref}/data-streams.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the alias to migrate
|===
[discrete]
=== indices.open
@ -4871,23 +4823,6 @@ _Default:_ `closed`
|===
[discrete]
=== indices.promoteDataStream
[source,ts]
----
client.indices.promoteDataStream({
name: string
})
----
link:{ref}/data-streams.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the data stream
|===
[discrete]
=== indices.putAlias
@ -4923,7 +4858,7 @@ link:{ref}/indices-aliases.html[Documentation] +
[discrete]
=== indices.putIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.putIndexTemplate({
@ -5360,7 +5295,7 @@ link:{ref}/indices-shrink-index.html[Documentation] +
[discrete]
=== indices.simulateIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.simulateIndexTemplate({
@ -5393,7 +5328,7 @@ link:{ref}/indices-templates.html[Documentation] +
[discrete]
=== indices.simulateTemplate
*Stability:* experimental
[source,ts]
----
client.indices.simulateTemplate({
@ -6117,7 +6052,7 @@ link:{ref}/ml-delete-calendar-job.html[Documentation] +
[discrete]
=== ml.deleteDataFrameAnalytics
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.deleteDataFrameAnalytics({
@ -6285,7 +6220,7 @@ link:{ref}/ml-delete-snapshot.html[Documentation] +
[discrete]
=== ml.deleteTrainedModel
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.deleteTrainedModel({
@ -6319,7 +6254,7 @@ link:{ref}/ml-apis.html[Documentation] +
[discrete]
=== ml.evaluateDataFrame
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.evaluateDataFrame({
@ -6336,7 +6271,7 @@ link:{ref}/evaluate-dfanalytics.html[Documentation] +
[discrete]
=== ml.explainDataFrameAnalytics
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.explainDataFrameAnalytics({
@ -6667,15 +6602,14 @@ link:{ref}/ml-get-category.html[Documentation] +
[discrete]
=== ml.getDataFrameAnalytics
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.getDataFrameAnalytics({
id: string,
allow_no_match: boolean,
from: number,
size: number,
exclude_generated: boolean
size: number
})
----
link:{ref}/get-dfanalytics.html[Documentation] +
@ -6695,14 +6629,11 @@ _Default:_ `true`
|`number` - specifies a max number of analytics to get +
_Default:_ `100`
|`exclude_generated` or `excludeGenerated`
|`boolean` - Omits fields that are illegal to set on data frame analytics PUT
|===
[discrete]
=== ml.getDataFrameAnalyticsStats
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.getDataFrameAnalyticsStats({
@ -6770,8 +6701,7 @@ WARNING: This parameter has been deprecated.
client.ml.getDatafeeds({
datafeed_id: string,
allow_no_match: boolean,
allow_no_datafeeds: boolean,
exclude_generated: boolean
allow_no_datafeeds: boolean
})
----
link:{ref}/ml-get-datafeed.html[Documentation] +
@ -6788,9 +6718,6 @@ link:{ref}/ml-get-datafeed.html[Documentation] +
WARNING: This parameter has been deprecated.
|`exclude_generated` or `excludeGenerated`
|`boolean` - Omits fields that are illegal to set on datafeed PUT
|===
[discrete]
@ -6906,8 +6833,7 @@ WARNING: This parameter has been deprecated.
client.ml.getJobs({
job_id: string,
allow_no_match: boolean,
allow_no_jobs: boolean,
exclude_generated: boolean
allow_no_jobs: boolean
})
----
link:{ref}/ml-get-job.html[Documentation] +
@ -6924,9 +6850,6 @@ link:{ref}/ml-get-job.html[Documentation] +
WARNING: This parameter has been deprecated.
|`exclude_generated` or `excludeGenerated`
|`boolean` - Omits fields that are illegal to set on job PUT
|===
[discrete]
@ -7088,7 +7011,7 @@ link:{ref}/ml-get-record.html[Documentation] +
[discrete]
=== ml.getTrainedModels
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.getTrainedModels({
@ -7100,7 +7023,7 @@ client.ml.getTrainedModels({
from: number,
size: number,
tags: string | string[],
exclude_generated: boolean
for_export: boolean
})
----
link:{ref}/get-trained-models.html[Documentation] +
@ -7135,14 +7058,14 @@ _Default:_ `100`
|`tags`
|`string \| string[]` - A comma-separated list of tags that the model must have.
|`exclude_generated` or `excludeGenerated`
|`for_export` or `forExport`
|`boolean` - Omits fields that are illegal to set on model PUT
|===
[discrete]
=== ml.getTrainedModelsStats
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.getTrainedModelsStats({
@ -7309,7 +7232,7 @@ link:{ref}/ml-put-calendar-job.html[Documentation] +
[discrete]
=== ml.putDataFrameAnalytics
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.putDataFrameAnalytics({
@ -7409,7 +7332,7 @@ link:{ref}/ml-put-job.html[Documentation] +
[discrete]
=== ml.putTrainedModel
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.putTrainedModel({
@ -7480,7 +7403,7 @@ link:{ref}/ml-set-upgrade-mode.html[Documentation] +
[discrete]
=== ml.startDataFrameAnalytics
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.startDataFrameAnalytics({
@ -7538,7 +7461,7 @@ link:{ref}/ml-start-datafeed.html[Documentation] +
[discrete]
=== ml.stopDataFrameAnalytics
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.stopDataFrameAnalytics({
@ -7610,7 +7533,7 @@ WARNING: This parameter has been deprecated.
[discrete]
=== ml.updateDataFrameAnalytics
*Stability:* beta
*Stability:* experimental
[source,ts]
----
client.ml.updateDataFrameAnalytics({
@ -7733,35 +7656,6 @@ link:{ref}/ml-update-snapshot.html[Documentation] +
|===
[discrete]
=== ml.upgradeJobSnapshot
[source,ts]
----
client.ml.upgradeJobSnapshot({
job_id: string,
snapshot_id: string,
timeout: string,
wait_for_completion: boolean
})
----
link:{ref}/ml-upgrade-job-model-snapshot.html[Documentation] +
[cols=2*]
|===
|`job_id` or `jobId`
|`string` - The ID of the job
|`snapshot_id` or `snapshotId`
|`string` - The ID of the snapshot
|`timeout`
|`string` - How long should the API wait for the job to be opened and the old snapshot to be loaded.
|`wait_for_completion` or `waitForCompletion`
|`boolean` - Should the request wait until the task is complete before responding to the caller. Default is false.
|===
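A minimal usage sketch (the job and snapshot ids are placeholders):
[source,js]
----
await client.ml.upgradeJobSnapshot({
  job_id: '<job-id>',
  snapshot_id: '<snapshot-id>',
  wait_for_completion: true
})
----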
[discrete]
=== ml.validate
@ -8492,31 +8386,6 @@ link:{ref}/rollup-put-job.html[Documentation] +
|===
[discrete]
=== rollup.rollup
[source,ts]
----
client.rollup.rollup({
index: string,
rollup_index: string,
body: object
})
----
link:{ref}/rollup-api.html[Documentation] +
[cols=2*]
|===
|`index`
|`string` - The index to roll up
|`rollup_index` or `rollupIndex`
|`string` - The name of the rollup index to create
|`body`
|`object` - The rollup configuration
|===
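A minimal usage sketch (index names are placeholders and the rollup configuration is elided):
[source,js]
----
await client.rollup.rollup({
  index: '<source-index>',
  rollup_index: '<rollup-index>',
  body: { /* the rollup configuration */ }
})
----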
[discrete]
=== rollup.rollupSearch
*Stability:* experimental
@ -10143,7 +10012,7 @@ link:{ref}/security-api-ssl.html[Documentation] +
[discrete]
=== tasks.cancel
*Stability:* experimental
[source,ts]
----
client.tasks.cancel({
@ -10176,7 +10045,7 @@ link:{ref}/tasks.html[Documentation] +
[discrete]
=== tasks.get
*Stability:* experimental
[source,ts]
----
client.tasks.get({
@ -10201,7 +10070,7 @@ link:{ref}/tasks.html[Documentation] +
[discrete]
=== tasks.list
*Stability:* experimental
[source,ts]
----
client.tasks.list({
@ -10348,8 +10217,7 @@ client.transform.getTransform({
transform_id: string,
from: number,
size: number,
allow_no_match: boolean,
exclude_generated: boolean
allow_no_match: boolean
})
----
link:{ref}/get-transform.html[Documentation] +
@ -10367,9 +10235,6 @@ link:{ref}/get-transform.html[Documentation] +
|`allow_no_match` or `allowNoMatch`
|`boolean` - Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified)
|`exclude_generated` or `excludeGenerated`
|`boolean` - Omits fields that are illegal to set on transform PUT
|===
[discrete]
@ -10951,23 +10816,6 @@ link:{ref}/watcher-api-put-watch.html[Documentation] +
|===
[discrete]
=== watcher.queryWatches
[source,ts]
----
client.watcher.queryWatches({
body: object
})
----
link:{ref}/watcher-api-query-watches.html[Documentation] +
[cols=2*]
|===
|`body`
|`object` - From, size, query, sort and search_after
|===
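A minimal usage sketch, fetching the first ten watches:
[source,js]
----
const { body } = await client.watcher.queryWatches({
  body: { from: 0, size: 10, query: { match_all: {} } }
})
console.log(body)
----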
[discrete]
=== watcher.start

index.d.ts
View File

@ -108,7 +108,6 @@ interface ClientOptions {
auth?: BasicAuth | ApiKeyAuth;
context?: Context;
proxy?: string | URL;
enableMetaHeader?: boolean;
cloud?: {
id: string;
// TODO: remove username and password here in 8
@ -149,10 +148,6 @@ declare class Client {
get<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchGet, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchGet, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
status<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
status<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
status<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchStatus, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
status<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchStatus, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchSubmit<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchSubmit<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -167,10 +162,6 @@ declare class Client {
get<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchGet, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchGet, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
status<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
status<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
status<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchStatus, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
status<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchStatus, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchSubmit<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.AsyncSearchSubmit<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -185,14 +176,14 @@ declare class Client {
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingDeleteAutoscalingPolicy, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingDeleteAutoscalingPolicy, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_capacity<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingCapacity, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_autoscaling_capacity<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_capacity<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingCapacity, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_capacity<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingCapacity, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingCapacity<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingCapacity, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getAutoscalingCapacity<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingCapacity<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingCapacity, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingCapacity<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingCapacity, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_decision<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingDecision, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_autoscaling_decision<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_decision<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingDecision, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_decision<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingDecision, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingDecision<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingDecision, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getAutoscalingDecision<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingDecision<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingDecision, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingDecision<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingDecision, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_policy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_autoscaling_policy<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_policy<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.AutoscalingGetAutoscalingPolicy, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -1045,26 +1036,10 @@ declare class Client {
getUpgrade<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getUpgrade<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesGetUpgrade, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getUpgrade<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesGetUpgrade, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
migrate_to_data_stream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesMigrateToDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
migrate_to_data_stream<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
migrate_to_data_stream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesMigrateToDataStream, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
migrate_to_data_stream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesMigrateToDataStream, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
migrateToDataStream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesMigrateToDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
migrateToDataStream<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
migrateToDataStream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesMigrateToDataStream, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
migrateToDataStream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesMigrateToDataStream, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
open<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesOpen, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
open<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
open<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesOpen, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
open<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesOpen, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
promote_data_stream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPromoteDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
promote_data_stream<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
promote_data_stream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesPromoteDataStream, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
promote_data_stream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesPromoteDataStream, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
promoteDataStream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPromoteDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
promoteDataStream<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
promoteDataStream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesPromoteDataStream, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
promoteDataStream<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesPromoteDataStream, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_alias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutAlias<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
put_alias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_alias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.IndicesPutAlias<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -1753,14 +1728,6 @@ declare class Client {
updateModelSnapshot<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
updateModelSnapshot<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.MlUpdateModelSnapshot<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
updateModelSnapshot<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.MlUpdateModelSnapshot<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
upgrade_job_snapshot<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpgradeJobSnapshot, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
upgrade_job_snapshot<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
upgrade_job_snapshot<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.MlUpgradeJobSnapshot, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
upgrade_job_snapshot<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.MlUpgradeJobSnapshot, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
upgradeJobSnapshot<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpgradeJobSnapshot, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
upgradeJobSnapshot<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
upgradeJobSnapshot<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.MlUpgradeJobSnapshot, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
upgradeJobSnapshot<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.MlUpgradeJobSnapshot, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
validate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlValidate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
validate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
validate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.MlValidate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -1915,10 +1882,6 @@ declare class Client {
putJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.RollupPutJob<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.RollupPutJob<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
rollup<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RollupRollup<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rollup<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
rollup<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.RollupRollup<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
rollup<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.RollupRollup<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
rollup_search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RollupRollupSearch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rollup_search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
rollup_search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.RollupRollupSearch<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -2575,14 +2538,6 @@ declare class Client {
putWatch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putWatch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.WatcherPutWatch<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putWatch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.WatcherPutWatch<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
query_watches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherQueryWatches<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
query_watches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
query_watches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.WatcherQueryWatches<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
query_watches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.WatcherQueryWatches<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
queryWatches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherQueryWatches<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
queryWatches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
queryWatches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.WatcherQueryWatches<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
queryWatches<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.WatcherQueryWatches<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
start<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherStart, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
start<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
start<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.WatcherStart, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -2610,10 +2565,8 @@ declare class Client {
}
declare const events: {
SERIALIZATION: string;
REQUEST: string;
DESERIALIZATION: string;
RESPONSE: string;
REQUEST: string;
SNIFF: string;
RESURRECT: string;
};
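// Usage sketch from JavaScript: these constants mirror the string event
// names exported by the package, e.g.
//
//   const { Client, events } = require('@elastic/elasticsearch')
//   const client = new Client({ node: 'http://localhost:9200' })
//   client.on(events.RESPONSE, (err, result) => {
//     if (err) console.error(err)
//   })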

View File

@ -33,8 +33,6 @@ const Serializer = require('./lib/Serializer')
const errors = require('./lib/errors')
const { ConfigurationError } = errors
const { prepareHeaders } = Connection.internals
const clientVersion = require('./package.json').version
const nodeVersion = process.versions.node
const kInitialOptions = Symbol('elasticsearchjs-initial-options')
const kChild = Symbol('elasticsearchjs-child')
@ -127,18 +125,13 @@ class Client extends ESAPI {
auth: null,
opaqueIdPrefix: null,
context: null,
proxy: null,
enableMetaHeader: true
proxy: null
}, opts)
this[kInitialOptions] = options
this[kExtensions] = []
this.name = options.name
if (options.enableMetaHeader) {
options.headers['x-elastic-client-meta'] = `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}`
}
if (opts[kChild] !== undefined) {
this.serializer = options[kChild].serializer
this.connectionPool = options[kChild].connectionPool
@ -186,13 +179,7 @@ class Client extends ESAPI {
/* istanbul ignore else */
if (Helpers !== null) {
this.helpers = new Helpers({
client: this,
maxRetries: options.maxRetries,
metaHeader: options.enableMetaHeader
? `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}`
: null
})
this.helpers = new Helpers({ client: this, maxRetries: options.maxRetries })
}
}
@ -327,9 +314,7 @@ const events = {
RESPONSE: 'response',
REQUEST: 'request',
SNIFF: 'sniff',
RESURRECT: 'resurrect',
SERIALIZATION: 'serialization',
DESERIALIZATION: 'deserialization'
RESURRECT: 'resurrect'
}
module.exports = {

View File

@ -82,7 +82,6 @@ class Connection {
request (params, callback) {
this._openRequests++
let cleanedListeners = false
const requestParams = this.buildRequestObject(params)
// https://github.com/nodejs/node/commit/b961d9fd83
@ -135,7 +134,7 @@ class Connection {
if (isStream(params.body) === true) {
pump(params.body, request, err => {
/* istanbul ignore if */
if (err != null && cleanedListeners === false) {
if (err != null) {
cleanListeners()
this._openRequests--
callback(err, null)
@ -152,7 +151,6 @@ class Connection {
request.removeListener('timeout', onTimeout)
request.removeListener('error', onError)
request.removeListener('abort', onAbort)
cleanedListeners = true
}
}

View File

@ -28,14 +28,12 @@ const { ResponseError, ConfigurationError } = require('./errors')
const pImmediate = promisify(setImmediate)
const sleep = promisify(setTimeout)
const kClient = Symbol('elasticsearch-client')
const kMetaHeader = Symbol('meta header')
/* istanbul ignore next */
const noop = () => {}
class Helpers {
constructor (opts) {
this[kClient] = opts.client
this[kMetaHeader] = opts.metaHeader
this.maxRetries = opts.maxRetries
}
@ -73,10 +71,6 @@ class Helpers {
* @return {iterator} the async iterator
*/
async * scrollSearch (params, options = {}) {
if (this[kMetaHeader] !== null) {
options.headers = options.headers || {}
options.headers['x-elastic-client-meta'] = this[kMetaHeader] + ',h=s'
}
// TODO: study scroll search slices
const wait = options.wait || 5000
const maxRetries = options.maxRetries || this.maxRetries
@ -105,7 +99,7 @@ class Helpers {
stop = true
await this[kClient].clearScroll(
{ body: { scroll_id } },
{ ignore: [400], ...options }
{ ignore: [400] }
)
}
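// Usage sketch for the scrollSearch helper (assumes an existing index
// named 'my-index' on a reachable cluster):
//
//   for await (const result of client.helpers.scrollSearch({
//     index: 'my-index',
//     body: { query: { match_all: {} } }
//   })) {
//     console.log(result.body.hits.hits)
//   }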
@ -420,7 +414,6 @@ class Helpers {
bulk (options) {
const client = this[kClient]
const { serialize, deserialize } = client.serializer
const reqOptions = this[kMetaHeader] !== null ? { headers: { 'x-elastic-client-meta': this[kMetaHeader] + ',h=bp' } } : {}
const {
datasource,
onDocument,
@ -683,7 +676,7 @@ class Helpers {
function tryBulk (bulkBody, callback) {
if (shouldAbort === true) return callback(null, [])
client.bulk(Object.assign({}, bulkOptions, { body: bulkBody }), reqOptions, (err, { body }) => {
client.bulk(Object.assign({}, bulkOptions, { body: bulkBody }), (err, { body }) => {
if (err) return callback(err, null)
if (body.errors === false) {
stats.successful += body.items.length

View File

@ -22,7 +22,6 @@
const debug = require('debug')('elasticsearch')
const os = require('os')
const { gzip, unzip, createGzip } = require('zlib')
const buffer = require('buffer')
const ms = require('ms')
const {
ConnectionError,
@ -36,15 +35,12 @@ const noop = () => {}
const clientVersion = require('../package.json').version
const userAgent = `elasticsearch-js/${clientVersion} (${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})`
const MAX_BUFFER_LENGTH = buffer.constants.MAX_LENGTH
const MAX_STRING_LENGTH = buffer.constants.MAX_STRING_LENGTH
class Transport {
constructor (opts) {
if (typeof opts.compression === 'string' && opts.compression !== 'gzip') {
throw new ConfigurationError(`Invalid compression: '${opts.compression}'`)
}
this.emit = opts.emit
this.connectionPool = opts.connectionPool
this.serializer = opts.serializer
@ -222,22 +218,6 @@ class Transport {
const contentEncoding = (result.headers['content-encoding'] || '').toLowerCase()
const isCompressed = contentEncoding.indexOf('gzip') > -1 || contentEncoding.indexOf('deflate') > -1
/* istanbul ignore else */
if (result.headers['content-length'] !== undefined) {
const contentLength = Number(result.headers['content-length'])
if (isCompressed && contentLength > MAX_BUFFER_LENGTH) {
response.destroy()
return onConnectionError(
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed buffer (${MAX_BUFFER_LENGTH})`, result)
)
} else if (contentLength > MAX_STRING_LENGTH) {
response.destroy()
return onConnectionError(
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed string (${MAX_STRING_LENGTH})`, result)
)
}
}
// if the response is compressed, we must handle it
// as buffer for allowing decompression later
let payload = isCompressed ? [] : ''
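The deleted guard compared the declared content-length against Node's hard buffer and string ceilings before reading the body. Distilled into a standalone sketch (not the exact Transport code path):

'use strict'
const buffer = require('buffer')

function checkContentLength (headers, isCompressed) {
  if (headers['content-length'] === undefined) return null
  const contentLength = Number(headers['content-length'])
  // Compressed payloads are collected as Buffers, plain ones as strings,
  // so each form has its own ceiling.
  const max = isCompressed
    ? buffer.constants.MAX_LENGTH
    : buffer.constants.MAX_STRING_LENGTH
  if (contentLength > max) {
    return new Error(`content length ${contentLength} exceeds the maximum of ${max}`)
  }
  return null
}

console.log(checkContentLength({ 'content-length': '123' }, false)) // null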
@ -269,8 +249,6 @@ class Transport {
if (!isCompressed) {
response.setEncoding('utf8')
}
this.emit('deserialization', null, result)
response.on('data', onData)
response.on('error', onEnd)
response.on('end', onEnd)
@ -343,7 +321,6 @@ class Transport {
}
}
this.emit('serialization', null, result)
const headers = Object.assign({}, this.headers, lowerCaseHeaders(options.headers))
if (options.opaqueId !== undefined) {
@ -358,7 +335,6 @@ class Transport {
try {
params.body = this.serializer.serialize(params.body)
} catch (err) {
this.emit('request', err, result)
process.nextTick(callback, err, result)
return transportReturn
}
@ -374,7 +350,6 @@ class Transport {
try {
params.body = this.serializer.ndserialize(params.bulkBody)
} catch (err) {
this.emit('request', err, result)
process.nextTick(callback, err, result)
return transportReturn
}
@ -414,7 +389,6 @@ class Transport {
gzip(params.body, (err, buffer) => {
/* istanbul ignore next */
if (err) {
this.emit('request', err, result)
return callback(err, result)
}
params.headers['content-encoding'] = compression
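The surrounding code path reduces to zlib's gzip plus the matching headers; a standalone sketch:

'use strict'
const { gzip } = require('zlib')

const body = JSON.stringify({ query: { match_all: {} } })

gzip(body, (err, compressed) => {
  if (err) throw err
  const headers = {
    'content-encoding': 'gzip',
    'content-length': '' + Buffer.byteLength(compressed)
  }
  // `compressed` would then be written as the request body
  console.log(headers)
})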

View File

@ -11,7 +11,7 @@
"./": "./"
},
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
"version": "7.11.0",
"version": "7.10.0",
"keywords": [
"elasticsearch",
"elastic",

View File

@ -1,462 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
const { test } = require('tap')
const intoStream = require('into-stream')
const { Client, Connection, events } = require('../../index')
const {
TimeoutError,
ConnectionError,
ResponseError,
RequestAbortedError,
SerializationError,
DeserializationError
} = require('../../lib/errors')
const {
buildServer,
connection: {
MockConnection,
MockConnectionError,
MockConnectionTimeout,
buildMockConnection
}
} = require('../utils')
test('No errors', t => {
t.plan(10)
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.DESERIALIZATION,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.DESERIALIZATION)
})
client.on(events.RESPONSE, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.RESPONSE)
})
client.info((err, result) => {
t.error(err)
t.strictEqual(order.length, 0)
})
})
test('Connection error', t => {
t.plan(10)
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnectionError,
maxRetries: 1
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.REQUEST,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (_err, request) => {
t.fail('Should not be called')
})
client.on(events.RESPONSE, (err, request) => {
t.ok(err instanceof ConnectionError)
t.strictEqual(order.shift(), events.RESPONSE)
})
client.info((err, result) => {
t.ok(err instanceof ConnectionError)
t.strictEqual(order.length, 0)
})
})
test('TimeoutError error', t => {
t.plan(10)
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnectionTimeout,
maxRetries: 1
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.REQUEST,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (_err, request) => {
t.fail('Should not be called')
})
client.on(events.RESPONSE, (err, request) => {
t.ok(err instanceof TimeoutError)
t.strictEqual(order.shift(), events.RESPONSE)
})
client.info((err, result) => {
t.ok(err instanceof TimeoutError)
t.strictEqual(order.length, 0)
})
})
test('RequestAbortedError error', t => {
t.plan(8)
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnectionTimeout,
maxRetries: 1
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (_err, request) => {
t.fail('Should not be called')
})
client.on(events.RESPONSE, (err, request) => {
t.ok(err instanceof RequestAbortedError)
t.strictEqual(order.shift(), events.RESPONSE)
})
const request = client.info((err, result) => {
t.ok(err instanceof RequestAbortedError)
t.strictEqual(order.length, 0)
})
request.abort()
})
test('ResponseError error (no retry)', t => {
t.plan(10)
const MockConnection = buildMockConnection({
onRequest (params) {
return {
statusCode: 400,
body: { hello: 'world' }
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxRetries: 1
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.DESERIALIZATION,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.DESERIALIZATION)
})
client.on(events.RESPONSE, (err, request) => {
t.ok(err instanceof ResponseError)
t.strictEqual(order.shift(), events.RESPONSE)
})
client.info((err, result) => {
t.ok(err instanceof ResponseError)
t.strictEqual(order.length, 0)
})
})
test('ResponseError error (with retry)', t => {
t.plan(14)
const MockConnection = buildMockConnection({
onRequest (params) {
return {
statusCode: 504,
body: { hello: 'world' }
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxRetries: 1
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.DESERIALIZATION,
events.REQUEST,
events.DESERIALIZATION,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.DESERIALIZATION)
})
client.on(events.RESPONSE, (err, request) => {
t.ok(err instanceof ResponseError)
t.strictEqual(order.shift(), events.RESPONSE)
})
client.info((err, result) => {
t.ok(err instanceof ResponseError)
t.strictEqual(order.length, 0)
})
})
test('Serialization Error', t => {
t.plan(6)
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxRetries: 1
})
const order = [
events.SERIALIZATION,
events.REQUEST
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.ok(err instanceof SerializationError)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (_err, request) => {
t.fail('Should not be called')
})
client.on(events.RESPONSE, (_err, request) => {
t.fail('Should not be called')
})
const body = {}
body.o = body
client.index({ index: 'test', body }, (err, result) => {
t.ok(err instanceof SerializationError)
t.strictEqual(order.length, 0)
})
})
test('Deserialization Error', t => {
t.plan(10)
class MockConnection extends Connection {
request (params, callback) {
const body = '{"hello":"wor'
const stream = intoStream(body)
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-length': body.length,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxRetries: 1
})
const order = [
events.SERIALIZATION,
events.REQUEST,
events.DESERIALIZATION,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.DESERIALIZATION)
})
client.on(events.RESPONSE, (err, request) => {
t.ok(err instanceof DeserializationError)
t.strictEqual(order.shift(), events.RESPONSE)
})
client.info((err, result) => {
t.ok(err instanceof DeserializationError)
t.strictEqual(order.length, 0)
})
})
test('Socket destroyed while reading the body', t => {
t.plan(14)
function handler (req, res) {
const body = JSON.stringify({ hello: 'world' })
res.setHeader('Content-Type', 'application/json;utf=8')
res.setHeader('Content-Length', body.length + '')
res.write(body.slice(0, -5))
setTimeout(() => {
res.socket.destroy()
}, 500)
}
buildServer(handler, ({ port }, server) => {
const client = new Client({ node: `http://localhost:${port}`, maxRetries: 1 })
const order = [
events.SERIALIZATION,
events.REQUEST,
events.DESERIALIZATION,
events.REQUEST,
events.DESERIALIZATION,
events.RESPONSE
]
client.on(events.SERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.SERIALIZATION)
})
client.on(events.REQUEST, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.REQUEST)
})
client.on(events.DESERIALIZATION, (err, request) => {
t.error(err)
t.strictEqual(order.shift(), events.DESERIALIZATION)
})
client.on(events.RESPONSE, (err, request) => {
t.ok(err instanceof ConnectionError)
t.strictEqual(order.shift(), events.RESPONSE)
})
client.info((err, result) => {
t.ok(err instanceof ConnectionError)
t.strictEqual(order.length, 0)
server.stop()
})
})
})

View File

@ -49,45 +49,4 @@ function to (promise) {
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
function isXPackTemplate (name) {
if (name.startsWith('.monitoring-')) {
return true
}
if (name.startsWith('.watch') || name.startsWith('.triggered_watches')) {
return true
}
if (name.startsWith('.data-frame-')) {
return true
}
if (name.startsWith('.ml-')) {
return true
}
if (name.startsWith('.transform-')) {
return true
}
switch (name) {
case '.watches':
case 'logstash-index-template':
case '.logstash-management':
case 'security_audit_log':
case '.slm-history':
case '.async-search':
case 'saml-service-provider':
case 'ilm-history':
case 'logs':
case 'logs-settings':
case 'logs-mappings':
case 'metrics':
case 'metrics-settings':
case 'metrics-mappings':
case 'synthetics':
case 'synthetics-settings':
case 'synthetics-mappings':
case '.snapshot-blob-cache':
case '.deprecation-indexing-template':
return true
}
return false
}
module.exports = { runInParallel, delve, to, sleep, isXPackTemplate }
module.exports = { runInParallel, delve, to, sleep }

View File

@ -43,10 +43,9 @@ const MAX_API_TIME = 1000 * 90
const MAX_FILE_TIME = 1000 * 30
const MAX_TEST_TIME = 1000 * 3
const freeSkips = {
const ossSkips = {
'cat.indices/10_basic.yml': ['Test cat indices output for closed index (pre 7.2.0)'],
'cluster.health/10_basic.yml': ['cluster health with closed index (pre 7.2.0)'],
// TODO: remove this once 'arbitrary_key' is implemented
// https://github.com/elastic/elasticsearch/pull/41492
'indices.split/30_copy_settings.yml': ['*'],
@ -57,18 +56,12 @@ const freeSkips = {
// which triggers a retry and causes the node to be marked as dead
'search.aggregation/240_max_buckets.yml': ['*']
}
const platinumBlackList = {
// these two test cases are broken, we should
// come back to them in the future.
'analytics/top_metrics.yml': [
'sort by keyword field fails',
'sort by string script fails'
],
const xPackBlackList = {
// file path: test name
'cat.aliases/10_basic.yml': ['Empty cluster'],
'index/10_with_id.yml': ['Index with ID'],
'indices.get_alias/10_basic.yml': ['Get alias against closed indices'],
'indices.get_alias/20_empty.yml': ['Check empty aliases when getting all aliases via /_alias'],
'text_structure/find_structure.yml': ['*'],
// https://github.com/elastic/elasticsearch/pull/39400
'ml/jobs_crud.yml': ['Test put job with id that is already taken'],
// object keys must be strings, and `0.0.toString()` is `0`
@ -89,7 +82,6 @@ const platinumBlackList = {
'monitoring/bulk/20_privileges.yml': ['*'],
'license/20_put_license.yml': ['*'],
'snapshot/10_basic.yml': ['*'],
'snapshot/20_operator_privileges_disabled.yml': ['*'],
// the body is correct, but the regex is failing
'sql/sql.yml': ['Getting textual representation'],
// we are setting two certificates in the docker config
@ -163,9 +155,9 @@ async function start ({ client, isXPack }) {
log(`Checking out sha ${sha}...`)
await withSHA(sha)
log(`Testing ${isXPack ? 'Platinum' : 'Free'} api...`)
log(`Testing ${isXPack ? 'XPack' : 'oss'} api...`)
const junit = createJunitReporter()
const junitTestSuites = junit.testsuites(`Integration test for ${isXPack ? 'Platinum' : 'Free'} api`)
const junitTestSuites = junit.testsuites(`Integration test for ${isXPack ? 'XPack' : 'oss'} api`)
const stats = {
total: 0,
@ -256,7 +248,7 @@ async function start ({ client, isXPack }) {
junitTestCase.end()
junitTestSuite.end()
junitTestSuites.end()
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
generateJunitXmlReport(junit, isXPack ? 'xpack' : 'oss')
console.error(err)
process.exit(1)
}
@ -284,7 +276,7 @@ async function start ({ client, isXPack }) {
}
}
junitTestSuites.end()
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
generateJunitXmlReport(junit, isXPack ? 'xpack' : 'oss')
log(`Total testing time: ${ms(now() - totalTime)}`)
log(`Test stats:
- Total: ${stats.total}
@ -427,26 +419,26 @@ if (require.main === module) {
}
const shouldSkip = (isXPack, file, name) => {
var list = Object.keys(freeSkips)
var list = Object.keys(ossSkips)
for (var i = 0; i < list.length; i++) {
const freeTest = freeSkips[list[i]]
for (var j = 0; j < freeTest.length; j++) {
if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) {
const ossTest = ossSkips[list[i]]
for (var j = 0; j < ossTest.length; j++) {
if (file.endsWith(list[i]) && (name === ossTest[j] || ossTest[j] === '*')) {
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
log(`Skipping test ${testName} because it is blacklisted in the free test`)
log(`Skipping test ${testName} because it is blacklisted in the oss test`)
return true
}
}
}
if (file.includes('x-pack') || isXPack) {
list = Object.keys(platinumBlackList)
list = Object.keys(xPackBlackList)
for (i = 0; i < list.length; i++) {
const platTest = platinumBlackList[list[i]]
const platTest = xPackBlackList[list[i]]
for (j = 0; j < platTest.length; j++) {
if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
log(`Skipping test ${testName} because it is blacklisted in the platinum test`)
log(`Skipping test ${testName} because it is blacklisted in the XPack test`)
return true
}
}
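Both skip maps share one shape — a file path mapped to test names, with '*' acting as a wildcard — and the lookup above boils down to this sketch:

'use strict'

// file path -> test names; '*' skips every test in that file
const skips = {
  'cat.indices/10_basic.yml': ['Test cat indices output for closed index (pre 7.2.0)'],
  'indices.split/30_copy_settings.yml': ['*']
}

function isSkipped (file, name) {
  for (const path of Object.keys(skips)) {
    if (!file.endsWith(path)) continue
    if (skips[path].includes(name) || skips[path].includes('*')) return true
  }
  return false
}

console.log(isSkipped('rest-api-spec/test/indices.split/30_copy_settings.yml', 'any')) // true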

View File

@ -27,7 +27,7 @@ const helper = require('./helper')
const deepEqual = require('fast-deep-equal')
const { ConfigurationError } = require('../../lib/errors')
const { delve, to, isXPackTemplate, sleep } = helper
const { delve, to } = helper
const supportedFeatures = [
'gtelte',
@ -52,115 +52,135 @@ function build (opts = {}) {
* Runs a cleanup, removes all indices, aliases, templates, and snapshots
* @returns {Promise}
*/
async function cleanup (isXPack) {
async function cleanup () {
response = null
stash.clear()
if (isXPack) {
// wipe rollup jobs
const { body: jobsList } = await client.rollup.getJobs({ id: '_all' })
const jobsIds = jobsList.jobs.map(j => j.config.id)
try {
await client.indices.deleteAlias({
index: '_all',
name: '_all'
}, { ignore: [404] })
} catch (err) {
assert.ifError(err, 'should not error: indices.deleteAlias')
}
try {
await client.indices.delete({
index: '_all',
expand_wildcards: 'open,closed,hidden'
}, { ignore: [404] })
} catch (err) {
assert.ifError(err, 'should not error: indices.delete')
}
try {
await client.indices.deleteTemplate({ name: '*' })
} catch (err) {
assert.ifError(err, 'should not error: indices.deleteTemplate')
}
try {
const { body: repositories } = await client.snapshot.getRepository()
for (const repository of Object.keys(repositories)) {
await client.snapshot.delete({ repository, snapshot: '*' }, { ignore: [404] })
await client.snapshot.deleteRepository({ repository }, { ignore: [404] })
}
} catch (err) {
assert.ifError(err, 'should not error: snapshot.delete / snapshot.deleteRepository')
}
}
/**
* Runs some additional API calls to prepare ES for the xpack test.
* This set of calls should be executed before the final cleanup.
* @returns {Promise}
*/
async function cleanupXPack () {
// tap.comment('XPack Cleanup')
try {
const { body } = await client.security.getRole()
const roles = Object.keys(body).filter(n => !body[n].metadata._reserved)
await helper.runInParallel(
client, 'security.deleteRole',
roles.map(r => ({ name: r }))
)
} catch (err) {
assert.ifError(err, 'should not error: security role cleanup')
}
try {
const { body } = await client.security.getUser()
const users = Object.keys(body).filter(n => !body[n].metadata._reserved)
await helper.runInParallel(
client, 'security.deleteUser',
users.map(r => ({ username: r }))
)
} catch (err) {
assert.ifError(err, 'should not error: security user cleanup')
}
try {
const { body } = await client.security.getPrivileges()
const privileges = []
Object.keys(body).forEach(app => {
Object.keys(body[app]).forEach(priv => {
privileges.push({
name: body[app][priv].name,
application: body[app][priv].application
})
})
})
await helper.runInParallel(client, 'security.deletePrivileges', privileges)
} catch (err) {
assert.ifError(err, 'should not error: security privileges cleanup')
}
try {
await client.ml.stopDatafeed({ datafeedId: '*', force: true })
const { body } = await client.ml.getDatafeeds({ datafeedId: '*' })
const feeds = body.datafeeds.map(f => f.datafeed_id)
await helper.runInParallel(
client, 'ml.deleteDatafeed',
feeds.map(f => ({ datafeedId: f }))
)
} catch (err) {
assert.ifError(err, 'should not error: ml datafeed cleanup')
}
try {
await client.ml.closeJob({ jobId: '*', force: true })
const { body } = await client.ml.getJobs({ jobId: '*' })
const jobs = body.jobs.map(j => j.job_id)
await helper.runInParallel(
client, 'ml.deleteJob',
jobs.map(j => ({ jobId: j, waitForCompletion: true, force: true }))
)
} catch (err) {
assert.ifError(err, 'should not error: ml job cleanup')
}
try {
const { body } = await client.rollup.getJobs({ id: '_all' })
const jobs = body.jobs.map(j => j.config.id)
await helper.runInParallel(
client, 'rollup.stopJob',
jobsIds.map(j => ({ id: j, waitForCompletion: true }))
jobs.map(j => ({ id: j, waitForCompletion: true }))
)
await helper.runInParallel(
client, 'rollup.deleteJob',
jobsIds.map(j => ({ id: j }))
jobs.map(j => ({ id: j }))
)
// delete slm policies
const { body: policies } = await client.slm.getLifecycle()
await helper.runInParallel(
client, 'slm.deleteLifecycle',
Object.keys(policies).map(p => ({ policy_id: p }))
)
// remove 'x_pack_rest_user', used in some xpack tests
await client.security.deleteUser({ username: 'x_pack_rest_user' }, { ignore: [404] })
} catch (err) {
assert.ifError(err, 'should not error: rollup jobs cleanup')
}
// clean snapshots
const { body: repositories } = await client.snapshot.getRepository()
for (const repository of Object.keys(repositories)) {
await client.snapshot.delete({ repository, snapshot: '*' }, { ignore: [404] })
await client.snapshot.deleteRepository({ repository }, { ignore: [404] })
}
if (isXPack) {
// clean data streams
await client.indices.deleteDataStream({ name: '*' })
}
// clean all indices
await client.indices.delete({ index: '*,-.ds-ilm-history-*', expand_wildcards: 'open,closed,hidden' }, { ignore: [404] })
if (isXPack) {
// delete templates
const { body: templates } = await client.cat.templates({ h: 'name' })
for (const template of templates.split('\n').filter(Boolean)) {
if (isXPackTemplate(template)) continue
const { body } = await client.indices.deleteTemplate({ name: template }, { ignore: [404] })
if (JSON.stringify(body).includes(`index_template [${template}] missing`)) {
await client.indices.deleteIndexTemplate({ name: template }, { ignore: [404] })
}
}
// delete component template
const { body } = await client.cluster.getComponentTemplate()
const components = body.component_templates.filter(c => !isXPackTemplate(c.name)).map(c => c.name)
if (components.length > 0) {
await client.cluster.deleteComponentTemplate({ name: components.join(',') }, { ignore: [404] })
}
} else {
// clean all legacy index templates
await client.indices.deleteTemplate({ name: '*' })
// clean all composable index templates
await client.indices.deleteIndexTemplate({ name: '*' })
// clean all component templates
await client.cluster.deleteComponentTemplate({ name: '*' })
}
// Remove any cluster setting
const { body: settings } = await client.cluster.getSettings()
const newSettings = {}
for (const setting in settings) {
if (Object.keys(settings[setting]).length === 0) continue
newSettings[setting] = {}
for (const key in settings[setting]) {
newSettings[setting][`${key}.*`] = null
}
}
if (Object.keys(newSettings).length > 0) {
await client.cluster.putSettings({ body: newSettings })
}
if (isXPack) {
// delete ilm policies
const preserveIlmPolicies = [
'ilm-history-ilm-policy', 'slm-history-ilm-policy',
'watch-history-ilm-policy', 'ml-size-based-ilm-policy',
'logs', 'metrics'
]
const { body: policies } = await client.ilm.getLifecycle()
for (const policy in policies) {
if (preserveIlmPolicies.includes(policy)) continue
await client.ilm.deleteLifecycle({ policy })
}
// delete autofollow patterns
const { body: patterns } = await client.ccr.getAutoFollowPattern()
for (const { name } of patterns.patterns) {
await client.ccr.deleteAutoFollowPattern({ name })
}
// delete all tasks
const { body: nodesTask } = await client.tasks.list()
const tasks = Object.keys(nodesTask.nodes)
try {
const { body } = await client.tasks.list()
const tasks = Object.keys(body.nodes)
.reduce((acc, node) => {
const { tasks } = nodesTask.nodes[node]
const { tasks } = body.nodes[node]
Object.keys(tasks).forEach(id => {
if (tasks[id].cancellable) acc.push(id)
})
@ -171,14 +191,21 @@ function build (opts = {}) {
client, 'tasks.cancel',
tasks.map(id => ({ taskId: id }))
)
} catch (err) {
assert.ifError(err, 'should not error: tasks cleanup')
}
// wait for pending task before resolving the promise
await sleep(100)
while (true) {
const { body } = await client.cluster.pendingTasks()
if (body.tasks.length === 0) break
await sleep(500)
try {
await client.ilm.removePolicy({ index: '_all' })
} catch (err) {
assert.ifError(err, 'should not error: ilm.removePolicy')
}
// refresh all indices
try {
await client.indices.refresh({ index: '_all' })
} catch (err) {
assert.ifError(err, 'should not error: indices.refresh')
}
}
@ -226,7 +253,9 @@ function build (opts = {}) {
if (teardown) await exec('Teardown', teardown, stats, junit)
await cleanup(isXPack)
if (isXPack) await cleanupXPack()
await cleanup()
}
/**
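Every 7.10 cleanup step above follows the same defensive shape: await the call, tolerate 404s where deletion may be a no-op, and assert that nothing else failed. Distilled into a sketch:

'use strict'
const assert = require('assert')

async function step (label, fn) {
  try {
    await fn() // e.g. () => client.indices.delete({ index: '_all' }, { ignore: [404] })
  } catch (err) {
    assert.ifError(err, `should not error: ${label}`)
  }
}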

View File

@ -21,13 +21,9 @@
const { test } = require('tap')
const { URL } = require('url')
const buffer = require('buffer')
const intoStream = require('into-stream')
const { Client, ConnectionPool, Transport, Connection, errors } = require('../../index')
const { Client, ConnectionPool, Transport, errors } = require('../../index')
const { CloudConnectionPool } = require('../../lib/pool')
const { buildServer } = require('../utils')
const clientVersion = require('../../package.json').version
const nodeVersion = process.versions.node
test('Configure host', t => {
t.test('Single string', t => {
@ -1247,117 +1243,3 @@ test('Socket destroyed while reading the body', t => {
})
})
})
test('Content length too big (buffer)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-encoding': 'gzip',
'content-length': buffer.constants.MAX_LENGTH + 10,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({ node: 'http://localhost:9200', Connection: MockConnection })
client.info((err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.is(err.message, `The content length (${buffer.constants.MAX_LENGTH + 10}) is bigger than the maximum allowed buffer (${buffer.constants.MAX_LENGTH})`)
t.strictEqual(result.meta.attempts, 0)
})
})
test('Content length too big (string)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-length': buffer.constants.MAX_STRING_LENGTH + 10,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({ node: 'http://localhost:9200', Connection: MockConnection })
client.info((err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.is(err.message, `The content length (${buffer.constants.MAX_STRING_LENGTH + 10}) is bigger than the maximum allowed string (${buffer.constants.MAX_STRING_LENGTH})`)
t.strictEqual(result.meta.attempts, 0)
})
})
test('Meta header enabled', t => {
t.plan(2)
class MockConnection extends Connection {
request (params, callback) {
t.match(params.headers, { 'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}` })
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-length': '17',
connection: 'keep-alive',
date: new Date().toISOString()
}
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
client.info((err, result) => {
t.error(err)
})
})
test('Meta header disabled', t => {
t.plan(2)
class MockConnection extends Connection {
request (params, callback) {
t.notMatch(params.headers, { 'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}` })
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-length': '17',
connection: 'keep-alive',
date: new Date().toISOString()
}
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
enableMetaHeader: false
})
client.info((err, result) => {
t.error(err)
})
})
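The removed tests pinned the exact x-elastic-client-meta value; on the 7.11 side it is derived from package metadata, roughly as follows (a sketch, assuming the client package is installed):

'use strict'
const clientVersion = require('@elastic/elasticsearch/package.json').version
const nodeVersion = process.versions.node

// es=<client>,js=<node>,t=<transport>,hc=<http client>, matching the removed assertions;
// the helpers append a segment such as ',h=bp' or ',h=s'.
const meta = `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}`
console.log(meta)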

View File

@ -23,7 +23,6 @@ const { test } = require('tap')
const { inspect } = require('util')
const { URL } = require('url')
const { Agent } = require('http')
const { Readable } = require('stream')
const hpagent = require('hpagent')
const intoStream = require('into-stream')
const { buildServer } = require('../utils')
@ -919,31 +918,3 @@ test('Proxy agent (https)', t => {
t.true(connection.agent instanceof hpagent.HttpsProxyAgent)
})
test('Abort with a slow body', t => {
t.plan(1)
const connection = new Connection({
url: new URL('https://localhost:9200'),
proxy: 'http://localhost:8080'
})
const slowBody = new Readable({
read (size) {
setTimeout(() => {
this.push('{"size":1, "query":{"match_all":{}}}')
this.push(null) // EOF
}, 1000).unref()
}
})
const request = connection.request({
method: 'GET',
path: '/',
body: slowBody
}, (err, response) => {
t.ok(err instanceof RequestAbortedError)
})
setImmediate(() => request.abort())
})
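At the public API level the removed test amounts to racing abort() against an in-flight request; a sketch against a local node, with the index name as a placeholder:

'use strict'
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

const request = client.search({
  index: 'test', // placeholder
  body: { query: { match_all: {} } }
}, (err, result) => {
  // err is a RequestAbortedError when abort() wins the race
  console.log(err && err.name)
})

request.abort()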

View File

@ -23,12 +23,7 @@ const { test } = require('tap')
const semver = require('semver')
const { Client, events } = require('../../index')
const { TimeoutError } = require('../../lib/errors')
const {
connection: {
MockConnection,
MockConnectionTimeout
}
} = require('../utils')
const { connection: { MockConnection, MockConnectionTimeout } } = require('../utils')
test('Should emit a request event when a request is performed', t => {
t.plan(3)

View File

@ -27,8 +27,6 @@ const semver = require('semver')
const { test } = require('tap')
const { Client, errors } = require('../../../')
const { buildServer, connection } = require('../../utils')
const clientVersion = require('../../../package.json').version
const nodeVersion = process.versions.node
const dataset = [
{ user: 'jon', age: 23 },
@ -43,10 +41,7 @@ test('bulk index', t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.path, '/_bulk')
t.match(params.headers, {
'content-type': 'application/x-ndjson',
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=bp`
})
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
const [action, payload] = params.body.split('\n')
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
t.deepEqual(JSON.parse(payload), dataset[count++])
@ -89,9 +84,6 @@ test('bulk index', t => {
onRequest (params) {
t.strictEqual(params.path, '/_bulk')
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
t.notMatch(params.headers, {
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=bp`
})
const [action, payload] = params.body.split('\n')
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
t.deepEqual(JSON.parse(payload), dataset[count++])
@ -101,8 +93,7 @@ test('bulk index', t => {
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
enableMetaHeader: false
Connection: MockConnection
})
const result = await client.helpers.bulk({
datasource: dataset.slice(),

View File

@ -22,17 +22,11 @@
const { test } = require('tap')
const { Client, errors } = require('../../../')
const { connection } = require('../../utils')
const clientVersion = require('../../../package.json').version
const nodeVersion = process.versions.node
test('Scroll search', async t => {
var count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.match(params.headers, {
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=s`
})
count += 1
if (params.method === 'POST') {
t.strictEqual(params.querystring, 'scroll=1m')
@ -79,9 +73,6 @@ test('Clear a scroll search', async t => {
var count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.notMatch(params.headers, {
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=s`
})
if (params.method === 'DELETE') {
const body = JSON.parse(params.body)
t.strictEqual(body.scroll_id, 'id')
@ -104,8 +95,7 @@ test('Clear a scroll search', async t => {
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
enableMetaHeader: false
Connection: MockConnection
})
const scrollSearch = client.helpers.scrollSearch({