Compare commits

...

20 Commits

Author SHA1 Message Date
7f0857daf2 Bumped v7.8.0 2020-06-18 16:32:53 +02:00
76b1bc6452 API generation 2020-06-18 15:33:46 +02:00
da98f735a9 API generation 2020-06-17 08:33:01 +02:00
9e9f2a7ff2 Fixed code snippet 2020-06-15 09:27:12 +02:00
08898ac4d5 Reorganized test and force 100% code coverage (#1226) 2020-06-15 08:39:36 +02:00
c08a0fa6ce [Backport 7.x] Helpers: avoid allocating new timers (#1225)
Co-authored-by: Tomas Della Vedova <delvedor@users.noreply.github.com>
2020-06-12 10:45:14 +02:00
71f92e3ace improved test coverage (#1222) 2020-06-08 09:44:49 +02:00
375b322571 migrated to FakeTimers (#1221) 2020-06-08 09:44:49 +02:00
847637d784 [Backport 7.x] Documentation fixes (#1218)
Co-authored-by: Tomas Della Vedova <delvedor@users.noreply.github.com>
2020-06-04 13:39:47 +02:00
0087c49987 [Backport 7.x] Improve helper concurrency (#1216)
Co-authored-by: Tomas Della Vedova <delvedor@users.noreply.github.com>
2020-06-04 13:39:47 +02:00
496f93e501 Bumped v7.8.0-rc.1 2020-06-03 10:45:44 +02:00
3b2a60afac [Backport 7.x] Fixed types code generation (#1213)
Co-authored-by: Tomas Della Vedova <delvedor@users.noreply.github.com>
2020-06-03 10:43:00 +02:00
e32cb8b997 [Backport 7.x] Added timeout support in bulk and msearch helpers (#1211)
Co-authored-by: Tomas Della Vedova <delvedor@users.noreply.github.com>
2020-06-03 10:43:00 +02:00
bf285e2b43 [Backport 7.x] Add multi search helper (#1202)
Co-authored-by: Tomas Della Vedova <delvedor@users.noreply.github.com>
2020-05-25 17:20:44 +02:00
bc820ca89e [Backport 7.x] Use filter_path for improving the search helpers performances (#1201) 2020-05-25 17:20:44 +02:00
5156d55b17 API generation 2020-05-25 09:24:27 +02:00
32b0a66236 [DOCS] Fixes Indices component template link in reference docs (#1197) 2020-05-15 10:40:01 +02:00
d6a3a7072a Updated skip list 2020-05-15 10:16:08 +02:00
137dd06bce API generation 2020-05-15 09:57:57 +02:00
4e78f2afb6 Updated CI configuration 2020-05-15 09:45:54 +02:00
58 changed files with 3827 additions and 141 deletions

View File

@ -1,6 +1,6 @@
---
STACK_VERSION:
- 7.7.0-SNAPSHOT
- 7.8-SNAPSHOT
NODE_JS_VERSION:
- 14

View File

@ -32,9 +32,9 @@ jobs:
run: |
npm run test:unit
- name: Behavior test
- name: Acceptance test
run: |
npm run test:behavior
npm run test:acceptance
- name: Type Definitions
run: |
@ -86,7 +86,7 @@ jobs:
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.7.0-SNAPSHOT
stack-version: 7.8-SNAPSHOT
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
@ -121,9 +121,9 @@ jobs:
run: |
npm install
- name: Code coverage
- name: Code coverage report
run: |
npm run test:coverage
npm run test:coverage-report
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
@ -131,6 +131,10 @@ jobs:
file: ./coverage.lcov
fail_ci_if_error: true
- name: Code coverage 100%
run: |
npm run test:coverage-100
license:
name: License check
runs-on: ubuntu-latest

View File

@ -0,0 +1,78 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildAutoscalingDeleteAutoscalingPolicy (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
]
const snakeCase = {
}
/**
* Perform a autoscaling.delete_autoscaling_policy request
* Deletes an autoscaling policy.
* https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-delete-autoscaling-policy.html
*/
return function autoscalingDeleteAutoscalingPolicy (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'DELETE'
path = '/' + '_autoscaling' + '/' + 'policy' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildAutoscalingDeleteAutoscalingPolicy

View File

@ -0,0 +1,78 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildAutoscalingGetAutoscalingPolicy (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
]
const snakeCase = {
}
/**
* Perform a autoscaling.get_autoscaling_policy request
* Retrieves an autoscaling policy.
* https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-get-autoscaling-policy.html
*/
return function autoscalingGetAutoscalingPolicy (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'GET'
path = '/' + '_autoscaling' + '/' + 'policy' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: null,
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildAutoscalingGetAutoscalingPolicy

View File

@ -0,0 +1,82 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildAutoscalingPutAutoscalingPolicy (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
]
const snakeCase = {
}
/**
* Perform a autoscaling.put_autoscaling_policy request
* Creates a new autoscaling policy.
* https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-put-autoscaling-policy.html
*/
return function autoscalingPutAutoscalingPolicy (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
if (params['body'] == null) {
const err = new ConfigurationError('Missing required parameter: body')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'PUT'
path = '/' + '_autoscaling' + '/' + 'policy' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildAutoscalingPutAutoscalingPolicy
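The three generated files above wire up the new experimental autoscaling policy APIs, which the updated `api/index.js` exposes as `client.autoscaling.putAutoscalingPolicy()`, `getAutoscalingPolicy()` and `deleteAutoscalingPolicy()`. A minimal usage sketch follows; the policy name is arbitrary and the request body is only a placeholder, since the exact policy definition is owned by the Elasticsearch autoscaling API.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // Create or update a policy. The body here is a placeholder,
  // the actual policy definition is described in the autoscaling docs.
  await client.autoscaling.putAutoscalingPolicy({
    name: 'my-policy',
    body: { /* policy definition goes here */ }
  })

  // Retrieve and then delete the same policy.
  const { body } = await client.autoscaling.getAutoscalingPolicy({ name: 'my-policy' })
  console.log(body)
  await client.autoscaling.deleteAutoscalingPolicy({ name: 'my-policy' })
}

run().catch(console.error)
----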

View File

@ -30,7 +30,7 @@ function buildClusterDeleteComponentTemplate (opts) {
/**
* Perform a cluster.delete_component_template request
* Deletes a component template
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-templates.html
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
*/
return function clusterDeleteComponentTemplate (params, options, callback) {
options = options || {}

View File

@ -0,0 +1,79 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildClusterDeleteVotingConfigExclusions (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'wait_for_removal',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
waitForRemoval: 'wait_for_removal',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a cluster.delete_voting_config_exclusions request
* Clears cluster voting config exclusions.
* https://www.elastic.co/guide/en/elasticsearch/reference/master/voting-config-exclusions.html
*/
return function clusterDeleteVotingConfigExclusions (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'DELETE'
path = '/' + '_cluster' + '/' + 'voting_config_exclusions'
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildClusterDeleteVotingConfigExclusions

View File

@ -0,0 +1,86 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildClusterExistsComponentTemplate (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'master_timeout',
'local',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
masterTimeout: 'master_timeout',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a cluster.exists_component_template request
* Returns information about whether a particular component template exists
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
*/
return function clusterExistsComponentTemplate (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'HEAD'
path = '/' + '_component_template' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: null,
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildClusterExistsComponentTemplate

View File

@ -30,7 +30,7 @@ function buildClusterGetComponentTemplate (opts) {
/**
* Perform a cluster.get_component_template request
* Returns one or more component templates
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-templates.html
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
*/
return function clusterGetComponentTemplate (params, options, callback) {
options = options || {}

View File

@ -0,0 +1,82 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildClusterPostVotingConfigExclusions (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'node_ids',
'node_names',
'timeout',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
nodeIds: 'node_ids',
nodeNames: 'node_names',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a cluster.post_voting_config_exclusions request
* Updates the cluster voting config exclusions by node ids or node names.
* https://www.elastic.co/guide/en/elasticsearch/reference/master/voting-config-exclusions.html
*/
return function clusterPostVotingConfigExclusions (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'POST'
path = '/' + '_cluster' + '/' + 'voting_config_exclusions'
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildClusterPostVotingConfigExclusions
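The two voting config exclusions builders (the DELETE variant earlier and the POST variant above) map to `client.cluster.deleteVotingConfigExclusions()` and `client.cluster.postVotingConfigExclusions()`. A short sketch, assuming a hypothetical node called `node-1`:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // Exclude a node from the voting configuration by name
  // (node_ids is accepted as an alternative).
  await client.cluster.postVotingConfigExclusions({ node_names: 'node-1' })

  // Clear the exclusions list once the node has been removed.
  await client.cluster.deleteVotingConfigExclusions({ wait_for_removal: true })
}

run().catch(console.error)
----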

View File

@ -31,7 +31,7 @@ function buildClusterPutComponentTemplate (opts) {
/**
* Perform a cluster.put_component_template request
* Creates or updates a component template
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-templates.html
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
*/
return function clusterPutComponentTemplate (params, options, callback) {
options = options || {}

View File

@ -22,7 +22,7 @@ function buildEqlSearch (opts) {
/**
* Perform a eql.search request
* Returns results matching a query expressed in Event Query Language (EQL)
* https://www.elastic.co/guide/en/elasticsearch/reference/current/eql.html
* https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html
*/
return function eqlSearch (params, options, callback) {
options = options || {}

View File

@ -0,0 +1,86 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildIndicesDeleteIndexTemplate (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'timeout',
'master_timeout',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
masterTimeout: 'master_timeout',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a indices.delete_index_template request
* Deletes an index template.
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
*/
return function indicesDeleteIndexTemplate (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'DELETE'
path = '/' + '_index_template' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildIndicesDeleteIndexTemplate

View File

@ -0,0 +1,88 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildIndicesExistsIndexTemplate (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'flat_settings',
'master_timeout',
'local',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
flatSettings: 'flat_settings',
masterTimeout: 'master_timeout',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a indices.exists_index_template request
* Returns information about whether a particular index template exists.
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
*/
return function indicesExistsIndexTemplate (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'HEAD'
path = '/' + '_index_template' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: null,
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildIndicesExistsIndexTemplate

View File

@ -0,0 +1,87 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildIndicesGetIndexTemplate (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'flat_settings',
'master_timeout',
'local',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
flatSettings: 'flat_settings',
masterTimeout: 'master_timeout',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a indices.get_index_template request
* Returns an index template.
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
*/
return function indicesGetIndexTemplate (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if ((name) != null) {
if (method == null) method = 'GET'
path = '/' + '_index_template' + '/' + encodeURIComponent(name)
} else {
if (method == null) method = 'GET'
path = '/' + '_index_template'
}
// build request object
const request = {
method,
path,
body: null,
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildIndicesGetIndexTemplate

View File

@ -0,0 +1,91 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildIndicesPutIndexTemplate (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'create',
'cause',
'master_timeout',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
masterTimeout: 'master_timeout',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a indices.put_index_template request
* Creates or updates an index template.
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
*/
return function indicesPutIndexTemplate (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
if (params['body'] == null) {
const err = new ConfigurationError('Missing required parameter: body')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'PUT'
path = '/' + '_index_template' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildIndicesPutIndexTemplate
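The index template builders added in this compare surface as `client.indices.putIndexTemplate()`, `getIndexTemplate()`, `existsIndexTemplate()` and `deleteIndexTemplate()`. A minimal sketch, assuming the composable index template body format (`index_patterns` plus `template`) and a hypothetical template name:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // Create or update a composable index template (body shape assumed).
  await client.indices.putIndexTemplate({
    name: 'my-template',
    body: {
      index_patterns: ['my-index-*'],
      template: {
        settings: { number_of_shards: 1 }
      }
    }
  })

  // Read it back.
  const { body } = await client.indices.getIndexTemplate({ name: 'my-template' })
  console.log(body)
}

run().catch(console.error)
----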

View File

@ -0,0 +1,87 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildIndicesSimulateIndexTemplate (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'create',
'cause',
'master_timeout',
'pretty',
'human',
'error_trace',
'source',
'filter_path'
]
const snakeCase = {
masterTimeout: 'master_timeout',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
/**
* Perform a indices.simulate_index_template request
* Simulate matching the given index name against the index templates in the system
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
*/
return function indicesSimulateIndexTemplate (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['name'] == null) {
const err = new ConfigurationError('Missing required parameter: name')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, name, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'POST'
path = '/' + '_index_template' + '/' + '_simulate_index' + '/' + encodeURIComponent(name)
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildIndicesSimulateIndexTemplate
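`simulateIndexTemplate()` resolves which template and settings would apply to a given index name. A hedged sketch, with a hypothetical index name:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// Ask Elasticsearch which composed template would apply
// to an index named 'my-index-000001'.
client.indices.simulateIndexTemplate({ name: 'my-index-000001' })
  .then(({ body }) => console.log(body))
  .catch(console.error)
----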

View File

@ -12,7 +12,8 @@ function buildMlDeleteDataFrameAnalytics (opts) {
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'force'
'force',
'timeout'
]
const snakeCase = {

View File

@ -22,6 +22,7 @@ function buildMlValidate (opts) {
/**
* Perform a ml.validate request
* Validates an anomaly detection job.
* https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html
*/
return function mlValidate (params, options, callback) {
options = options || {}

View File

@ -22,6 +22,7 @@ function buildMlValidateDetector (opts) {
/**
* Perform a ml.validate_detector request
* Validates an anomaly detection detector.
* https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html
*/
return function mlValidateDetector (params, options, callback) {
options = options || {}

View File

@ -0,0 +1,83 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildSearchableSnapshotsClearCache (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'ignore_unavailable',
'allow_no_indices',
'expand_wildcards',
'index'
]
const snakeCase = {
ignoreUnavailable: 'ignore_unavailable',
allowNoIndices: 'allow_no_indices',
expandWildcards: 'expand_wildcards'
}
/**
* Perform a searchable_snapshots.clear_cache request
* Clear the cache of searchable snapshots.
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-api-clear-cache.html
*/
return function searchableSnapshotsClearCache (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, index, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if ((index) != null) {
if (method == null) method = 'POST'
path = '/' + encodeURIComponent(index) + '/' + '_searchable_snapshots' + '/' + 'cache' + '/' + 'clear'
} else {
if (method == null) method = 'POST'
path = '/' + '_searchable_snapshots' + '/' + 'cache' + '/' + 'clear'
}
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildSearchableSnapshotsClearCache

View File

@ -0,0 +1,94 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildSearchableSnapshotsMount (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
'master_timeout',
'wait_for_completion'
]
const snakeCase = {
masterTimeout: 'master_timeout',
waitForCompletion: 'wait_for_completion'
}
/**
* Perform a searchable_snapshots.mount request
* Mount a snapshot as a searchable index.
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-api-mount-snapshot.html
*/
return function searchableSnapshotsMount (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['repository'] == null) {
const err = new ConfigurationError('Missing required parameter: repository')
return handleError(err, callback)
}
if (params['snapshot'] == null) {
const err = new ConfigurationError('Missing required parameter: snapshot')
return handleError(err, callback)
}
if (params['body'] == null) {
const err = new ConfigurationError('Missing required parameter: body')
return handleError(err, callback)
}
// check required url components
if (params['snapshot'] != null && (params['repository'] == null)) {
const err = new ConfigurationError('Missing required parameter of the url: repository')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, repository, snapshot, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'POST'
path = '/' + '_snapshot' + '/' + encodeURIComponent(repository) + '/' + encodeURIComponent(snapshot) + '/' + '_mount'
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildSearchableSnapshotsMount
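The mount builder is exposed as `client.searchableSnapshots.mount()`. A sketch with hypothetical repository, snapshot and index names; the `{ index: ... }` body shape is an assumption, check the searchable snapshots docs for the exact request body:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

client.searchableSnapshots.mount({
  repository: 'my-repo',
  snapshot: 'my-snapshot',
  wait_for_completion: true,
  body: { index: 'my-index' } // assumed body shape
})
  .then(({ body }) => console.log(body))
  .catch(console.error)
----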

View File

@ -0,0 +1,78 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildSearchableSnapshotsRepositoryStats (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
]
const snakeCase = {
}
/**
* Perform a searchable_snapshots.repository_stats request
* Retrieve usage statistics about a snapshot repository.
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-repository-stats.html
*/
return function searchableSnapshotsRepositoryStats (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// check required parameters
if (params['repository'] == null) {
const err = new ConfigurationError('Missing required parameter: repository')
return handleError(err, callback)
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, repository, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if (method == null) method = 'GET'
path = '/' + '_snapshot' + '/' + encodeURIComponent(repository) + '/' + '_stats'
// build request object
const request = {
method,
path,
body: null,
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildSearchableSnapshotsRepositoryStats

View File

@ -0,0 +1,77 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildSearchableSnapshotsStats (opts) {
// eslint-disable-next-line no-unused-vars
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
const acceptedQuerystring = [
]
const snakeCase = {
}
/**
* Perform a searchable_snapshots.stats request
* Retrieve various statistics about searchable snapshots.
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-api-stats.html
*/
return function searchableSnapshotsStats (params, options, callback) {
options = options || {}
if (typeof options === 'function') {
callback = options
options = {}
}
if (typeof params === 'function' || params == null) {
callback = params
params = {}
options = {}
}
// validate headers object
if (options.headers != null && typeof options.headers !== 'object') {
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
return handleError(err, callback)
}
var warnings = []
var { method, body, index, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
var ignore = options.ignore
if (typeof ignore === 'number') {
options.ignore = [ignore]
}
var path = ''
if ((index) != null) {
if (method == null) method = 'GET'
path = '/' + encodeURIComponent(index) + '/' + '_searchable_snapshots' + '/' + 'stats'
} else {
if (method == null) method = 'GET'
path = '/' + '_searchable_snapshots' + '/' + 'stats'
}
// build request object
const request = {
method,
path,
body: null,
querystring
}
options.warnings = warnings.length === 0 ? null : warnings
return makeRequest(request, options, callback)
}
}
module.exports = buildSearchableSnapshotsStats
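The remaining searchable snapshots builders surface as `stats()`, `repositoryStats()` and `clearCache()`. A short sketch with hypothetical index and repository names:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // Per-index searchable snapshot statistics (the index filter is optional).
  const { body: stats } = await client.searchableSnapshots.stats({ index: 'my-index' })
  console.log(stats)

  // Repository-level statistics and cache clearing.
  await client.searchableSnapshots.repositoryStats({ repository: 'my-repo' })
  await client.searchableSnapshots.clearCache({ index: 'my-index' })
}

run().catch(console.error)
----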

View File

@ -30,7 +30,7 @@ function buildSnapshotCleanupRepository (opts) {
/**
* Perform a snapshot.cleanup_repository request
* Removes stale data from repository.
* https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html
* https://www.elastic.co/guide/en/elasticsearch/reference/master/clean-up-snapshot-repo-api.html
*/
return function snapshotCleanupRepository (params, options, callback) {
options = options || {}

View File

@ -15,6 +15,7 @@ function buildTasksCancel (opts) {
'nodes',
'actions',
'parent_task_id',
'wait_for_completion',
'pretty',
'human',
'error_trace',
@ -24,6 +25,7 @@ function buildTasksCancel (opts) {
const snakeCase = {
parentTaskId: 'parent_task_id',
waitForCompletion: 'wait_for_completion',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}
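The hunk above adds `wait_for_completion` to the accepted querystring of `tasks.cancel`. A hedged sketch, with a placeholder task id:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// Cancel a task and only resolve once the cancellation has completed.
client.tasks.cancel({
  task_id: 'oTUltX4IQMOUUVeiohTt8A:12345', // placeholder task id
  wait_for_completion: true
})
  .then(({ body }) => console.log(body))
  .catch(console.error)
----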

View File

@ -27,8 +27,14 @@ function ESAPI (opts) {
submit: lazyLoad('async_search.submit', opts)
},
autoscaling: {
delete_autoscaling_policy: lazyLoad('autoscaling.delete_autoscaling_policy', opts),
deleteAutoscalingPolicy: lazyLoad('autoscaling.delete_autoscaling_policy', opts),
get_autoscaling_decision: lazyLoad('autoscaling.get_autoscaling_decision', opts),
getAutoscalingDecision: lazyLoad('autoscaling.get_autoscaling_decision', opts)
getAutoscalingDecision: lazyLoad('autoscaling.get_autoscaling_decision', opts),
get_autoscaling_policy: lazyLoad('autoscaling.get_autoscaling_policy', opts),
getAutoscalingPolicy: lazyLoad('autoscaling.get_autoscaling_policy', opts),
put_autoscaling_policy: lazyLoad('autoscaling.put_autoscaling_policy', opts),
putAutoscalingPolicy: lazyLoad('autoscaling.put_autoscaling_policy', opts)
},
bulk: lazyLoad('bulk', opts),
cat: {
@ -96,6 +102,10 @@ function ESAPI (opts) {
allocationExplain: lazyLoad('cluster.allocation_explain', opts),
delete_component_template: lazyLoad('cluster.delete_component_template', opts),
deleteComponentTemplate: lazyLoad('cluster.delete_component_template', opts),
delete_voting_config_exclusions: lazyLoad('cluster.delete_voting_config_exclusions', opts),
deleteVotingConfigExclusions: lazyLoad('cluster.delete_voting_config_exclusions', opts),
exists_component_template: lazyLoad('cluster.exists_component_template', opts),
existsComponentTemplate: lazyLoad('cluster.exists_component_template', opts),
get_component_template: lazyLoad('cluster.get_component_template', opts),
getComponentTemplate: lazyLoad('cluster.get_component_template', opts),
get_settings: lazyLoad('cluster.get_settings', opts),
@ -103,6 +113,8 @@ function ESAPI (opts) {
health: lazyLoad('cluster.health', opts),
pending_tasks: lazyLoad('cluster.pending_tasks', opts),
pendingTasks: lazyLoad('cluster.pending_tasks', opts),
post_voting_config_exclusions: lazyLoad('cluster.post_voting_config_exclusions', opts),
postVotingConfigExclusions: lazyLoad('cluster.post_voting_config_exclusions', opts),
put_component_template: lazyLoad('cluster.put_component_template', opts),
putComponentTemplate: lazyLoad('cluster.put_component_template', opts),
put_settings: lazyLoad('cluster.put_settings', opts),
@ -188,11 +200,15 @@ function ESAPI (opts) {
deleteAlias: lazyLoad('indices.delete_alias', opts),
delete_data_stream: lazyLoad('indices.delete_data_stream', opts),
deleteDataStream: lazyLoad('indices.delete_data_stream', opts),
delete_index_template: lazyLoad('indices.delete_index_template', opts),
deleteIndexTemplate: lazyLoad('indices.delete_index_template', opts),
delete_template: lazyLoad('indices.delete_template', opts),
deleteTemplate: lazyLoad('indices.delete_template', opts),
exists: lazyLoad('indices.exists', opts),
exists_alias: lazyLoad('indices.exists_alias', opts),
existsAlias: lazyLoad('indices.exists_alias', opts),
exists_index_template: lazyLoad('indices.exists_index_template', opts),
existsIndexTemplate: lazyLoad('indices.exists_index_template', opts),
exists_template: lazyLoad('indices.exists_template', opts),
existsTemplate: lazyLoad('indices.exists_template', opts),
exists_type: lazyLoad('indices.exists_type', opts),
@ -209,6 +225,8 @@ function ESAPI (opts) {
getDataStreams: lazyLoad('indices.get_data_streams', opts),
get_field_mapping: lazyLoad('indices.get_field_mapping', opts),
getFieldMapping: lazyLoad('indices.get_field_mapping', opts),
get_index_template: lazyLoad('indices.get_index_template', opts),
getIndexTemplate: lazyLoad('indices.get_index_template', opts),
get_mapping: lazyLoad('indices.get_mapping', opts),
getMapping: lazyLoad('indices.get_mapping', opts),
get_settings: lazyLoad('indices.get_settings', opts),
@ -220,6 +238,8 @@ function ESAPI (opts) {
open: lazyLoad('indices.open', opts),
put_alias: lazyLoad('indices.put_alias', opts),
putAlias: lazyLoad('indices.put_alias', opts),
put_index_template: lazyLoad('indices.put_index_template', opts),
putIndexTemplate: lazyLoad('indices.put_index_template', opts),
put_mapping: lazyLoad('indices.put_mapping', opts),
putMapping: lazyLoad('indices.put_mapping', opts),
put_settings: lazyLoad('indices.put_settings', opts),
@ -235,6 +255,8 @@ function ESAPI (opts) {
shard_stores: lazyLoad('indices.shard_stores', opts),
shardStores: lazyLoad('indices.shard_stores', opts),
shrink: lazyLoad('indices.shrink', opts),
simulate_index_template: lazyLoad('indices.simulate_index_template', opts),
simulateIndexTemplate: lazyLoad('indices.simulate_index_template', opts),
split: lazyLoad('indices.split', opts),
stats: lazyLoad('indices.stats', opts),
unfreeze: lazyLoad('indices.unfreeze', opts),
@ -442,6 +464,22 @@ function ESAPI (opts) {
searchShards: lazyLoad('search_shards', opts),
search_template: lazyLoad('search_template', opts),
searchTemplate: lazyLoad('search_template', opts),
searchable_snapshots: {
clear_cache: lazyLoad('searchable_snapshots.clear_cache', opts),
clearCache: lazyLoad('searchable_snapshots.clear_cache', opts),
mount: lazyLoad('searchable_snapshots.mount', opts),
repository_stats: lazyLoad('searchable_snapshots.repository_stats', opts),
repositoryStats: lazyLoad('searchable_snapshots.repository_stats', opts),
stats: lazyLoad('searchable_snapshots.stats', opts)
},
searchableSnapshots: {
clear_cache: lazyLoad('searchable_snapshots.clear_cache', opts),
clearCache: lazyLoad('searchable_snapshots.clear_cache', opts),
mount: lazyLoad('searchable_snapshots.mount', opts),
repository_stats: lazyLoad('searchable_snapshots.repository_stats', opts),
repositoryStats: lazyLoad('searchable_snapshots.repository_stats', opts),
stats: lazyLoad('searchable_snapshots.stats', opts)
},
security: {
authenticate: lazyLoad('security.authenticate', opts),
change_password: lazyLoad('security.change_password', opts),

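As the `api/index.js` hunks show, every new API is registered twice, once in snake_case and once in camelCase, and both names lazy-load the same builder. A small sketch with a hypothetical component template name:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// Both spellings resolve to the same lazily loaded API function.
client.cluster.existsComponentTemplate({ name: 'my-template' })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)

client.cluster.exists_component_template({ name: 'my-template' })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)
----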
View File

@ -269,6 +269,16 @@ export interface ClusterDeleteComponentTemplate extends Generic {
master_timeout?: string;
}
export interface ClusterDeleteVotingConfigExclusions extends Generic {
wait_for_removal?: boolean;
}
export interface ClusterExistsComponentTemplate extends Generic {
name: string;
master_timeout?: string;
local?: boolean;
}
export interface ClusterGetComponentTemplate extends Generic {
name?: string | string[];
master_timeout?: string;
@ -302,6 +312,12 @@ export interface ClusterPendingTasks extends Generic {
master_timeout?: string;
}
export interface ClusterPostVotingConfigExclusions extends Generic {
node_ids?: string;
node_names?: string;
timeout?: string;
}
export interface ClusterPutComponentTemplate<T = RequestBody> extends Generic {
name: string;
create?: boolean;
@ -646,6 +662,12 @@ export interface IndicesDeleteDataStream extends Generic {
name: string;
}
export interface IndicesDeleteIndexTemplate extends Generic {
name: string;
timeout?: string;
master_timeout?: string;
}
export interface IndicesDeleteTemplate extends Generic {
name: string;
timeout?: string;
@ -671,6 +693,13 @@ export interface IndicesExistsAlias extends Generic {
local?: boolean;
}
export interface IndicesExistsIndexTemplate extends Generic {
name: string;
flat_settings?: boolean;
master_timeout?: string;
local?: boolean;
}
export interface IndicesExistsTemplate extends Generic {
name: string | string[];
flat_settings?: boolean;
@ -735,7 +764,7 @@ export interface IndicesGetAlias extends Generic {
}
export interface IndicesGetDataStreams extends Generic {
name?: string | string[];
name?: string;
}
export interface IndicesGetFieldMapping extends Generic {
@ -750,6 +779,13 @@ export interface IndicesGetFieldMapping extends Generic {
local?: boolean;
}
export interface IndicesGetIndexTemplate extends Generic {
name?: string | string[];
flat_settings?: boolean;
master_timeout?: string;
local?: boolean;
}
export interface IndicesGetMapping extends Generic {
index?: string | string[];
type?: string | string[];
@ -806,6 +842,14 @@ export interface IndicesPutAlias<T = RequestBody> extends Generic {
body?: T;
}
export interface IndicesPutIndexTemplate<T = RequestBody> extends Generic {
name: string;
create?: boolean;
cause?: string;
master_timeout?: string;
body: T;
}
export interface IndicesPutMapping<T = RequestBody> extends Generic {
index?: string | string[];
type?: string;
@ -889,6 +933,14 @@ export interface IndicesShrink<T = RequestBody> extends Generic {
body?: T;
}
export interface IndicesSimulateIndexTemplate<T = RequestBody> extends Generic {
name: string;
create?: boolean;
cause?: string;
master_timeout?: string;
body?: T;
}
export interface IndicesSplit<T = RequestBody> extends Generic {
index: string;
target: string;
@ -1284,6 +1336,7 @@ export interface TasksCancel extends Generic {
nodes?: string | string[];
actions?: string | string[];
parent_task_id?: string;
wait_for_completion?: boolean;
}
export interface TasksGet extends Generic {
@ -1447,9 +1500,22 @@ export interface AsyncSearchSubmit<T = RequestBody> extends Generic {
body?: T;
}
export interface AutoscalingDeleteAutoscalingPolicy extends Generic {
name: string;
}
export interface AutoscalingGetAutoscalingDecision extends Generic {
}
export interface AutoscalingGetAutoscalingPolicy extends Generic {
name: string;
}
export interface AutoscalingPutAutoscalingPolicy<T = RequestBody> extends Generic {
name: string;
body: T;
}
export interface CatMlDataFrameAnalytics extends Generic {
id?: string;
allow_no_match?: boolean;
@ -1727,6 +1793,7 @@ export interface MlDeleteCalendarJob extends Generic {
export interface MlDeleteDataFrameAnalytics extends Generic {
id: string;
force?: boolean;
timeout?: string;
}
export interface MlDeleteDatafeed extends Generic {
@ -1734,7 +1801,8 @@ export interface MlDeleteDatafeed extends Generic {
force?: boolean;
}
export interface MlDeleteExpiredData extends Generic {
export interface MlDeleteExpiredData<T = RequestBody> extends Generic {
body?: T;
}
export interface MlDeleteFilter extends Generic {
@ -2139,6 +2207,29 @@ export interface RollupStopJob extends Generic {
timeout?: string;
}
export interface SearchableSnapshotsClearCache extends Generic {
index?: string | string[];
ignore_unavailable?: boolean;
allow_no_indices?: boolean;
expand_wildcards?: 'open' | 'closed' | 'none' | 'all';
}
export interface SearchableSnapshotsMount<T = RequestBody> extends Generic {
repository: string;
snapshot: string;
master_timeout?: string;
wait_for_completion?: boolean;
body: T;
}
export interface SearchableSnapshotsRepositoryStats extends Generic {
repository: string;
}
export interface SearchableSnapshotsStats extends Generic {
index?: string | string[];
}
export interface SecurityAuthenticate extends Generic {
}

View File

@ -106,27 +106,22 @@ const client = new Client({
=== SSL configuration
Without any additional configuration you can specify `https://` node urls, but
the certificates used to sign these requests will not verified
(`rejectUnauthorized: false`). To turn on certificate verification, you must
specify an `ssl` object either in the top level config or in each host config
object and set `rejectUnauthorized: true`. The ssl config object can contain
many of the same configuration options that
https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[tls.connect()]
accepts.
Without any additional configuration you can specify `https://` node urls, and
the certificates used to sign these requests will be verified. To turn off certificate verification, you must specify an `ssl` object in the top level config and set `rejectUnauthorized: false`. The default `ssl` values are the same ones that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
uses.
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
node: 'http://localhost:9200',
node: 'https://localhost:9200',
auth: {
username: 'elastic',
password: 'changeme'
},
ssl: {
ca: fs.readFileSync('./cacert.pem'),
rejectUnauthorized: true
rejectUnauthorized: false
}
})
----

View File

@ -95,6 +95,16 @@ error, such as `statusCode`, `headers`, `body`, and `message`.
version, you can specify the host and port in a variety of ways. With the new
client, there is only one way to do it, via the `node` parameter.
* Certificates are verified by default; if you want to disable certificate verification, set the `rejectUnauthorized` option to `false` inside the `ssl` configuration:
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
ssl: { rejectUnauthorized: false }
})
----
* The `plugins` option has been removed. If you want to extend the client now,
you should use the `client.extend` API.

View File

@ -76,11 +76,11 @@ auth: {
_Default:_ `3`
|`requestTimeout`
|`number` - Max request timeout for each request. +
|`number` - Max request timeout in milliseconds for each request. +
_Default:_ `30000`
|`pingTimeout`
|`number` - Max ping request timeout for each request. +
|`number` - Max ping request timeout in milliseconds for each request. +
_Default:_ `3000`
|`sniffInterval`

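For reference, both timeouts are plain `Client` constructor options expressed in milliseconds; a minimal sketch:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  requestTimeout: 60000, // 60 seconds per request
  pingTimeout: 3000      // 3 seconds per ping
})
----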
View File

@ -7,6 +7,9 @@ CAUTION: The client helpers are experimental, and the API may change in the next
The helpers will not work in any Node.js version lower than 10.
=== Bulk Helper
~Added~ ~in~ ~`v7.7.0`~
Running Bulk requests can be complex due to the shape of the API; this helper aims to provide a nicer developer experience around the Bulk API.
==== Usage
@ -93,6 +96,16 @@ const b = client.helpers.bulk({
})
----
|`flushInterval`
a|How much time (in milliseconds) the helper will wait after reading the last document before flushing the body. +
_Default:_ `30000`
[source,js]
----
const b = client.helpers.bulk({
flushInterval: 30000
})
----
|`concurrency`
a|How many requests will be executed at the same time. +
_Default:_ `5`
@ -114,7 +127,7 @@ const b = client.helpers.bulk({
----
|`wait`
a|How much time to wait before retries in milliseconds.+
a|How much time to wait before retries in milliseconds. +
_Default:_ 5000.
[source,js]
----
@ -137,6 +150,64 @@ const b = client.helpers.bulk({
|===
==== Supported operations
===== Index
[source,js]
----
client.helpers.bulk({
datasource: myDatasource,
onDocument (doc) {
return {
index: { _index: 'my-index' }
}
}
})
----
===== Create
[source,js]
----
client.helpers.bulk({
datasource: myDatasource,
onDocument (doc) {
return {
create: { _index: 'my-index', _id: doc.id }
}
}
})
----
===== Update
[source,js]
----
client.helpers.bulk({
datasource: myDatasource,
onDocument (doc) {
// Note that the update operation requires you to return
// an array, where the first element is the action, while
// the second is the document options
return [
{ update: { _index: 'my-index', _id: doc.id } },
{ doc_as_upsert: true }
]
}
})
----
===== Delete
[source,js]
----
client.helpers.bulk({
datasource: myDatasource,
onDocument (doc) {
return {
delete: { _index: 'my-index', _id: doc.id }
}
}
})
----
==== Abort a bulk operation
If needed, you can abort a bulk operation at any time. The bulk helper returns a https://promisesaplus.com/[thenable], which has an `abort` method.
@ -211,8 +282,135 @@ const result = await client.helpers.bulk({
console.log(result)
----
=== Multi Search Helper
~Added~ ~in~ ~`v7.8.0`~
If you are sending search requests at a high rate, this helper might be useful for you.
It will use the multi search API under the hood to batch the requests and improve the overall performance of your application. +
The `result` exposes a `documents` property as well, which allows you to access the hits' sources directly.
==== Usage
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })
const m = client.helpers.msearch()
// promise style API
m.search(
{ index: 'stackoverflow' },
{ query: { match: { title: 'javascript' } } }
)
.then(result => console.log(result.body)) // or result.documents
.catch(err => console.error(err))
// callback style API
m.search(
{ index: 'stackoverflow' },
{ query: { match: { title: 'ruby' } } },
(err, result) => {
if (err) console.error(err)
console.log(result.body) // or result.documents
}
)
----
To create a new instance of the Msearch helper, access it as shown in the example above; the configuration options are:
[cols=2*]
|===
|`operations`
a|How many search operations should be sent in a single msearch request. +
_Default:_ `5`
[source,js]
----
const m = client.helpers.msearch({
operations: 10
})
----
|`flushInterval`
a|How much time (in milliseconds) the helper will wait after reading the last operation before flushing the batched operations. +
_Default:_ `500`
[source,js]
----
const m = client.helpers.msearch({
flushInterval: 500
})
----
|`concurrency`
a|How many requests will be executed at the same time. +
_Default:_ `5`
[source,js]
----
const m = client.helpers.msearch({
concurrency: 10
})
----
|`retries`
a|How many times an operation will be retried before resolving the request. An operation will be retried only in case of a 429 error. +
_Default:_ Client max retries.
[source,js]
----
const m = client.helpers.msearch({
retries: 3
})
----
|`wait`
a|How much time to wait before retries in milliseconds. +
_Default:_ 5000.
[source,js]
----
const m = client.helpers.msearch({
wait: 3000
})
----
|===
==== Stopping the Msearch Helper
If needed, you can stop an msearch processor at any time. The msearch helper returns a https://promisesaplus.com/[thenable], which has a `stop` method.
If you are creating multiple msearch helper instances and using them for a limited period of time, remember to always call the `stop` method once you have finished using them; otherwise your application will start leaking memory.
The `stop` method accepts an optional error that will be dispatched to every subsequent search request.
NOTE: The stop method will stop the execution of the msearch processor, but if you are using a concurrency higher than one, the operations that are already running will not be stopped.
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })
const m = client.helpers.msearch()
m.search(
{ index: 'stackoverflow' },
{ query: { match: { title: 'javascript' } } }
)
.then(result => console.log(result.body))
.catch(err => console.error(err))
m.search(
{ index: 'stackoverflow' },
{ query: { match: { title: 'ruby' } } }
)
.then(result => console.log(result.body))
.catch(err => console.error(err))
setImmediate(() => m.stop())
----
=== Search Helper
A simple wrapper around the search API. Instead of returning the entire `result` object it will return only the search documents result.
~Added~ ~in~ ~`v7.7.0`~
A simple wrapper around the search API. Instead of returning the entire `result` object it will return only the search documents source.
To improve performance, this helper automatically adds `filter_path=hits.hits._source` to the querystring.
[source,js]
----
@ -233,12 +431,15 @@ for (const doc of documents) {
----
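
For reference, a minimal, self-contained sketch of the search helper; the index name and query are placeholders.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // `documents` contains only the `_source` of every matching hit
  const documents = await client.helpers.search({
    index: 'stackoverflow',
    body: {
      query: {
        match: { title: 'javascript' }
      }
    }
  })

  for (const doc of documents) {
    console.log(doc)
  }
}

run().catch(console.log)
----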
=== Scroll Search Helper
~Added~ ~in~ ~`v7.7.0`~
This helper offers a simple and intuitive way to use the scroll search API. Once called, it returns an https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of[async iterator] which can be used in conjunction with a for-await...of loop. +
It automatically handles the `429` error and uses the client's `maxRetries` option.
[source,js]
----
const scrollSearch = await client.helpers.scrollSearch({
const scrollSearch = client.helpers.scrollSearch({
index: 'stackoverflow',
body: {
query: {
@ -280,11 +481,14 @@ for await (const result of scrollSearch) {
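
For reference, a minimal, self-contained sketch of consuming the scroll search helper; the index name and query are placeholders.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const scrollSearch = client.helpers.scrollSearch({
    index: 'stackoverflow',
    body: {
      query: {
        match: { title: 'javascript' }
      }
    }
  })

  for await (const result of scrollSearch) {
    // each `result` is the response of a single scroll page,
    // `result.documents` contains the hits sources
    console.log(result.documents)
  }
}

run().catch(console.log)
----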
=== Scroll Documents Helper
~Added~ ~in~ ~`v7.7.0`~
It works in the same way as the scroll search helper, but it returns only the documents instead. Note that every loop cycle returns a single document, and you can't use the `clear` method.
To improve performance, this helper automatically adds `filter_path=hits.hits._source` to the querystring.
[source,js]
----
const scrollSearch = await client.helpers.scrollDocuments({
const scrollSearch = client.helpers.scrollDocuments({
index: 'stackoverflow',
body: {
query: {

View File

@ -1023,7 +1023,9 @@ link:{ref}/cat-thread-pool.html[Documentation] +
|`string` - a short version of the Accept header, e.g. json, yaml
|`size`
|`'' \| 'k' \| 'm' \| 'g' \| 't' \| 'p'` - The multiplier in which to display values
|`'' \| 'k' \| 'm' \| 'g' \| 't' \| 'p'` - The multiplier in which to display values +
WARNING: This parameter has been deprecated.
|`local`
|`boolean` - Return local information, do not retrieve the state from master node (default: false)
@ -1101,7 +1103,7 @@ client.cluster.deleteComponentTemplate({
master_timeout: string
})
----
link:{ref}/indices-component-template.html[Documentation] +
[cols=2*]
|===
|`name`
@ -1115,6 +1117,47 @@ client.cluster.deleteComponentTemplate({
|===
=== cluster.deleteVotingConfigExclusions
[source,ts]
----
client.cluster.deleteVotingConfigExclusions({
wait_for_removal: boolean
})
----
link:{ref}/voting-config-exclusions.html[Documentation] +
[cols=2*]
|===
|`wait_for_removal` or `waitForRemoval`
|`boolean` - Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting configuration exclusions list. +
_Default:_ `true`
|===
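
For illustration, a minimal usage sketch, assuming a locally running cluster:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // clear the voting configuration exclusions list, waiting for the
  // excluded nodes to leave the cluster first
  await client.cluster.deleteVotingConfigExclusions({ wait_for_removal: true })
  console.log('voting configuration exclusions cleared')
}

run().catch(console.log)
----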
=== cluster.existsComponentTemplate
*Stability:* experimental
[source,ts]
----
client.cluster.existsComponentTemplate({
name: string,
master_timeout: string,
local: boolean
})
----
link:{ref}/indices-component-template.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the template
|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node
|`local`
|`boolean` - Return local information, do not retrieve the state from master node (default: false)
|===
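
For illustration, a minimal usage sketch; the template name is a placeholder and the `ignore` request option prevents a missing template (`404`) from rejecting the promise:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const { statusCode } = await client.cluster.existsComponentTemplate(
    { name: 'my-component-template' },
    { ignore: [404] }
  )
  console.log(statusCode === 200 ? 'template exists' : 'template missing')
}

run().catch(console.log)
----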
=== cluster.getComponentTemplate
*Stability:* experimental
[source,ts]
@ -1125,7 +1168,7 @@ client.cluster.getComponentTemplate({
local: boolean
})
----
link:{ref}/indices-component-template.html[Documentation] +
[cols=2*]
|===
|`name`
@ -1249,6 +1292,31 @@ link:{ref}/cluster-pending.html[Documentation] +
|===
=== cluster.postVotingConfigExclusions
[source,ts]
----
client.cluster.postVotingConfigExclusions({
node_ids: string,
node_names: string,
timeout: string
})
----
link:{ref}/voting-config-exclusions.html[Documentation] +
[cols=2*]
|===
|`node_ids` or `nodeIds`
|`string` - A comma-separated list of the persistent ids of the nodes to exclude from the voting configuration. If specified, you may not also specify ?node_names.
|`node_names` or `nodeNames`
|`string` - A comma-separated list of the names of the nodes to exclude from the voting configuration. If specified, you may not also specify ?node_ids.
|`timeout`
|`string` - Explicit operation timeout +
_Default:_ `30s`
|===
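
For illustration, a minimal usage sketch; the node name is a placeholder:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // exclude a node from the voting configuration
  await client.cluster.postVotingConfigExclusions({
    node_names: 'node-1',
    timeout: '30s'
  })
  console.log('node-1 excluded from the voting configuration')
}

run().catch(console.log)
----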
=== cluster.putComponentTemplate
*Stability:* experimental
[source,ts]
@ -1261,7 +1329,7 @@ client.cluster.putComponentTemplate({
body: object
})
----
link:{ref}/indices-component-template.html[Documentation] +
[cols=2*]
|===
|`name`
@ -2601,6 +2669,30 @@ link:{ref}/data-streams.html[Documentation] +
|===
=== indices.deleteIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.deleteIndexTemplate({
name: string,
timeout: string,
master_timeout: string
})
----
link:{ref}/indices-templates.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the template
|`timeout`
|`string` - Explicit operation timeout
|`master_timeout` or `masterTimeout`
|`string` - Specify timeout for connection to master
|===
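
For illustration, a minimal usage sketch; the template name is a placeholder:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const { body } = await client.indices.deleteIndexTemplate({
    name: 'my-index-template',
    master_timeout: '30s'
  })
  console.log(body) // { acknowledged: true }
}

run().catch(console.log)
----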
=== indices.deleteTemplate
[source,ts]
@ -2703,6 +2795,34 @@ _Default:_ `all`
|===
=== indices.existsIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.existsIndexTemplate({
name: string,
flat_settings: boolean,
master_timeout: string,
local: boolean
})
----
link:{ref}/indices-templates.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the template
|`flat_settings` or `flatSettings`
|`boolean` - Return settings in flat format (default: false)
|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node
|`local`
|`boolean` - Return local information, do not retrieve the state from master node (default: false)
|===
=== indices.existsTemplate
[source,ts]
@ -2966,14 +3086,14 @@ _Default:_ `all`
[source,ts]
----
client.indices.getDataStreams({
name: string | string[]
name: string
})
----
link:{ref}/data-streams.html[Documentation] +
[cols=2*]
|===
|`name`
|`string \| string[]` - The comma separated names of data streams
|`string` - The name or wildcard expression of the requested data streams
|===
@ -3028,6 +3148,34 @@ _Default:_ `open`
|===
=== indices.getIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.getIndexTemplate({
name: string | string[],
flat_settings: boolean,
master_timeout: string,
local: boolean
})
----
link:{ref}/indices-templates.html[Documentation] +
[cols=2*]
|===
|`name`
|`string \| string[]` - The comma separated names of the index templates
|`flat_settings` or `flatSettings`
|`boolean` - Return settings in flat format (default: false)
|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node
|`local`
|`boolean` - Return local information, do not retrieve the state from master node (default: false)
|===
=== indices.getMapping
[source,ts]
@ -3258,6 +3406,38 @@ link:{ref}/indices-aliases.html[Documentation] +
|===
=== indices.putIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.putIndexTemplate({
name: string,
create: boolean,
cause: string,
master_timeout: string,
body: object
})
----
link:{ref}/indices-templates.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the template
|`create`
|`boolean` - Whether the index template should only be added if new or can also replace an existing one
|`cause`
|`string` - User defined reason for creating/updating the index template
|`master_timeout` or `masterTimeout`
|`string` - Specify timeout for connection to master
|`body`
|`object` - The template definition
|===
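
For illustration, a minimal usage sketch, assuming the composable index template body format (`index_patterns` plus an optional `template` section); all names are placeholders:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const { body } = await client.indices.putIndexTemplate({
    name: 'my-index-template',
    create: true, // fail if a template with the same name already exists
    body: {
      index_patterns: ['my-index-*'],
      template: {
        settings: { number_of_shards: 1 },
        mappings: {
          properties: {
            created_at: { type: 'date' }
          }
        }
      }
    }
  })
  console.log(body) // { acknowledged: true }
}

run().catch(console.log)
----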
=== indices.putMapping
[source,ts]
@ -3597,6 +3777,38 @@ link:{ref}/indices-shrink-index.html[Documentation] +
|===
=== indices.simulateIndexTemplate
*Stability:* experimental
[source,ts]
----
client.indices.simulateIndexTemplate({
name: string,
create: boolean,
cause: string,
master_timeout: string,
body: object
})
----
link:{ref}/indices-templates.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the index (it must be a concrete index name)
|`create`
|`boolean` - Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an existing one
|`cause`
|`string` - User defined reason for dry-run creating the new template for simulation purposes
|`master_timeout` or `masterTimeout`
|`string` - Specify timeout for connection to master
|`body`
|`object` - New index template definition, which will be included in the simulation, as if it already exists in the system
|===
=== indices.split
[source,ts]
@ -4905,7 +5117,7 @@ client.snapshot.cleanupRepository({
timeout: string
})
----
link:{ref}/modules-snapshots.html[Documentation] +
link:{ref}/clean-up-snapshot-repo-api.html[Documentation] +
[cols=2*]
|===
|`repository`
@ -5021,7 +5233,7 @@ link:{ref}/modules-snapshots.html[Documentation] +
[cols=2*]
|===
|`repository`
|`string \| string[]` - A comma-separated list of repository names
|`string \| string[]` - Name of the snapshot repository to unregister. Wildcard (`*`) patterns are supported.
|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node
@ -5179,7 +5391,8 @@ client.tasks.cancel({
task_id: string,
nodes: string | string[],
actions: string | string[],
parent_task_id: string
parent_task_id: string,
wait_for_completion: boolean
})
----
link:{ref}/tasks.html[Documentation] +
@ -5197,6 +5410,9 @@ link:{ref}/tasks.html[Documentation] +
|`parent_task_id` or `parentTaskId`
|`string` - Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all.
|`wait_for_completion` or `waitForCompletion`
|`boolean` - Should the request block until the cancellation of the task and its descendant tasks is completed. Defaults to false
|===
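
For illustration, a minimal usage sketch; the task id is a placeholder in the `node_id:task_number` format:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const { body } = await client.tasks.cancel({
    task_id: 'oTUltX4IQMOUUVeiohTt8A:12345',
    wait_for_completion: true // resolve only once the cancellation has completed
  })
  console.log(body)
}

run().catch(console.log)
----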
=== tasks.get
@ -5843,6 +6059,22 @@ _Default:_ `5`
|===
=== autoscaling.deleteAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.deleteAutoscalingPolicy({
name: string
})
----
link:{ref}/autoscaling-delete-autoscaling-policy.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the autoscaling policy
|===
=== autoscaling.getAutoscalingDecision
*Stability:* experimental
[source,ts]
@ -5852,6 +6084,42 @@ client.autoscaling.getAutoscalingDecision()
link:{ref}/autoscaling-get-autoscaling-decision.html[Documentation] +
=== autoscaling.getAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.getAutoscalingPolicy({
name: string
})
----
link:{ref}/autoscaling-get-autoscaling-policy.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the autoscaling policy
|===
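
For illustration, a minimal usage sketch; the policy name is a placeholder:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const { body } = await client.autoscaling.getAutoscalingPolicy({
    name: 'my-autoscaling-policy'
  })
  console.log(body)
}

run().catch(console.log)
----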
=== autoscaling.putAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.putAutoscalingPolicy({
name: string,
body: object
})
----
link:{ref}/autoscaling-put-autoscaling-policy.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - The name of the autoscaling policy
|`body`
|`object` - The specification of the autoscaling policy
|===
=== cat.mlDataFrameAnalytics
[source,ts]
@ -6415,7 +6683,7 @@ client.eql.search({
body: object
})
----
link:{ref}/eql.html[Documentation] +
link:{ref}/eql-search-api.html[Documentation] +
[cols=2*]
|===
|`index`
@ -6939,7 +7207,8 @@ link:{ref}/ml-delete-calendar-job.html[Documentation] +
----
client.ml.deleteDataFrameAnalytics({
id: string,
force: boolean
force: boolean,
timeout: string
})
----
link:{ref}/delete-dfanalytics.html[Documentation] +
@ -6951,6 +7220,9 @@ link:{ref}/delete-dfanalytics.html[Documentation] +
|`force`
|`boolean` - True if the job should be forcefully deleted
|`timeout`
|`string` - Controls the time to wait until a job is deleted. Defaults to 1 minute
|===
=== ml.deleteDatafeed
@ -6977,10 +7249,17 @@ link:{ref}/ml-delete-datafeed.html[Documentation] +
[source,ts]
----
client.ml.deleteExpiredData()
client.ml.deleteExpiredData({
body: object
})
----
link:{ref}/ml-delete-expired-data.html[Documentation] +
[cols=2*]
|===
|`body`
|`object` - deleting expired data parameters
|===
=== ml.deleteFilter
@ -8388,6 +8667,7 @@ client.ml.validate({
body: object
})
----
link:https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html[Documentation] +
[cols=2*]
|===
|`body`
@ -8403,6 +8683,7 @@ client.ml.validateDetector({
body: object
})
----
link:https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html[Documentation] +
[cols=2*]
|===
|`body`
@ -8602,6 +8883,99 @@ link:{ref}/rollup-stop-job.html[Documentation] +
|===
=== searchableSnapshots.clearCache
*Stability:* experimental
[source,ts]
----
client.searchableSnapshots.clearCache({
index: string | string[],
ignore_unavailable: boolean,
allow_no_indices: boolean,
expand_wildcards: 'open' | 'closed' | 'none' | 'all'
})
----
link:{ref}/searchable-snapshots-api-clear-cache.html[Documentation] +
[cols=2*]
|===
|`index`
|`string \| string[]` - A comma-separated list of index names
|`ignore_unavailable` or `ignoreUnavailable`
|`boolean` - Whether specified concrete indices should be ignored when unavailable (missing or closed)
|`allow_no_indices` or `allowNoIndices`
|`boolean` - Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
|`expand_wildcards` or `expandWildcards`
|`'open' \| 'closed' \| 'none' \| 'all'` - Whether to expand wildcard expression to concrete indices that are open, closed or both. +
_Default:_ `open`
|===
=== searchableSnapshots.mount
*Stability:* experimental
[source,ts]
----
client.searchableSnapshots.mount({
repository: string,
snapshot: string,
master_timeout: string,
wait_for_completion: boolean,
body: object
})
----
link:{ref}/searchable-snapshots-api-mount-snapshot.html[Documentation] +
[cols=2*]
|===
|`repository`
|`string` - The name of the repository containing the snapshot of the index to mount
|`snapshot`
|`string` - The name of the snapshot of the index to mount
|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node
|`wait_for_completion` or `waitForCompletion`
|`boolean` - Should this request wait until the operation has completed before returning
|`body`
|`object` - The restore configuration for mounting the snapshot as searchable
|===
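
For illustration, a minimal usage sketch; the repository, snapshot, and index names are placeholders, and the body assumes the mount API format, which requires the name of the index inside the snapshot:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const { body } = await client.searchableSnapshots.mount({
    repository: 'my-repository',
    snapshot: 'my-snapshot',
    wait_for_completion: true,
    body: {
      index: 'my-index' // the index inside the snapshot to mount
    }
  })
  console.log(body)
}

run().catch(console.log)
----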
=== searchableSnapshots.repositoryStats
*Stability:* experimental
[source,ts]
----
client.searchableSnapshots.repositoryStats({
repository: string
})
----
link:{ref}/searchable-snapshots-repository-stats.html[Documentation] +
[cols=2*]
|===
|`repository`
|`string` - The repository for which to get the stats
|===
=== searchableSnapshots.stats
*Stability:* experimental
[source,ts]
----
client.searchableSnapshots.stats({
index: string | string[]
})
----
link:{ref}/searchable-snapshots-api-stats.html[Documentation] +
[cols=2*]
|===
|`index`
|`string \| string[]` - A comma-separated list of index names
|===
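
For illustration, a minimal usage sketch; the index name is a placeholder and must refer to a mounted searchable snapshot index:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const { body } = await client.searchableSnapshots.stats({
    index: 'my-mounted-index'
  })
  console.log(body)
}

run().catch(console.log)
----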
=== security.authenticate
[source,ts]

View File

@ -145,7 +145,7 @@ The supported request specific options are:
_Default:_ `null`
|`requestTimeout`
|`number` - Max request timeout for the request, it overrides the client default. +
|`number` - Max request timeout for the request in milliseconds, it overrides the client default. +
_Default:_ `30000`
|`maxRetries`
@ -194,32 +194,64 @@ console.log(errors)
You can find the errors exported by the client in the table below.
[cols=2*]
[cols=3*]
|===
|*Error*
|*Description*
|*Properties*
|`ElasticsearchClientError`
|Every error inherits from this class, it is the basic error generated by the client.
a|* `name` - `string`
* `message` - `string`
|`TimeoutError`
|Generated when a request exceeds the `requestTimeout` option.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
|`ConnectionError`
|Generated when an error occurs during the request; it can be a connection error or a malformed stream of data.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
|`RequestAbortedError`
|Generated if the user calls the `request.abort()` method.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
|`NoLivingConnectionsError`
|Given the configuration, the ConnectionPool was not able to find a usable Connection for this request.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
|`SerializationError`
|Generated if the serialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `object`, the object to serialize
|`DeserializationError`
|Generated if the deserialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `string`, the string to deserialize
|`ConfigurationError`
|Generated if there is a malformed configuration or parameter.
a|* `name` - `string`
* `message` - `string`
|`ResponseError`
|Generated in case of a `4xx` or `5xx` response.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
* `body` - `object`, the response body
* `statusCode` - `number`, the response status code
* `headers` - `object`, the response headers
|===
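
For illustration, a minimal sketch of catching and inspecting these errors; the index name is a placeholder:

[source,js]
----
const { Client, errors } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  try {
    await client.search({
      index: 'non-existent-index',
      body: { query: { match_all: {} } }
    })
  } catch (err) {
    if (err instanceof errors.ResponseError) {
      // 4xx or 5xx response from Elasticsearch
      console.log(err.statusCode, err.body)
    } else if (err instanceof errors.ConnectionError) {
      console.log('connection problem:', err.message)
    } else {
      console.log(err)
    }
  }
}

run()
----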

156
index.d.ts vendored
View File

@ -141,6 +141,14 @@ declare class Client extends EventEmitter {
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.AsyncSearchSubmit<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
}
autoscaling: {
delete_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.AutoscalingDeleteAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingDeleteAutoscalingPolicy, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingDeleteAutoscalingPolicy, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.AutoscalingDeleteAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingDeleteAutoscalingPolicy, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingDeleteAutoscalingPolicy, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_decision<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.AutoscalingGetAutoscalingDecision, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_autoscaling_decision<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_decision<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingGetAutoscalingDecision, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -149,6 +157,22 @@ declare class Client extends EventEmitter {
getAutoscalingDecision<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingDecision<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingGetAutoscalingDecision, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingDecision<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingGetAutoscalingDecision, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.AutoscalingGetAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingGetAutoscalingPolicy, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_autoscaling_policy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingGetAutoscalingPolicy, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.AutoscalingGetAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingGetAutoscalingPolicy, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getAutoscalingPolicy<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingGetAutoscalingPolicy, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_autoscaling_policy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
put_autoscaling_policy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_autoscaling_policy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_autoscaling_policy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putAutoscalingPolicy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putAutoscalingPolicy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putAutoscalingPolicy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putAutoscalingPolicy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
}
bulk<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = unknown>(params?: RequestParams.Bulk<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
bulk<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -399,6 +423,22 @@ declare class Client extends EventEmitter {
deleteComponentTemplate<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteComponentTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterDeleteComponentTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteComponentTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterDeleteComponentTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterDeleteVotingConfigExclusions, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterDeleteVotingConfigExclusions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterDeleteVotingConfigExclusions, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterDeleteVotingConfigExclusions, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterDeleteVotingConfigExclusions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterDeleteVotingConfigExclusions, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_component_template<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterExistsComponentTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
exists_component_template<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_component_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterExistsComponentTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_component_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterExistsComponentTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsComponentTemplate<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterExistsComponentTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsComponentTemplate<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsComponentTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterExistsComponentTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsComponentTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterExistsComponentTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_component_template<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterGetComponentTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_component_template<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_component_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterGetComponentTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -427,6 +467,14 @@ declare class Client extends EventEmitter {
pendingTasks<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
pendingTasks<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterPendingTasks, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
pendingTasks<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterPendingTasks, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
post_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterPostVotingConfigExclusions, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
post_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
post_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterPostVotingConfigExclusions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
post_voting_config_exclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterPostVotingConfigExclusions, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
postVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterPostVotingConfigExclusions, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
postVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
postVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterPostVotingConfigExclusions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
postVotingConfigExclusions<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterPostVotingConfigExclusions, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_component_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.ClusterPutComponentTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
put_component_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_component_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.ClusterPutComponentTemplate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -737,6 +785,14 @@ declare class Client extends EventEmitter {
deleteDataStream<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteDataStream<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesDeleteDataStream, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteDataStream<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesDeleteDataStream, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_index_template<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesDeleteIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_index_template<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_index_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesDeleteIndexTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_index_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesDeleteIndexTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesDeleteIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesDeleteIndexTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesDeleteIndexTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_template<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesDeleteTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_template<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesDeleteTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -757,6 +813,14 @@ declare class Client extends EventEmitter {
existsAlias<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsAlias<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesExistsAlias, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsAlias<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesExistsAlias, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_index_template<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesExistsIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
exists_index_template<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_index_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesExistsIndexTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_index_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesExistsIndexTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesExistsIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesExistsIndexTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
existsIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesExistsIndexTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_template<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesExistsTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
exists_template<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
exists_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesExistsTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -821,6 +885,14 @@ declare class Client extends EventEmitter {
getFieldMapping<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getFieldMapping<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesGetFieldMapping, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getFieldMapping<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesGetFieldMapping, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_index_template<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesGetIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_index_template<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_index_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesGetIndexTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_index_template<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesGetIndexTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesGetIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesGetIndexTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getIndexTemplate<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesGetIndexTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_mapping<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesGetMapping, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_mapping<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_mapping<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesGetMapping, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -865,6 +937,14 @@ declare class Client extends EventEmitter {
putAlias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putAlias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesPutAlias<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putAlias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesPutAlias<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesPutIndexTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
put_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesPutIndexTemplate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesPutIndexTemplate<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesPutIndexTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesPutIndexTemplate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
putIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesPutIndexTemplate<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_mapping<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesPutMapping<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
put_mapping<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
put_mapping<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesPutMapping<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -925,6 +1005,14 @@ declare class Client extends EventEmitter {
shrink<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
shrink<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesShrink<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
shrink<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesShrink<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
simulate_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesSimulateIndexTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
simulate_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
simulate_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesSimulateIndexTemplate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
simulate_index_template<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesSimulateIndexTemplate<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
simulateIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesSimulateIndexTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
simulateIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
simulateIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesSimulateIndexTemplate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
simulateIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesSimulateIndexTemplate<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
split<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.IndicesSplit<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
split<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
split<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.IndicesSplit<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -1105,14 +1193,14 @@ declare class Client extends EventEmitter {
deleteDatafeed<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteDatafeed<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteDatafeed, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteDatafeed<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteDatafeed, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_expired_data<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.MlDeleteExpiredData, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_expired_data<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_expired_data<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_expired_data<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteExpiredData<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.MlDeleteExpiredData, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteExpiredData<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteExpiredData<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteExpiredData<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_expired_data<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.MlDeleteExpiredData<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_expired_data<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_expired_data<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_expired_data<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteExpiredData<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.MlDeleteExpiredData<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteExpiredData<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteExpiredData<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
deleteExpiredData<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteExpiredData<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_filter<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.MlDeleteFilter, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_filter<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
delete_filter<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.MlDeleteFilter, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -1708,6 +1796,58 @@ declare class Client extends EventEmitter {
searchTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
searchTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.SearchTemplate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
searchTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.SearchTemplate<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
searchable_snapshots: {
clear_cache<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsClearCache, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clear_cache<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clear_cache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clear_cache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearCache<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsClearCache, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clearCache<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearCache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearCache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsMount<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsMount<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsMount<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repository_stats<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsRepositoryStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
repository_stats<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repository_stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repository_stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsRepositoryStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
stats<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
}
searchableSnapshots: {
clear_cache<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsClearCache, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clear_cache<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clear_cache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clear_cache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearCache<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsClearCache, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clearCache<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearCache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearCache<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsClearCache, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsMount<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsMount<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsMount<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repository_stats<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsRepositoryStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
repository_stats<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repository_stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repository_stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsRepositoryStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
repositoryStats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsRepositoryStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
stats<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SearchableSnapshotsStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
stats<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.SearchableSnapshotsStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
}
security: {
authenticate<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.SecurityAuthenticate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
authenticate<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
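Every generated method above comes in a promise flavour (no callback, returns a TransportRequestPromise) and a callback flavour. A minimal usage sketch of the two calling styles, assuming a reachable cluster and a placeholder index name:

const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// Promise style: omit the callback and consume the returned promise
client.searchableSnapshots.stats({ index: 'my-index' })
  .then(({ body }) => console.log(body))
  .catch(err => console.error(err))

// Callback style: pass a callback as the last argument instead
client.searchableSnapshots.stats({ index: 'my-index' }, (err, result) => {
  if (err) return console.error(err)
  console.log(result.body)
})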

View File

@ -13,7 +13,7 @@ const Transport = require('./lib/Transport')
const Connection = require('./lib/Connection')
const { ConnectionPool, CloudConnectionPool } = require('./lib/pool')
// Helpers works only in Node.js >= 10
const Helpers = nodeMajor < 10 ? null : require('./lib/Helpers')
const Helpers = nodeMajor < 10 ? /* istanbul ignore next */ null : require('./lib/Helpers')
const Serializer = require('./lib/Serializer')
const errors = require('./lib/errors')
const { ConfigurationError } = errors
@ -130,6 +130,7 @@ class Client extends EventEmitter {
opaqueIdPrefix: options.opaqueIdPrefix
})
/* istanbul ignore else */
if (Helpers !== null) {
this.helpers = new Helpers({ client: this, maxRetries: options.maxRetries })
}
@ -237,6 +238,7 @@ function getAuth (node) {
return null
function getUsernameAndPassword (node) {
/* istanbul ignore else */
if (typeof node === 'string') {
const { username, password } = new URL(node)
return {

View File

@ -20,7 +20,7 @@ const {
} = require('./errors')
class Connection {
constructor (opts = {}) {
constructor (opts) {
this.url = opts.url
this.ssl = opts.ssl || null
this.id = opts.id || stripAuth(opts.url.href)
@ -64,6 +64,7 @@ class Connection {
// https://github.com/nodejs/node/commit/b961d9fd83
if (INVALID_PATH_REGEX.test(requestParams.path) === true) {
callback(new TypeError(`ERR_UNESCAPED_CHARACTERS: ${requestParams.path}`), null)
/* istanbul ignore next */
return { abort: () => {} }
}
@ -73,6 +74,7 @@ class Connection {
// listen for the response event
// TODO: handle redirects?
request.on('response', response => {
/* istanbul ignore else */
if (ended === false) {
ended = true
this._openRequests--
@ -87,6 +89,7 @@ class Connection {
// handles request timeout
request.on('timeout', () => {
/* istanbul ignore else */
if (ended === false) {
ended = true
this._openRequests--
@ -97,6 +100,7 @@ class Connection {
// handles request error
request.on('error', err => {
/* istanbul ignore else */
if (ended === false) {
ended = true
this._openRequests--
@ -107,6 +111,7 @@ class Connection {
// updates the ended state
request.on('abort', () => {
debug('Request aborted', params)
/* istanbul ignore else */
if (ended === false) {
ended = true
this._openRequests--
@ -121,7 +126,7 @@ class Connection {
if (isStream(params.body) === true) {
pump(params.body, request, err => {
/* istanbul ignore if */
if (err != null && ended === false) {
if (err != null && /* istanbul ignore next */ ended === false) {
ended = true
this._openRequests--
callback(err, null)
@ -300,6 +305,7 @@ function resolve (host, path) {
function prepareHeaders (headers = {}, auth) {
if (auth != null && headers.authorization == null) {
/* istanbul ignore else */
if (auth.apiKey) {
if (typeof auth.apiKey === 'object') {
headers.authorization = 'ApiKey ' + Buffer.from(`${auth.apiKey.id}:${auth.apiKey.api_key}`).toString('base64')
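For reference, a small sketch of how the ApiKey authorization value above is assembled when the key is passed as an { id, api_key } object (the id and secret below are placeholders):

const apiKeyId = 'VuaCfGcBCdbkQm-e5aOx'        // placeholder API key id
const apiKeySecret = 'ui2lp2axTNmsyakw9tvNnw'  // placeholder API key secret

const authorization = 'ApiKey ' + Buffer.from(`${apiKeyId}:${apiKeySecret}`).toString('base64')
// the resulting value is sent as the `authorization` header of each request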

lib/Helpers.d.ts
View File

@ -3,13 +3,14 @@
// See the LICENSE file in the project root for more information
import { Readable as ReadableStream } from 'stream'
import { TransportRequestOptions, ApiResponse, RequestBody } from './Transport'
import { Search, Bulk } from '../api/requestParams'
import { TransportRequestOptions, ApiError, ApiResponse, RequestBody } from './Transport'
import { Search, Msearch, Bulk } from '../api/requestParams'
export default class Helpers {
search<TDocument = unknown, TRequestBody extends RequestBody = Record<string, any>>(params: Search<TRequestBody>, options?: TransportRequestOptions): Promise<TDocument[]>
scrollSearch<TDocument = unknown, TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: Search<TRequestBody>, options?: TransportRequestOptions): AsyncIterable<ScrollSearchResponse<TDocument, TResponse, TContext>>
scrollDocuments<TDocument = unknown, TRequestBody extends RequestBody = Record<string, any>>(params: Search<TRequestBody>, options?: TransportRequestOptions): AsyncIterable<TDocument>
msearch(options?: MsearchHelperOptions): MsearchHelper
bulk<TDocument = unknown>(options: BulkHelperOptions<TDocument>): BulkHelper<BulkStats>
}
@ -68,10 +69,11 @@ export interface BulkHelperOptions<TDocument = unknown> extends Omit<Bulk, 'body
datasource: TDocument[] | Buffer | ReadableStream | AsyncIterator<TDocument>
onDocument: (doc: TDocument) => Action
flushBytes?: number
flushInterval?: number
concurrency?: number
retries?: number
wait?: number,
onDrop?: (doc: OnDropDocument<TDocument>) => void,
wait?: number
onDrop?: (doc: OnDropDocument<TDocument>) => void
refreshOnCompletion?: boolean | string
}
@ -87,4 +89,19 @@ export interface OnDropDocument<TDocument = unknown> {
}
document: TDocument
retried: boolean
}
}
export interface MsearchHelperOptions extends Omit<Msearch, 'body'> {
operations?: number
flushInterval?: number
concurrency?: number
retries?: number
wait?: number
}
declare type callbackFn<Response, Context> = (err: ApiError, result: ApiResponse<Response, Context>) => void;
export interface MsearchHelper extends Promise<void> {
stop(error?: Error): void
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(header: Omit<Search, 'body'>, body: TRequestBody): Promise<ApiResponse<TResponse, TContext>>
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(header: Omit<Search, 'body'>, body: TRequestBody, callback: callbackFn<TResponse, TContext>): void
}
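Per the interface above, search() can be consumed with promises or with callbacks. A minimal sketch, assuming an existing client instance and a placeholder 'test' index:

const m = client.helpers.msearch()

// Promise style
m.search({ index: 'test' }, { query: { match: { title: 'javascript' } } })
  .then(result => console.log(result.body.hits.hits.length))
  .catch(err => console.error(err))

// Callback style
m.search({ index: 'test' }, { query: { match: { title: 'ruby' } } }, (err, result) => {
  if (err) return console.error(err)
  console.log(result.documents)          // only the hits sources
})

// stop accepting new searches once you are done
m.stop()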

View File

@ -4,13 +4,16 @@
'use strict'
/* eslint camelcase: 0 */
const { Readable } = require('stream')
const { promisify } = require('util')
const { ResponseError, ConfigurationError } = require('./errors')
const pImmediate = promisify(setImmediate)
const sleep = promisify(setTimeout)
const kGetHits = Symbol('elasticsearch-get-hits')
const kClient = Symbol('elasticsearch-client')
/* istanbul ignore next */
const noop = () => {}
class Helpers {
@ -19,23 +22,22 @@ class Helpers {
this.maxRetries = opts.maxRetries
}
[kGetHits] (body) {
if (body.hits && body.hits.hits) {
return body.hits.hits.map(d => d._source)
}
return []
}
/**
* Runs a search operation. The only difference between client.search and this utility
* is that we are only returning the hits to the user and not the full ES response.
* This helper automatically adds `filter_path=hits.hits._source` to the querystring,
* as it will only need the documents source.
* @param {object} params - The Elasticsearch search parameters.
* @param {object} options - The client's optional configuration for this request.
* @return {array} The documents that matched the request.
*/
async search (params, options) {
const response = await this[kClient].search(params, options)
return this[kGetHits](response.body)
appendFilterPath('hits.hits._source', params, true)
const { body } = await this[kClient].search(params, options)
if (body.hits && body.hits.hits) {
return body.hits.hits.map(d => d._source)
}
return []
}
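A usage sketch of the helper above, assuming an existing client and a placeholder index: it resolves directly with the array of document sources instead of the full Elasticsearch response.

async function run () {
  const documents = await client.helpers.search({
    index: 'stackoverflow',                                  // placeholder index
    body: { query: { match: { title: 'javascript' } } }
  })
  console.log(documents)                                     // e.g. [{ title: '...' }, ...]
}
run().catch(console.error)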
/**
@ -63,6 +65,8 @@ class Helpers {
options.ignore = [429]
}
params.scroll = params.scroll || '1m'
appendFilterPath('_scroll_id', params, false)
const { method, body, index, ...querystring } = params
let response = null
for (let i = 0; i < maxRetries; i++) {
@ -74,33 +78,31 @@ class Helpers {
throw new ResponseError(response)
}
let scrollId = response.body._scroll_id
let scroll_id = response.body._scroll_id
let stop = false
const clear = async () => {
stop = true
await this[kClient].clearScroll(
{ body: { scroll_id: scrollId } },
{ body: { scroll_id } },
{ ignore: [400] }
)
}
while (response.body.hits.hits.length > 0) {
scrollId = response.body._scroll_id
while (response.body.hits && response.body.hits.hits.length > 0) {
scroll_id = response.body._scroll_id
response.clear = clear
response.documents = this[kGetHits](response.body)
addDocumentsGetter(response)
yield response
if (!scrollId || stop === true) {
if (!scroll_id || stop === true) {
break
}
for (let i = 0; i < maxRetries; i++) {
response = await this[kClient].scroll({
scroll: params.scroll,
body: {
scroll_id: scrollId
}
...querystring,
body: { scroll_id }
}, options)
if (response.statusCode !== 429) break
await sleep(wait)
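A consumption sketch for the scroll generator above, assuming an existing client: each iteration exposes the raw response, a lazy documents getter, and a clear() function to stop scrolling early.

async function run () {
  let pages = 0
  const scrollSearch = client.helpers.scrollSearch({
    index: 'stackoverflow',                   // placeholder index
    body: { query: { match: { title: 'javascript' } } }
  })
  for await (const response of scrollSearch) {
    console.log(response.documents.length)    // sources of the current page
    if (++pages === 10) {
      await response.clear()                  // stops the iteration and clears the scroll
    }
  }
}
run().catch(console.error)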
@ -120,11 +122,14 @@ class Helpers {
* }
* ```
* Each document is what you will find by running a scrollSearch and iterating on the hits array.
* This helper automatically adds `filter_path=hits.hits._source` to the querystring,
* as it will only need the documents source.
* @param {object} params - The Elasticsearch search parameters.
* @param {object} options - The client's optional configuration for this request.
* @return {iterator} the async iterator
*/
async * scrollDocuments (params, options) {
appendFilterPath('hits.hits._source', params, true)
for await (const { documents } of this.scrollSearch(params)) {
for (const document of documents) {
yield document
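And the equivalent sketch for scrollDocuments, which hides the response entirely and yields one source object at a time (same assumptions as above):

async function run () {
  const scrollDocuments = client.helpers.scrollDocuments({
    index: 'stackoverflow',                   // placeholder index
    body: { query: { match_all: {} } }
  })
  for await (const document of scrollDocuments) {
    console.log(document)                     // a single _source object
  }
}
run().catch(console.error)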
@ -132,21 +137,266 @@ class Helpers {
}
}
/**
* Creates a msearch helper instance. Once you configure it, you can use the provided
* `search` method to add new searches in the queue.
* @param {object} options - The configuration of the msearch operations.
* @return {object} The possible operations to run.
*/
msearch (options = {}) {
const client = this[kClient]
const {
operations = 5,
concurrency = 5,
flushInterval = 500,
retries = this.maxRetries,
wait = 5000,
...msearchOptions
} = options
let stopReading = false
let stopError = null
let timeoutRef = null
const operationsStream = new Readable({
objectMode: true,
read (size) {}
})
const p = iterate()
const helper = {
then (onFulfilled, onRejected) {
return p.then(onFulfilled, onRejected)
},
catch (onRejected) {
return p.catch(onRejected)
},
stop (error = null) {
if (stopReading === true) return
stopReading = true
stopError = error
operationsStream.push(null)
},
// TODO: support abort a single search?
// NOTE: the validation checks are synchronous and the callback/promise will
// be resolved in the same tick. We might want to fix this in the future.
search (header, body, callback) {
if (stopReading === true) {
const error = stopError === null
? new ConfigurationError('The msearch processor has been stopped')
: stopError
return callback ? callback(error, {}) : Promise.reject(error)
}
if (!(typeof header === 'object' && header !== null && !Array.isArray(header))) {
const error = new ConfigurationError('The header should be an object')
return callback ? callback(error, {}) : Promise.reject(error)
}
if (!(typeof body === 'object' && body !== null && !Array.isArray(body))) {
const error = new ConfigurationError('The body should be an object')
return callback ? callback(error, {}) : Promise.reject(error)
}
let promise = null
if (callback === undefined) {
let onFulfilled = null
let onRejected = null
promise = new Promise((resolve, reject) => {
onFulfilled = resolve
onRejected = reject
})
callback = function callback (err, result) {
err ? onRejected(err) : onFulfilled(result)
}
}
operationsStream.push([header, body, callback])
if (promise !== null) {
return promise
}
}
}
return helper
async function iterate () {
const { semaphore, finish } = buildSemaphore()
const msearchBody = []
const callbacks = []
let loadedOperations = 0
timeoutRef = setTimeout(onFlushTimeout, flushInterval)
for await (const operation of operationsStream) {
timeoutRef.refresh()
loadedOperations += 1
msearchBody.push(operation[0], operation[1])
callbacks.push(operation[2])
if (loadedOperations >= operations) {
const send = await semaphore()
send(msearchBody.slice(), callbacks.slice())
msearchBody.length = 0
callbacks.length = 0
loadedOperations = 0
}
}
clearTimeout(timeoutRef)
// In some cases the previous http call has not finished yet,
// or we didn't reach the operations threshold, so we force one last flush.
if (loadedOperations > 0) {
const send = await semaphore()
send(msearchBody, callbacks)
}
await finish()
if (stopError !== null) {
throw stopError
}
async function onFlushTimeout () {
if (loadedOperations === 0) return
const msearchBodyCopy = msearchBody.slice()
const callbacksCopy = callbacks.slice()
msearchBody.length = 0
callbacks.length = 0
loadedOperations = 0
try {
const send = await semaphore()
send(msearchBodyCopy, callbacksCopy)
} catch (err) {
/* istanbul ignore next */
helper.stop(err)
}
}
}
// This function builds a semaphore based on the `concurrency`
// option of the msearch helper. It is used inside the iterator
// to guarantee that no more than `concurrency` msearch requests
// run at the same time.
// It returns a semaphore function, which resolves in the next tick
// if we haven't reached the maximum concurrency yet, otherwise it returns
// a promise that resolves as soon as one of the running requests has finished.
// The semaphore function resolves to a send function, which is used
// to send the actual msearch request.
// It also returns a finish function, which returns a promise that resolves
// once there are no more requests running.
function buildSemaphore () {
let resolveSemaphore = null
let resolveFinish = null
let running = 0
return { semaphore, finish }
function finish () {
return new Promise((resolve, reject) => {
if (running === 0) {
resolve()
} else {
resolveFinish = resolve
}
})
}
function semaphore () {
if (running < concurrency) {
running += 1
return pImmediate(send)
} else {
return new Promise((resolve, reject) => {
resolveSemaphore = resolve
})
}
}
function send (msearchBody, callbacks) {
/* istanbul ignore if */
if (running > concurrency) {
throw new Error('Max concurrency reached')
}
msearchOperation(msearchBody, callbacks, () => {
running -= 1
if (resolveSemaphore) {
running += 1
resolveSemaphore(send)
resolveSemaphore = null
} else if (resolveFinish && running === 0) {
resolveFinish()
}
})
}
}
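A stand-alone restatement of the semaphore idea described above (a simplified sketch, not the helper's internal API): acquire() resolves with a release function once a slot is free, and finish() resolves when every running task has drained.

function buildLimiter (concurrency) {
  let running = 0
  const waiting = []
  let onIdle = null

  function release () {
    running -= 1
    if (waiting.length > 0) {
      running += 1
      waiting.shift()(release)           // hand the freed slot to the next waiter
    } else if (running === 0 && onIdle !== null) {
      onIdle()                           // nothing left in flight
    }
  }

  function acquire () {
    if (running < concurrency) {
      running += 1
      return Promise.resolve(release)
    }
    return new Promise(resolve => waiting.push(resolve))
  }

  function finish () {
    return running === 0
      ? Promise.resolve()
      : new Promise(resolve => { onIdle = resolve })
  }

  return { acquire, finish }
}

// usage: never more than two doWork calls in flight
const doWork = i => new Promise(resolve => setTimeout(resolve, 100, i))   // stand-in async task
async function run () {
  const { acquire, finish } = buildLimiter(2)
  for (let i = 0; i < 5; i++) {
    const release = await acquire()
    doWork(i).then(release, release)     // release the slot when the task settles
  }
  await finish()
}
run().catch(console.error)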
function msearchOperation (msearchBody, callbacks, done) {
let retryCount = retries
// Instead of going full on async-await, which would make the code easier to read,
// we have decided to use callback style instead.
// This is because every time we use async/await, V8 will create multiple promises
// behind the scenes, making the code slightly slower.
tryMsearch(msearchBody, callbacks, retrySearch)
function retrySearch (msearchBody, callbacks) {
if (msearchBody.length > 0 && retryCount > 0) {
retryCount -= 1
setTimeout(tryMsearch, wait, msearchBody, callbacks, retrySearch)
return
}
done()
}
// This function never returns an error; if the msearch operation fails,
// the error is dispatched to all the search executors.
function tryMsearch (msearchBody, callbacks, done) {
client.msearch(Object.assign({}, msearchOptions, { body: msearchBody }), (err, results) => {
const retryBody = []
const retryCallbacks = []
if (err) {
addDocumentsGetter(results)
for (const callback of callbacks) {
callback(err, results)
}
return done(retryBody, retryCallbacks)
}
const { responses } = results.body
for (let i = 0, len = responses.length; i < len; i++) {
const response = responses[i]
if (response.status === 429 && retryCount > 0) {
retryBody.push(msearchBody[i * 2])
retryBody.push(msearchBody[(i * 2) + 1])
retryCallbacks.push(callbacks[i])
continue
}
const result = { ...results, body: response }
addDocumentsGetter(result)
if (response.status >= 400) {
callbacks[i](new ResponseError(result), result)
} else {
callbacks[i](null, result)
}
}
done(retryBody, retryCallbacks)
})
}
}
}
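Since the returned helper is itself thenable, the whole msearch session can be awaited. A small lifecycle sketch, assuming an existing client (stopping with an error makes the awaited promise reject):

async function run () {
  const m = client.helpers.msearch({ operations: 10, flushInterval: 500 })

  m.search({ index: 'test' }, { query: { match_all: {} } })  // placeholder search
    .then(result => console.log(result.documents.length))
    .catch(err => console.error(err))

  // ...push more searches over time...

  m.stop()        // or m.stop(new Error('kaboom')) to reject the helper promise
  await m         // resolves once every queued search has been sent
}
run().catch(console.error)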
/**
* Creates a bulk helper instance. Once you configure it, you can pick which operation
* to execute with the given dataset, index, create, update, and delete.
* @param {object} options - The configuration of the bulk operation.
* @return {object} The possible orations to run with the datasource.
* @return {object} The possible operations to run with the datasource.
*/
bulk (options) {
// TODO: add an interval to force flush the body
// to handle the slow producer problem
const client = this[kClient]
const { serialize, deserialize } = client.serializer
const {
datasource,
onDocument,
flushBytes = 5000000,
flushInterval = 30000,
concurrency = 5,
retries = this.maxRetries,
wait = 5000,
@ -166,6 +416,7 @@ class Helpers {
}
let shouldAbort = false
let timeoutRef = null
const stats = {
total: 0,
failed: 0,
@ -177,8 +428,7 @@ class Helpers {
}
const p = iterate()
return {
const helper = {
then (onFulfilled, onRejected) {
return p.then(onFulfilled, onRejected)
},
@ -186,12 +436,15 @@ class Helpers {
return p.catch(onRejected)
},
abort () {
clearTimeout(timeoutRef)
shouldAbort = true
stats.aborted = true
return this
}
}
return helper
/**
* Function that iterates over the given datasource and start a bulk operation as soon
* as it reaches the configured bulk size. It's designed to use the Node.js asynchronous
@ -208,9 +461,11 @@ class Helpers {
let actionBody = ''
let payloadBody = ''
let chunkBytes = 0
timeoutRef = setTimeout(onFlushTimeout, flushInterval)
for await (const chunk of datasource) {
if (shouldAbort === true) break
timeoutRef.refresh()
const action = onDocument(chunk)
const operation = Array.isArray(action)
? Object.keys(action[0])[0]
@ -219,21 +474,20 @@ class Helpers {
actionBody = serialize(action)
payloadBody = typeof chunk === 'string' ? chunk : serialize(chunk)
chunkBytes += Buffer.byteLength(actionBody) + Buffer.byteLength(payloadBody)
bulkBody.push(actionBody)
bulkBody.push(payloadBody)
bulkBody.push(actionBody, payloadBody)
} else if (operation === 'update') {
actionBody = serialize(action[0])
payloadBody = typeof chunk === 'string'
? `{doc:${chunk}}`
? `{"doc":${chunk}}`
: serialize({ doc: chunk, ...action[1] })
chunkBytes += Buffer.byteLength(actionBody) + Buffer.byteLength(payloadBody)
bulkBody.push(actionBody)
bulkBody.push(payloadBody)
bulkBody.push(actionBody, payloadBody)
} else if (operation === 'delete') {
actionBody = serialize(action)
chunkBytes += Buffer.byteLength(actionBody)
bulkBody.push(actionBody)
} else {
clearTimeout(timeoutRef)
throw new ConfigurationError(`Bulk helper invalid action: '${operation}'`)
}
@ -246,6 +500,7 @@ class Helpers {
}
}
clearTimeout(timeoutRef)
// In some cases the previous http call has not finished yet,
// or we didn't reach the flush bytes threshold, so we force one last operation.
if (shouldAbort === false && chunkBytes > 0) {
@ -268,6 +523,21 @@ class Helpers {
stats.total = stats.successful + stats.failed
return stats
async function onFlushTimeout () {
if (chunkBytes === 0) return
stats.bytes += chunkBytes
const bulkBodyCopy = bulkBody.slice()
bulkBody.length = 0
chunkBytes = 0
try {
const send = await semaphore()
send(bulkBodyCopy)
} catch (err) {
/* istanbul ignore next */
helper.abort()
}
}
}
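The flush timer added above means a slow producer no longer keeps a partial body buffered indefinitely. A usage sketch, assuming an existing client and a slow async generator as datasource:

async function * slowSource () {
  for (const user of ['arya', 'jon', 'tyrion']) {            // placeholder documents
    await new Promise(resolve => setTimeout(resolve, 60000)) // slow producer
    yield { user }
  }
}

async function run () {
  const result = await client.helpers.bulk({
    datasource: slowSource(),
    flushBytes: 5000000,
    flushInterval: 30000,     // flush whatever is buffered every 30 seconds
    onDocument (doc) {
      return { index: { _index: 'test' } }
    }
  })
  console.log(result)         // { total, successful, failed, retry, ... }
}
run().catch(console.error)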
// This function builds a semaphore using the concurrency
@ -308,6 +578,7 @@ class Helpers {
function semaphore () {
if (running < concurrency) {
running += 1
return pImmediate(send)
} else {
return new Promise((resolve, reject) => {
@ -318,10 +589,9 @@ class Helpers {
function send (bulkBody) {
/* istanbul ignore if */
if (running >= concurrency) {
if (running > concurrency) {
throw new Error('Max concurrency reached')
}
running += 1
bulkOperation(bulkBody, err => {
running -= 1
if (err) {
@ -329,6 +599,7 @@ class Helpers {
error = err
}
if (resolveSemaphore) {
running += 1
resolveSemaphore(send)
resolveSemaphore = null
} else if (resolveFinish && running === 0) {
@ -371,6 +642,7 @@ class Helpers {
operation: deserialize(bulkBody[i]),
document: operation !== 'delete'
? deserialize(bulkBody[i + 1])
/* istanbul ignore next */
: null,
retried: isRetrying
})
@ -402,6 +674,7 @@ class Helpers {
// but the ES node were handling too many operations.
if (status === 429) {
retry.push(bulkBody[indexSlice])
/* istanbul ignore next */
if (operation !== 'delete') {
retry.push(bulkBody[indexSlice + 1])
}
@ -428,4 +701,27 @@ class Helpers {
}
}
// Using a getter will improve the overall performance of the code,
// as we will read the documents only if needed.
function addDocumentsGetter (result) {
Object.defineProperty(result, 'documents', {
get () {
if (this.body.hits && this.body.hits.hits) {
return this.body.hits.hits.map(d => d._source)
}
return []
}
})
}
function appendFilterPath (filter, params, force) {
if (params.filter_path !== undefined) {
params.filter_path += ',' + filter
} else if (params.filterPath !== undefined) {
params.filterPath += ',' + filter
} else if (force === true) {
params.filter_path = filter
}
}
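For clarity, a short illustration of how appendFilterPath behaves (using the function defined just above): an existing filter_path is extended rather than replaced, and without force nothing is added when no filter_path is present.

const searchParams = { index: 'test', filter_path: 'hits.hits._id' }
appendFilterPath('hits.hits._source', searchParams, true)
// searchParams.filter_path === 'hits.hits._id,hits.hits._source'

const scrollParams = { index: 'test' }
appendFilterPath('_scroll_id', scrollParams, false)
// scrollParams.filter_path is still undefined: without force the filter
// is only appended when the user already asked for a filter_path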
module.exports = Helpers

View File

@ -22,7 +22,7 @@ const clientVersion = require('../package.json').version
const userAgent = `elasticsearch-js/${clientVersion} (${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})`
class Transport {
constructor (opts = {}) {
constructor (opts) {
if (typeof opts.compression === 'string' && opts.compression !== 'gzip') {
throw new ConfigurationError(`Invalid compression: '${opts.compression}'`)
}
@ -51,7 +51,6 @@ class Transport {
} else if (opts.nodeSelector === 'round-robin') {
this.nodeSelector = roundRobinSelector()
} else if (opts.nodeSelector === 'random') {
/* istanbul ignore next */
this.nodeSelector = randomSelector
} else {
this.nodeSelector = roundRobinSelector()
@ -385,7 +384,7 @@ class Transport {
}
debug('Sniffing ended successfully', result.body)
const protocol = result.meta.connection.url.protocol || 'http:'
const protocol = result.meta.connection.url.protocol || /* istanbul ignore next */ 'http:'
const hosts = this.connectionPool.nodesToHost(result.body.nodes, protocol)
this.connectionPool.update(hosts)

View File

@ -52,6 +52,7 @@ class BaseConnectionPool {
}
if (opts.ssl == null) opts.ssl = this._ssl
/* istanbul ignore else */
if (opts.agent == null) opts.agent = this._agent
const connection = new this.Connection(opts)
@ -201,6 +202,7 @@ class BaseConnectionPool {
}
address = address.slice(0, 4) === 'http'
/* istanbul ignore next */
? address
: `${protocol}//${address}`
const roles = node.roles.reduce((acc, role) => {

View File

@ -7,7 +7,7 @@
const BaseConnectionPool = require('./BaseConnectionPool')
class CloudConnectionPool extends BaseConnectionPool {
constructor (opts = {}) {
constructor (opts) {
super(opts)
this.cloudConnection = null
}

View File

@ -11,7 +11,7 @@ const Connection = require('../Connection')
const noop = () => {}
class ConnectionPool extends BaseConnectionPool {
constructor (opts = {}) {
constructor (opts) {
super(opts)
this.dead = []

View File

@ -4,7 +4,7 @@
"main": "index.js",
"types": "index.d.ts",
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
"version": "7.7.1",
"version": "7.8.0",
"keywords": [
"elasticsearch",
"elastic",
@ -16,21 +16,19 @@
"index"
],
"scripts": {
"test": "npm run lint && npm run test:unit && npm run test:behavior && npm run test:types",
"test:node8": "npm run lint && tap test/unit/*.test.js -t 300 --no-coverage && npm run test:behavior && npm run test:types",
"test:unit": "tap test/unit/*.test.js test/unit/**/*.test.js -t 300 --no-coverage",
"test:behavior": "tap test/behavior/*.test.js -t 300 --no-coverage",
"test": "npm run lint && tap test/{unit,acceptance}/{*,**/*}.test.js && npm run test:types",
"test:node8": "npm run lint && tap test/{unit,acceptance}/*.test.js && npm run test:types",
"test:unit": "tap test/unit/{*,**/*}.test.js",
"test:acceptance": "tap test/acceptance/*.test.js",
"test:integration": "node test/integration/index.js",
"test:integration:helpers": "tap test/integration/helpers/*.test.js --no-coverage -J",
"test:integration:helpers": "tap test/integration/helpers/*.test.js",
"test:types": "tsd",
"test:coverage": "tap test/unit/*.test.js test/unit/**/*.test.js test/behavior/*.test.js -t 300 && nyc report --reporter=text-lcov > coverage.lcov",
"test:coverage-ui": "tap test/unit/*.test.js test/unit/**/*.test.js test/behavior/*.test.js -t 300 --coverage-report=html",
"test:coverage-100": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --100 --nyc-arg=\"--exclude=api\"",
"test:coverage-report": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --nyc-arg=\"--exclude=api\" && nyc report --reporter=text-lcov > coverage.lcov",
"test:coverage-ui": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --coverage-report=html --nyc-arg=\"--exclude=api\"",
"lint": "standard",
"lint:fix": "standard --fix",
"ci": "npm run license-checker && npm test && npm run test:integration:helpers && npm run test:integration && npm run test:coverage",
"license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause'",
"elasticsearch": "./scripts/es-docker.sh",
"elasticsearch:xpack": "./scripts/es-docker-platinum.sh"
"license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause'"
},
"author": {
"name": "Tomas Della Vedova",
@ -41,6 +39,7 @@
"company": "Elasticsearch BV"
},
"devDependencies": {
"@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1",
"@types/node": "^12.6.2",
"convert-hrtime": "^3.0.0",
"dedent": "^0.7.0",
@ -50,7 +49,6 @@
"into-stream": "^5.1.1",
"js-yaml": "^3.13.1",
"license-checker": "^25.0.1",
"lolex": "^4.0.1",
"minimist": "^1.2.0",
"ora": "^3.4.0",
"pretty-hrtime": "^1.0.3",
@ -86,5 +84,13 @@
},
"tsd": {
"directory": "test/types"
},
"tap": {
"esm": false,
"ts": false,
"jsx": false,
"flow": false,
"coverage": false,
"jobs-auto": true
}
}
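The lolex dependency removed above has been replaced by @sinonjs/fake-timers, its direct successor with the same surface. A minimal sketch of the install/uninstall cycle the test suite relies on:

const FakeTimers = require('@sinonjs/fake-timers')

const clock = FakeTimers.install({ toFake: ['Date'] })   // only fake what the test needs
// ...exercise the code under test: Date.now() is now controlled by the clock...
clock.tick(60 * 1000)                                    // advance the fake time by one minute
clock.uninstall()                                        // always restore the real timers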

View File

@ -123,6 +123,8 @@ export interface ${toPascalCase(name)}${body ? `<T = ${bodyGeneric}>` : ''} exte
case 'double':
case 'long':
return 'number'
case 'boolean|long':
return 'boolean | number'
default:
return type
}

View File

@ -1,7 +1,7 @@
'use strict'
const { test } = require('tap')
const lolex = require('lolex')
const FakeTimers = require('@sinonjs/fake-timers')
const { Client, Transport } = require('../../index')
const {
connection: { MockConnection, MockConnectionSniff }
@ -125,7 +125,7 @@ test('Request id', t => {
t.test('Resurrect should use the same request id of the request that starts it', t => {
t.plan(2)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
@ -281,7 +281,7 @@ test('Client name', t => {
t.test('Resurrect should have the client name configured', t => {
t.plan(2)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
@ -305,7 +305,7 @@ test('Client name', t => {
t.test('Resurrect should have the client name configured (child client)', t => {
t.plan(2)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,

View File

@ -6,7 +6,7 @@
const { test } = require('tap')
const { URL } = require('url')
const lolex = require('lolex')
const FakeTimers = require('@sinonjs/fake-timers')
const workq = require('workq')
const { buildCluster } = require('../utils')
const { Client, events } = require('../../index')
@ -24,7 +24,7 @@ const { Client, events } = require('../../index')
test('Should execute the recurrect API with the ping strategy', t => {
t.plan(8)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
const q = workq()
buildCluster({ numberOfNodes: 2 }, cluster => {
@ -77,7 +77,7 @@ test('Should execute the recurrect API with the ping strategy', t => {
test('Resurrect a node and handle 502/3/4 status code', t => {
t.plan(15)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
const q = workq()
var count = 0
@ -146,7 +146,7 @@ test('Resurrect a node and handle 502/3/4 status code', t => {
test('Should execute the recurrect API with the optimistic strategy', t => {
t.plan(8)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
const q = workq()
buildCluster({ numberOfNodes: 2 }, cluster => {

View File

@ -6,7 +6,7 @@
const { test } = require('tap')
const { URL } = require('url')
const lolex = require('lolex')
const FakeTimers = require('@sinonjs/fake-timers')
const workq = require('workq')
const { buildCluster } = require('../utils')
const { Client, Connection, Transport, events, errors } = require('../../index')
@ -115,7 +115,7 @@ test('Should handle hostnames in publish_address', t => {
test('Sniff interval', t => {
t.plan(11)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
const q = workq()
buildCluster(({ nodes, shutdown, kill }) => {

View File

@ -0,0 +1,106 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
const { createReadStream } = require('fs')
const { join } = require('path')
const split = require('split2')
const { test, beforeEach, afterEach } = require('tap')
const { waitCluster } = require('../../utils')
const { Client, errors } = require('../../../')
const INDEX = `test-helpers-${process.pid}`
const client = new Client({
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
})
beforeEach(async () => {
await waitCluster(client)
await client.indices.create({ index: INDEX })
const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
const result = await client.helpers.bulk({
datasource: stream.pipe(split()),
refreshOnCompletion: true,
onDocument (doc) {
return {
index: { _index: INDEX }
}
}
})
if (result.failed > 0) {
throw new Error('Failed bulk indexing docs')
}
})
afterEach(async () => {
await client.indices.delete({ index: INDEX }, { ignore: 404 })
})
test('Basic', t => {
t.plan(4)
const m = client.helpers.msearch({ operations: 1 })
m.search(
{ index: INDEX },
{ query: { match: { title: 'javascript' } } },
(err, result) => {
t.error(err)
t.strictEqual(result.body.hits.total.value, 106)
}
)
m.search(
{ index: INDEX },
{ query: { match: { title: 'ruby' } } },
(err, result) => {
t.error(err)
t.strictEqual(result.body.hits.total.value, 29)
}
)
t.teardown(() => m.stop())
})
test('Bad request', t => {
t.plan(3)
const m = client.helpers.msearch({ operations: 1 })
m.search(
{ index: INDEX },
{ query: { match: { title: 'javascript' } } },
(err, result) => {
t.error(err)
t.strictEqual(result.body.hits.total.value, 106)
}
)
m.search(
{ index: INDEX },
{ query: { foo: { title: 'ruby' } } },
(err, result) => {
t.true(err instanceof errors.ResponseError)
}
)
t.teardown(() => m.stop())
})
test('Send multiple request concurrently over the concurrency limit', t => {
t.plan(20)
const m = client.helpers.msearch({ operations: 1 })
for (let i = 0; i < 10; i++) {
m.search(
{ index: INDEX },
{ query: { match: { title: 'javascript' } } },
(err, result) => {
t.error(err)
t.strictEqual(result.body.hits.total.value, 106)
}
)
}
t.teardown(() => m.stop())
})

View File

@ -55,6 +55,8 @@ const xPackBlackList = {
'Attempt to open job when upgrade_mode is enabled',
'Setting upgrade mode to disabled from enabled'
],
// The cleanup fails with an index not found error when retrieving the jobs
'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'],
// investigate why this is failing
'monitoring/bulk/10_basic.yml': ['*'],
'monitoring/bulk/20_privileges.yml': ['*'],

View File

@ -9,8 +9,10 @@ import {
BulkStats,
BulkHelperOptions,
ScrollSearchResponse,
OnDropDocument
OnDropDocument,
MsearchHelper
} from '../../lib/Helpers'
import { ApiResponse, ApiError } from '../../lib/Transport'
const client = new Client({
node: 'http://localhost:9200'
@ -25,6 +27,7 @@ const b = client.helpers.bulk<Record<string, any>>({
return { index: { _index: 'test' } }
},
flushBytes: 5000000,
flushInterval: 30000,
concurrency: 5,
retries: 3,
wait: 5000,
@ -56,7 +59,7 @@ expectError(
const options = {
datasource: [],
onDocument (doc: Record<string, any>) {
return { index: { _index: 'test' } }
return { index: { _index: 'test' } }
}
}
expectAssignable<BulkHelperOptions<Record<string, any>>>(options)
@ -137,20 +140,20 @@ expectError(
}
// with type defs
{
{
interface ShardsResponse {
total: number;
successful: number;
failed: number;
skipped: number;
}
interface Explanation {
value: number;
description: string;
details: Explanation[];
}
interface SearchResponse<T> {
took: number;
timed_out: boolean;
@ -176,7 +179,7 @@ expectError(
};
aggregations?: any;
}
interface Source {
foo: string
}
@ -206,20 +209,20 @@ expectError(
match: { foo: string }
}
}
interface ShardsResponse {
total: number;
successful: number;
failed: number;
skipped: number;
}
interface Explanation {
value: number;
description: string;
details: Explanation[];
}
interface SearchResponse<T> {
took: number;
timed_out: boolean;
@ -245,7 +248,7 @@ expectError(
};
aggregations?: any;
}
interface Source {
foo: string
}
@ -308,7 +311,7 @@ expectError(
}
// with type defs
{
{
interface Source {
foo: string
}
@ -335,7 +338,7 @@ expectError(
match: { foo: string }
}
}
interface Source {
foo: string
}
@ -413,7 +416,7 @@ expectError(
match: { foo: string }
}
}
interface Source {
foo: string
}
@ -429,4 +432,30 @@ expectError(
expectType<Promise<Source[]>>(p)
expectType<Source[]>(await p)
}
}
/// .helpers.msearch
const s = client.helpers.msearch({
operations: 5,
flushInterval: 500,
concurrency: 5,
retries: 5,
wait: 5000
})
expectType<MsearchHelper>(s)
expectType<void>(s.stop())
expectType<void>(s.stop(new Error('kaboom')))
expectType<Promise<ApiResponse<Record<string, any>, unknown>>>(s.search({ index: 'foo'}, { query: {} }))
expectType<Promise<ApiResponse<string, string>>>(s.search<string, Record<string, any>, string>({ index: 'foo'}, { query: {} }))
expectType<void>(s.search({ index: 'foo'}, { query: {} }, (err, result) => {
expectType<ApiError>(err)
expectType<ApiResponse>(result)
}))
expectType<void>(s.search<string, Record<string, any>, string>({ index: 'foo'}, { query: {} }, (err, result) => {
expectType<ApiError>(err)
expectType<ApiResponse<string, string>>(result)
}))

View File

@ -1067,3 +1067,28 @@ test('Correctly handles the same header cased differently', t => {
})
})
})
test('Random selector', t => {
t.plan(2)
function handler (req, res) {
res.setHeader('Content-Type', 'application/json;utf=8')
res.end(JSON.stringify({ hello: 'world' }))
}
buildServer(handler, ({ port }, server) => {
const client = new Client({
node: `http://localhost:${port}`,
nodeSelector: 'random'
})
client.search({
index: 'test',
q: 'foo:bar'
}, (err, { body }) => {
t.error(err)
t.deepEqual(body, { hello: 'world' })
server.stop()
})
})
})

View File

@ -617,6 +617,14 @@ test('Connection id should not contain credentials', t => {
t.end()
})
test('Ipv6 support', t => {
const connection = new Connection({
url: new URL('http://[::1]:9200')
})
t.strictEqual(connection.buildRequestObject({}).hostname, '::1')
t.end()
})
test('Should throw if the protocol is not http or https', t => {
try {
new Connection({ // eslint-disable-line
@ -900,3 +908,18 @@ test('Abort a request asyncronously', t => {
setImmediate(() => request.abort())
})
})
test('Should correctly resolve request pathname', t => {
t.plan(1)
const connection = new Connection({
url: new URL(`http://localhost:80/test`)
})
t.strictEqual(
connection.buildRequestObject({
path: 'hello'
}).pathname,
'/test/hello'
)
})

View File

@ -7,6 +7,7 @@
const { createReadStream } = require('fs')
const { join } = require('path')
const split = require('split2')
const FakeTimers = require('@sinonjs/fake-timers')
const semver = require('semver')
const { test } = require('tap')
const { Client, errors } = require('../../../')
@ -188,6 +189,51 @@ test('bulk index', t => {
})
})
t.test('refreshOnCompletion custom index', async t => {
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
if (params.method === 'GET') {
t.strictEqual(params.path, '/test/_refresh')
return { body: { acknowledged: true } }
} else {
t.strictEqual(params.path, '/_bulk')
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
const [action, payload] = params.body.split('\n')
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
t.deepEqual(JSON.parse(payload), dataset[count++])
return { body: { errors: false, items: [{}] } }
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const result = await client.helpers.bulk({
datasource: dataset.slice(),
flushBytes: 1,
concurrency: 1,
refreshOnCompletion: 'test',
onDocument (doc) {
return {
index: { _index: 'test' }
}
}
})
t.type(result.time, 'number')
t.type(result.bytes, 'number')
t.match(result, {
total: 3,
successful: 3,
retry: 0,
failed: 0,
aborted: false
})
})
t.test('Should perform a bulk request (custom action)', async t => {
let count = 0
const MockConnection = connection.buildMockConnection({
@ -806,6 +852,53 @@ test('bulk update', t => {
aborted: false
})
})
t.test('Should perform a bulk request dataset as string)', async t => {
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.path, '/_bulk')
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
const [action, payload] = params.body.split('\n')
t.deepEqual(JSON.parse(action), { update: { _index: 'test', _id: count } })
t.deepEqual(JSON.parse(payload), { doc: dataset[count++] })
return { body: { errors: false, items: [{}] } }
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
let id = 0
const result = await client.helpers.bulk({
datasource: dataset.map(d => JSON.stringify(d)),
flushBytes: 1,
concurrency: 1,
onDocument (doc) {
return [{
update: {
_index: 'test',
_id: id++
}
}]
},
onDrop (doc) {
t.fail('This should never be called')
}
})
t.type(result.time, 'number')
t.type(result.bytes, 'number')
t.match(result, {
total: 3,
successful: 3,
retry: 0,
failed: 0,
aborted: false
})
})
t.end()
})
@ -855,10 +948,6 @@ test('bulk delete', t => {
})
t.test('Should perform a bulk request (failure)', async t => {
if (semver.lt(process.versions.node, '10.0.0')) {
t.skip('This test will not pass on Node v8')
return
}
async function handler (req, res) {
t.strictEqual(req.url, '/_bulk')
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
@ -987,3 +1076,118 @@ test('errors', t => {
t.end()
})
test('Flush interval', t => {
t.test('Slow producer', async t => {
const clock = FakeTimers.install({ toFake: ['setTimeout', 'clearTimeout'] })
t.teardown(() => clock.uninstall())
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.path, '/_bulk')
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
const [action, payload] = params.body.split('\n')
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
t.deepEqual(JSON.parse(payload), dataset[count++])
return { body: { errors: false, items: [{}] } }
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const result = await client.helpers.bulk({
datasource: (async function * generator () {
for (const chunk of dataset) {
await clock.nextAsync()
yield chunk
}
})(),
flushBytes: 5000000,
concurrency: 1,
onDocument (doc) {
return {
index: { _index: 'test' }
}
},
onDrop (doc) {
t.fail('This should never be called')
}
})
t.type(result.time, 'number')
t.type(result.bytes, 'number')
t.match(result, {
total: 3,
successful: 3,
retry: 0,
failed: 0,
aborted: false
})
})
t.test('Abort operation', async t => {
const clock = FakeTimers.install({ toFake: ['setTimeout', 'clearTimeout'] })
t.teardown(() => clock.uninstall())
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.true(count < 2)
t.strictEqual(params.path, '/_bulk')
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
const [action, payload] = params.body.split('\n')
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
t.deepEqual(JSON.parse(payload), dataset[count++])
return { body: { errors: false, items: [{}] } }
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const b = client.helpers.bulk({
datasource: (async function * generator () {
for (const chunk of dataset) {
await clock.nextAsync()
if (chunk.user === 'tyrion') {
// Needed otherwise in Node.js 10
// the second request will never be sent
await Promise.resolve()
b.abort()
}
yield chunk
}
})(),
flushBytes: 5000000,
concurrency: 1,
onDocument (doc) {
return {
index: { _index: 'test' }
}
},
onDrop (doc) {
t.fail('This should never be called')
}
})
const result = await b
t.type(result.time, 'number')
t.type(result.bytes, 'number')
t.match(result, {
total: 2,
successful: 2,
retry: 0,
failed: 0,
aborted: true
})
})
t.end()
})

View File

@ -0,0 +1,743 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
'use strict'
const { test } = require('tap')
const { Client, errors } = require('../../../')
const { connection } = require('../../utils')
const FakeTimers = require('@sinonjs/fake-timers')
test('Basic', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: [{
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}]
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch({ operations: 1 })
const result = await m.search(
{ index: 'test' },
{ query: { match: { foo: 'bar' } } }
)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})
t.deepEqual(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
t.teardown(() => m.stop())
})
test('Multiple searches (inside async iterator)', t => {
t.plan(6)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: [{
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}, {
status: 200,
hits: {
hits: [
{ _source: { four: 'four' } },
{ _source: { five: 'five' } },
{ _source: { six: 'six' } }
]
}
}]
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch({ operations: 2 })
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})
t.deepEqual(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { four: 'four' } },
{ _source: { five: 'five' } },
{ _source: { six: 'six' } }
]
}
})
t.deepEqual(result.documents, [
{ four: 'four' },
{ five: 'five' },
{ six: 'six' }
])
})
t.teardown(() => m.stop())
})
test('Multiple searches (async iterator exits)', t => {
t.plan(6)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: [{
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}, {
status: 200,
hits: {
hits: [
{ _source: { four: 'four' } },
{ _source: { five: 'five' } },
{ _source: { six: 'six' } }
]
}
}]
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.error(err)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})
t.deepEqual(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { four: 'four' } },
{ _source: { five: 'five' } },
{ _source: { six: 'six' } }
]
}
})
t.deepEqual(result.documents, [
{ four: 'four' },
{ five: 'five' },
{ six: 'six' }
])
})
setImmediate(() => m.stop())
})
test('Stop a msearch processor (promises)', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch({ operations: 1 })
m.stop()
try {
await m.search(
{ index: 'test' },
{ query: { match: { foo: 'bar' } } }
)
} catch (err) {
t.strictEqual(err.message, 'The msearch processor has been stopped')
}
t.teardown(() => m.stop())
})
test('Stop a msearch processor (callbacks)', t => {
t.plan(1)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.stop()
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.strictEqual(err.message, 'The msearch processor has been stopped')
})
})
test('Bad header', t => {
t.plan(2)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.search(null, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.strictEqual(err.message, 'The header should be an object')
})
m.search(null, { query: { match: { foo: 'bar' } } })
.catch(err => {
t.strictEqual(err.message, 'The header should be an object')
})
t.teardown(() => m.stop())
})
test('Bad body', t => {
t.plan(2)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.search({ index: 'test' }, null, (err, result) => {
t.strictEqual(err.message, 'The body should be an object')
})
m.search({ index: 'test' }, null)
.catch(err => {
t.strictEqual(err.message, 'The body should be an object')
})
t.teardown(() => m.stop())
})
test('Retry on 429', async t => {
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
if (count++ === 0) {
return {
body: {
responses: [{
status: 429,
error: {}
}]
}
}
} else {
return {
body: {
responses: [{
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}]
}
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch({ operations: 1, wait: 10 })
const result = await m.search(
{ index: 'test' },
{ query: { match: { foo: 'bar' } } }
)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})
t.deepEqual(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
t.teardown(() => m.stop())
})
test('Single search errors', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: [{
status: 400,
error: { foo: 'bar' }
}]
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch({ operations: 1 })
try {
await m.search(
{ index: 'test' },
{ query: { match: { foo: 'bar' } } }
)
} catch (err) {
t.true(err instanceof errors.ResponseError)
}
t.teardown(() => m.stop())
})
test('Entire msearch fails', t => {
t.plan(4)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
statusCode: 500,
body: {
status: 500,
error: { foo: 'bar' }
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch({ operations: 1 })
m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.true(err instanceof errors.ResponseError)
t.deepEqual(result.documents, [])
})
m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.true(err instanceof errors.ResponseError)
t.deepEqual(result.documents, [])
})
t.teardown(() => m.stop())
})
test('Resolves the msearch helper', t => {
t.plan(1)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.stop()
m.then(
() => t.pass('called'),
e => t.fail('Should not fail')
)
m.catch(e => t.fail('Should not fail'))
})
test('Stop the msearch helper with an error', t => {
t.plan(3)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.stop(new Error('kaboom'))
m.then(
() => t.fail('Should fail'),
err => t.is(err.message, 'kaboom')
)
m.catch(err => t.is(err.message, 'kaboom'))
m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.is(err.message, 'kaboom')
})
})
test('Multiple searches (concurrency = 1)', t => {
t.plan(6)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: [{
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}]
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch({ operations: 1, concurrency: 1 })
m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.error(err)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})
t.deepEqual(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.error(err)
t.deepEqual(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})
t.deepEqual(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
t.teardown(() => m.stop())
})
test('Flush interval', t => {
t.plan(4)
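// fake only setTimeout/clearTimeout so the helper's flush timer can be controlled, while setImmediate keeps running on the real event loop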
const clock = FakeTimers.install({ toFake: ['setTimeout', 'clearTimeout'] })
t.teardown(() => clock.uninstall())
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: [{
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}, {
status: 200,
hits: {
hits: [
{ _source: { four: 'four' } },
{ _source: { five: 'five' } },
{ _source: { six: 'six' } }
]
}
}]
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.is(result.documents.length, 3)
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.is(result.documents.length, 3)
})
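// advance the fake clock to the next scheduled timer so the flush interval fires and the batched msearch request is sent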
setImmediate(clock.next)
t.teardown(() => m.stop())
})
test('Flush interval - early stop', t => {
t.plan(3)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: [{
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}]
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.is(result.documents.length, 3)
})
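// defer the second search so it is queued after m.stop() below; the stopped helper should reject it with a ConfigurationError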
setImmediate(() => {
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.ok(err instanceof errors.ConfigurationError)
})
})
m.stop()
})
test('Stop should resolve the helper', t => {
t.plan(1)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: []
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
setImmediate(m.stop)
m.then(() => t.pass('Called'))
.catch(() => t.fail('Should not fail'))
})
test('Stop should resolve the helper (error)', t => {
t.plan(3)
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {
body: {
responses: []
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const m = client.helpers.msearch()
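// stopping with an error should reject the helper's promise with that error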
setImmediate(m.stop, new Error('kaboom'))
m.then(() => t.fail('Should not fail'))
.catch(err => t.is(err.message, 'kaboom'))
m.catch(err => t.is(err.message, 'kaboom'))
m.then(() => t.fail('Should not fail'), err => t.is(err.message, 'kaboom'))
})


@@ -183,6 +183,8 @@ test('Scroll search (retry throws later)', async t => {
var count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
// filter_path should not be added if it is not already present
t.strictEqual(params.querystring, 'scroll=1m')
if (count > 1) {
count += 1
return { body: {}, statusCode: 429 }
@@ -232,6 +234,7 @@ test('Scroll search documents', async t => {
var count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.querystring, 'filter_path=hits.hits._source%2C_scroll_id&scroll=1m')
return {
body: {
_scroll_id: count === 3 ? undefined : 'id',


@@ -11,6 +11,7 @@ const { connection } = require('../../utils')
test('Search should have an additional documents property', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.querystring, 'filter_path=hits.hits._source')
return {
body: {
hits: {
@@ -44,6 +45,7 @@ test('Search should have an additional documents property', async t => {
test('kGetHits fallback', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.querystring, 'filter_path=hits.hits._source')
return { body: {} }
}
})
@@ -59,3 +61,73 @@ test('kGetHits fallback', async t => {
})
t.deepEqual(result, [])
})
test('Merge filter paths (snake_case)', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.querystring, 'filter_path=foo%2Chits.hits._source')
return {
body: {
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const result = await client.helpers.search({
index: 'test',
filter_path: 'foo',
body: { foo: 'bar' }
})
t.deepEqual(result, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
test('Merge filter paths (camelCase)', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.strictEqual(params.querystring, 'filter_path=foo%2Chits.hits._source')
return {
body: {
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}
}
}
})
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
const result = await client.helpers.search({
index: 'test',
filterPath: 'foo',
body: { foo: 'bar' }
})
t.deepEqual(result, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})


@@ -6,7 +6,7 @@
const { test } = require('tap')
const { URL } = require('url')
const lolex = require('lolex')
const FakeTimers = require('@sinonjs/fake-timers')
const { createGunzip } = require('zlib')
const os = require('os')
const intoStream = require('into-stream')
@@ -1072,7 +1072,7 @@ test('sniff', t => {
t.test('sniffInterval', t => {
t.plan(6)
const clock = lolex.install({ toFake: ['Date'] })
const clock = FakeTimers.install({ toFake: ['Date'] })
t.teardown(() => clock.uninstall())
class MyTransport extends Transport {