Compare commits
49 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 2907a84334 | |
| | d966a06a62 | |
| | 4bca80bafc | |
| | 080c3af904 | |
| | 14536d6855 | |
| | 11b4cea493 | |
| | 3673255959 | |
| | e0fc57766d | |
| | 9c2ec755d3 | |
| | a221a84a58 | |
| | 7d78dceed5 | |
| | c3832dc081 | |
| | af5c978131 | |
| | e864c1db99 | |
| | 581b0f3a35 | |
| | d73cb1a29b | |
| | 28f2be397c | |
| | 0dd5c3c186 | |
| | e77430ac4e | |
| | 13918d052a | |
| | 3e57383b04 | |
| | 615481d5ed | |
| | 40c47902f3 | |
| | bd6f4f256b | |
| | 91ba1c3b6e | |
| | 52f6326570 | |
| | 9c76898fb1 | |
| | f074d4b0a2 | |
| | d8695532c1 | |
| | aec79375bd | |
| | a82aae8cfb | |
| | 24961869cc | |
| | acce06c2af | |
| | fd59d66076 | |
| | 93cf8aa3c0 | |
| | 072931f7af | |
| | a84c42c54d | |
| | c99eac4699 | |
| | 8eacc288c7 | |
| | a4093a7338 | |
| | 4121e1e7ff | |
| | 57fbbd0a8f | |
| | 35e587663c | |
| | 8a1e9576aa | |
| | 4364e882f8 | |
| | d233ae69f3 | |
| | f47bd8147f | |
| | 109ac3e9aa | |
| | f69c51eb42 | |
@@ -1,6 +1,6 @@
 ---
 STACK_VERSION:
-  - 7.7.0-SNAPSHOT
+  - 7.9.0-SNAPSHOT

 NODE_JS_VERSION:
   - 14

.github/workflows/nodejs.yml (vendored, 14 changed lines)
@@ -32,9 +32,9 @@ jobs:
      run: |
        npm run test:unit

-    - name: Behavior test
+    - name: Acceptance test
      run: |
-        npm run test:behavior
+        npm run test:acceptance

    - name: Type Definitions
      run: |
@@ -86,7 +86,7 @@ jobs:
    - name: Runs Elasticsearch
      uses: elastic/elastic-github-actions/elasticsearch@master
      with:
-       stack-version: 7.7.0-SNAPSHOT
+       stack-version: 7.9.0-SNAPSHOT

    - name: Use Node.js ${{ matrix.node-version }}
      uses: actions/setup-node@v1
@@ -121,9 +121,9 @@ jobs:
      run: |
        npm install

-    - name: Code coverage
+    - name: Code coverage report
      run: |
-        npm run test:coverage
+        npm run test:coverage-report

    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v1
@@ -131,6 +131,10 @@ jobs:
        file: ./coverage.lcov
        fail_ci_if_error: true

+    - name: Code coverage 100%
+      run: |
+        npm run test:coverage-100
+
  license:
    name: License check
    runs-on: ubuntu-latest
README.md (12 changed lines)
@@ -79,13 +79,21 @@ You can use both the callback-style API and the promise-style API, both behave t
 // promise API
 const result = await client.search({
   index: 'my-index',
-  body: { foo: 'bar' }
+  body: {
+    query: {
+      match: { hello: 'world' }
+    }
+  }
 })

 // callback API
 client.search({
   index: 'my-index',
-  body: { foo: 'bar' }
+  body: {
+    query: {
+      match: { hello: 'world' }
+    }
+  }
 }, (err, result) => {
   if (err) console.log(err)
 })
api/api/autoscaling.delete_autoscaling_policy.js (new file, 78 lines)
@@ -0,0 +1,78 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information

'use strict'

/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */

function buildAutoscalingDeleteAutoscalingPolicy (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

  const acceptedQuerystring = [

  ]

  const snakeCase = {

  }

  /**
   * Perform a autoscaling.delete_autoscaling_policy request
   * Deletes an autoscaling policy.
   * https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-delete-autoscaling-policy.html
   */
  return function autoscalingDeleteAutoscalingPolicy (params, options, callback) {
    options = options || {}
    if (typeof options === 'function') {
      callback = options
      options = {}
    }
    if (typeof params === 'function' || params == null) {
      callback = params
      params = {}
      options = {}
    }

    // check required parameters
    if (params['name'] == null) {
      const err = new ConfigurationError('Missing required parameter: name')
      return handleError(err, callback)
    }

    // validate headers object
    if (options.headers != null && typeof options.headers !== 'object') {
      const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
      return handleError(err, callback)
    }

    var warnings = []
    var { method, body, name, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)

    var ignore = options.ignore
    if (typeof ignore === 'number') {
      options.ignore = [ignore]
    }

    var path = ''

    if (method == null) method = 'DELETE'
    path = '/' + '_autoscaling' + '/' + 'policy' + '/' + encodeURIComponent(name)

    // build request object
    const request = {
      method,
      path,
      body: body || '',
      querystring
    }

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest(request, options, callback)
  }
}

module.exports = buildAutoscalingDeleteAutoscalingPolicy
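The generated builders in this compare are not called directly; the client attaches them to a namespace when it loads, and that wiring is outside this diff. A rough usage sketch for the three new autoscaling endpoints, with the camelCase method names assumed from the builder names and the policy body shown only as a placeholder:

const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // "name" is the required parameter enforced by the new builders
  await client.autoscaling.putAutoscalingPolicy({
    name: 'my-policy',
    body: { deciders: {} } // placeholder policy definition
  })
  const { body } = await client.autoscaling.getAutoscalingPolicy({ name: 'my-policy' })
  console.log(body)
  await client.autoscaling.deleteAutoscalingPolicy({ name: 'my-policy' })
}

run().catch(console.log)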
api/api/autoscaling.get_autoscaling_policy.js (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildAutoscalingGetAutoscalingPolicy (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a autoscaling.get_autoscaling_policy request
|
||||
* Retrieves an autoscaling policy.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-get-autoscaling-policy.html
|
||||
*/
|
||||
return function autoscalingGetAutoscalingPolicy (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_autoscaling' + '/' + 'policy' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildAutoscalingGetAutoscalingPolicy
|
||||
api/api/autoscaling.put_autoscaling_policy.js (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildAutoscalingPutAutoscalingPolicy (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a autoscaling.put_autoscaling_policy request
|
||||
* Creates a new autoscaling policy.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-put-autoscaling-policy.html
|
||||
*/
|
||||
return function autoscalingPutAutoscalingPolicy (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params['body'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: body')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + '_autoscaling' + '/' + 'policy' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildAutoscalingPutAutoscalingPolicy
|
||||
@@ -27,7 +27,7 @@ function buildClearScroll (opts) {
   /**
    * Perform a clear_scroll request
    * Explicitly clears the search context for a scroll.
-   * https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#_clear_scroll_api
+   * https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-scroll-api.html
    */
   return function clearScroll (params, options, callback) {
     options = options || {}
@@ -30,7 +30,7 @@ function buildClusterDeleteComponentTemplate (opts) {
   /**
    * Perform a cluster.delete_component_template request
    * Deletes a component template
-   * https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-templates.html
+   * https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
    */
   return function clusterDeleteComponentTemplate (params, options, callback) {
     options = options || {}
api/api/cluster.delete_voting_config_exclusions.js (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildClusterDeleteVotingConfigExclusions (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'wait_for_removal',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
waitForRemoval: 'wait_for_removal',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a cluster.delete_voting_config_exclusions request
|
||||
* Clears cluster voting config exclusions.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/voting-config-exclusions.html
|
||||
*/
|
||||
return function clusterDeleteVotingConfigExclusions (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + '_cluster' + '/' + 'voting_config_exclusions'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildClusterDeleteVotingConfigExclusions
|
||||
api/api/cluster.exists_component_template.js (new file, 86 lines)
@@ -0,0 +1,86 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildClusterExistsComponentTemplate (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'master_timeout',
|
||||
'local',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a cluster.exists_component_template request
|
||||
* Returns information about whether a particular component template exist
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
|
||||
*/
|
||||
return function clusterExistsComponentTemplate (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'HEAD'
|
||||
path = '/' + '_component_template' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildClusterExistsComponentTemplate
|
||||
@@ -30,7 +30,7 @@ function buildClusterGetComponentTemplate (opts) {
   /**
    * Perform a cluster.get_component_template request
    * Returns one or more component templates
-   * https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-templates.html
+   * https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
    */
   return function clusterGetComponentTemplate (params, options, callback) {
     options = options || {}
api/api/cluster.post_voting_config_exclusions.js (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildClusterPostVotingConfigExclusions (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'node_ids',
|
||||
'node_names',
|
||||
'timeout',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
nodeIds: 'node_ids',
|
||||
nodeNames: 'node_names',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a cluster.post_voting_config_exclusions request
|
||||
* Updates the cluster voting config exclusions by node ids or node names.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/voting-config-exclusions.html
|
||||
*/
|
||||
return function clusterPostVotingConfigExclusions (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_cluster' + '/' + 'voting_config_exclusions'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildClusterPostVotingConfigExclusions
|
||||
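The two voting-config files added above map POST and DELETE onto /_cluster/voting_config_exclusions. A hedged sketch of how they could be used while decommissioning master-eligible nodes, with the camelCase method names assumed from the builder names (inside an async function):

// exclude the nodes from voting before shutting them down
await client.cluster.postVotingConfigExclusions({ node_names: 'node-1,node-2' })

// once the nodes are gone, clear the exclusion list again
await client.cluster.deleteVotingConfigExclusions({ wait_for_removal: true })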
@@ -31,7 +31,7 @@ function buildClusterPutComponentTemplate (opts) {
   /**
    * Perform a cluster.put_component_template request
    * Creates or updates a component template
-   * https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-templates.html
+   * https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html
    */
   return function clusterPutComponentTemplate (params, options, callback) {
     options = options || {}
api/api/dangling_indices.delete_dangling_index.js (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDanglingIndicesDeleteDanglingIndex (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'accept_data_loss',
|
||||
'timeout',
|
||||
'master_timeout',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
acceptDataLoss: 'accept_data_loss',
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a dangling_indices.delete_dangling_index request
|
||||
* Deletes the specified dangling index
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-gateway-dangling-indices.html
|
||||
*/
|
||||
return function danglingIndicesDeleteDanglingIndex (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['index_uuid'] == null && params['indexUuid'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: index_uuid or indexUuid')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, indexUuid, index_uuid, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + '_dangling' + '/' + encodeURIComponent(index_uuid || indexUuid)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDanglingIndicesDeleteDanglingIndex
|
||||
api/api/dangling_indices.import_dangling_index.js (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDanglingIndicesImportDanglingIndex (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'accept_data_loss',
|
||||
'timeout',
|
||||
'master_timeout',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
acceptDataLoss: 'accept_data_loss',
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a dangling_indices.import_dangling_index request
|
||||
* Imports the specified dangling index
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-gateway-dangling-indices.html
|
||||
*/
|
||||
return function danglingIndicesImportDanglingIndex (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['index_uuid'] == null && params['indexUuid'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: index_uuid or indexUuid')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, indexUuid, index_uuid, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_dangling' + '/' + encodeURIComponent(index_uuid || indexUuid)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDanglingIndicesImportDanglingIndex
|
||||
api/api/dangling_indices.list_dangling_indices.js (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDanglingIndicesListDanglingIndices (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a dangling_indices.list_dangling_indices request
|
||||
* Returns all dangling indices.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-gateway-dangling-indices.html
|
||||
*/
|
||||
return function danglingIndicesListDanglingIndices (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_dangling'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDanglingIndicesListDanglingIndices
|
||||
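The three dangling-indices builders form a single workflow: list what is dangling, then import or delete each entry by UUID. A sketch under the assumptions that the client exposes them as client.danglingIndices.* and that the list response carries a dangling_indices array (inside an async function):

const { body } = await client.danglingIndices.listDanglingIndices()
for (const entry of body.dangling_indices) {
  // accept_data_loss must be set explicitly for both import and delete
  await client.danglingIndices.importDanglingIndex({
    index_uuid: entry.index_uuid,
    accept_data_loss: true
  })
}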
api/api/eql.delete.js (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildEqlDelete (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a eql.delete request
|
||||
* Deletes an async EQL search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html
|
||||
*/
|
||||
return function eqlDelete (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['id'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: id')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, id, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + '_eql' + '/' + 'search' + '/' + encodeURIComponent(id)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildEqlDelete
|
||||
api/api/eql.get.js (new file, 80 lines)
@@ -0,0 +1,80 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildEqlGet (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'wait_for_completion_timeout',
|
||||
'keep_alive'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
waitForCompletionTimeout: 'wait_for_completion_timeout',
|
||||
keepAlive: 'keep_alive'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a eql.get request
|
||||
* Returns async results from previously executed Event Query Language (EQL) search
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html
|
||||
*/
|
||||
return function eqlGet (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['id'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: id')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, id, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_eql' + '/' + 'search' + '/' + encodeURIComponent(id)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildEqlGet
|
||||
@@ -12,17 +12,21 @@ function buildEqlSearch (opts) {
   const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

   const acceptedQuerystring = [
-
+    'wait_for_completion_timeout',
+    'keep_on_completion',
+    'keep_alive'
   ]

   const snakeCase = {
-
+    waitForCompletionTimeout: 'wait_for_completion_timeout',
+    keepOnCompletion: 'keep_on_completion',
+    keepAlive: 'keep_alive'
   }

   /**
    * Perform a eql.search request
    * Returns results matching a query expressed in Event Query Language (EQL)
-   * https://www.elastic.co/guide/en/elasticsearch/reference/current/eql.html
+   * https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html
    */
   return function eqlSearch (params, options, callback) {
     options = options || {}
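With keep_on_completion, the new eql.search options turn the endpoint into an async-search style workflow: the submitted search can return an id that eql.get polls and eql.delete cleans up. A sketch with placeholder index and query values (inside an async function):

const { body: submitted } = await client.eql.search({
  index: 'my-index',
  wait_for_completion_timeout: '2s',
  keep_on_completion: true,
  body: { query: 'process where process.name == "cmd.exe"' }
})

if (submitted.id) {
  const { body: results } = await client.eql.get({ id: submitted.id, keep_alive: '1d' })
  console.log(results.hits)
  await client.eql.delete({ id: submitted.id })
}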
api/api/indices.add_block.js (new file, 102 lines)
@@ -0,0 +1,102 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesAddBlock (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'timeout',
|
||||
'master_timeout',
|
||||
'ignore_unavailable',
|
||||
'allow_no_indices',
|
||||
'expand_wildcards',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
ignoreUnavailable: 'ignore_unavailable',
|
||||
allowNoIndices: 'allow_no_indices',
|
||||
expandWildcards: 'expand_wildcards',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.add_block request
|
||||
* Adds a block to an index.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/index-modules-blocks.html
|
||||
*/
|
||||
return function indicesAddBlock (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['index'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: index')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params['block'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: block')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// check required url components
|
||||
if (params['block'] != null && (params['index'] == null)) {
|
||||
const err = new ConfigurationError('Missing required parameter of the url: index')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, index, block, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_block' + '/' + encodeURIComponent(block)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesAddBlock
|
||||
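indices.add_block requires both an index and a block name in the URL (/{index}/_block/{block}); the builder turns a missing value into a ConfigurationError before any request is sent. A minimal sketch, assuming the usual camelCase method name (inside an async function):

// make the index reject further writes
await client.indices.addBlock({ index: 'my-index', block: 'write' })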
@@ -12,21 +12,16 @@ function buildIndicesCreateDataStream (opts) {
   const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

   const acceptedQuerystring = [
-    'pretty',
-    'human',
-    'error_trace',
-    'source',
-    'filter_path'
+
   ]

   const snakeCase = {
-    errorTrace: 'error_trace',
-    filterPath: 'filter_path'
+
   }

   /**
    * Perform a indices.create_data_stream request
-   * Creates or updates a data stream
+   * Creates a data stream
    * https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html
    */
   return function indicesCreateDataStream (params, options, callback) {
@@ -46,10 +41,6 @@ function buildIndicesCreateDataStream (opts) {
       const err = new ConfigurationError('Missing required parameter: name')
       return handleError(err, callback)
     }
-    if (params['body'] == null) {
-      const err = new ConfigurationError('Missing required parameter: body')
-      return handleError(err, callback)
-    }

     // validate headers object
     if (options.headers != null && typeof options.headers !== 'object') {
api/api/indices.data_streams_stats.js (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesDataStreamsStats (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.data_streams_stats request
|
||||
* Provides statistics on operations happening in a data stream.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html
|
||||
*/
|
||||
return function indicesDataStreamsStats (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if ((name) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_data_stream' + '/' + encodeURIComponent(name) + '/' + '_stats'
|
||||
} else {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_data_stream' + '/' + '_stats'
|
||||
}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesDataStreamsStats
|
||||
@@ -12,16 +12,11 @@ function buildIndicesDeleteDataStream (opts) {
   const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

   const acceptedQuerystring = [
-    'pretty',
-    'human',
-    'error_trace',
-    'source',
-    'filter_path'
+
   ]

   const snakeCase = {
-    errorTrace: 'error_trace',
-    filterPath: 'filter_path'
+
   }

   /**
api/api/indices.delete_index_template.js (new file, 86 lines)
@@ -0,0 +1,86 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesDeleteIndexTemplate (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'timeout',
|
||||
'master_timeout',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.delete_index_template request
|
||||
* Deletes an index template.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
|
||||
*/
|
||||
return function indicesDeleteIndexTemplate (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + '_index_template' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesDeleteIndexTemplate
|
||||
api/api/indices.exists_index_template.js (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesExistsIndexTemplate (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'flat_settings',
|
||||
'master_timeout',
|
||||
'local',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
flatSettings: 'flat_settings',
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.exists_index_template request
|
||||
* Returns information about whether a particular index template exists.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
|
||||
*/
|
||||
return function indicesExistsIndexTemplate (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'HEAD'
|
||||
path = '/' + '_index_template' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesExistsIndexTemplate
|
||||
@@ -7,29 +7,24 @@
 /* eslint camelcase: 0 */
 /* eslint no-unused-vars: 0 */

-function buildIndicesGetDataStreams (opts) {
+function buildIndicesGetDataStream (opts) {
   // eslint-disable-next-line no-unused-vars
   const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

   const acceptedQuerystring = [
-    'pretty',
-    'human',
-    'error_trace',
-    'source',
-    'filter_path'
+
   ]

   const snakeCase = {
-    errorTrace: 'error_trace',
-    filterPath: 'filter_path'
+
   }

   /**
-   * Perform a indices.get_data_streams request
+   * Perform a indices.get_data_stream request
    * Returns data streams.
    * https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html
    */
-  return function indicesGetDataStreams (params, options, callback) {
+  return function indicesGetDataStream (params, options, callback) {
     options = options || {}
     if (typeof options === 'function') {
       callback = options
@@ -60,10 +55,10 @@ function buildIndicesGetDataStreams (opts) {

     if ((name) != null) {
       if (method == null) method = 'GET'
-      path = '/' + '_data_streams' + '/' + encodeURIComponent(name)
+      path = '/' + '_data_stream' + '/' + encodeURIComponent(name)
     } else {
       if (method == null) method = 'GET'
-      path = '/' + '_data_streams'
+      path = '/' + '_data_stream'
     }

     // build request object
@@ -79,4 +74,4 @@ function buildIndicesGetDataStreams (opts) {
   }
 }

-module.exports = buildIndicesGetDataStreams
+module.exports = buildIndicesGetDataStream
api/api/indices.get_index_template.js (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesGetIndexTemplate (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'flat_settings',
|
||||
'master_timeout',
|
||||
'local',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
flatSettings: 'flat_settings',
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.get_index_template request
|
||||
* Returns an index template.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
|
||||
*/
|
||||
return function indicesGetIndexTemplate (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if ((name) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_index_template' + '/' + encodeURIComponent(name)
|
||||
} else {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_index_template'
|
||||
}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesGetIndexTemplate
|
||||
api/api/indices.put_index_template.js (new file, 91 lines)
@@ -0,0 +1,91 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information

'use strict'

/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */

function buildIndicesPutIndexTemplate (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

  const acceptedQuerystring = [
    'create',
    'cause',
    'master_timeout',
    'pretty',
    'human',
    'error_trace',
    'source',
    'filter_path'
  ]

  const snakeCase = {
    masterTimeout: 'master_timeout',
    errorTrace: 'error_trace',
    filterPath: 'filter_path'
  }

  /**
   * Perform a indices.put_index_template request
   * Creates or updates an index template.
   * https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
   */
  return function indicesPutIndexTemplate (params, options, callback) {
    options = options || {}
    if (typeof options === 'function') {
      callback = options
      options = {}
    }
    if (typeof params === 'function' || params == null) {
      callback = params
      params = {}
      options = {}
    }

    // check required parameters
    if (params['name'] == null) {
      const err = new ConfigurationError('Missing required parameter: name')
      return handleError(err, callback)
    }
    if (params['body'] == null) {
      const err = new ConfigurationError('Missing required parameter: body')
      return handleError(err, callback)
    }

    // validate headers object
    if (options.headers != null && typeof options.headers !== 'object') {
      const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
      return handleError(err, callback)
    }

    var warnings = []
    var { method, body, name, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)

    var ignore = options.ignore
    if (typeof ignore === 'number') {
      options.ignore = [ignore]
    }

    var path = ''

    if (method == null) method = 'PUT'
    path = '/' + '_index_template' + '/' + encodeURIComponent(name)

    // build request object
    const request = {
      method,
      path,
      body: body || '',
      querystring
    }

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest(request, options, callback)
  }
}

module.exports = buildIndicesPutIndexTemplate
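As the required-parameter checks above show, `indices.putIndexTemplate` needs both `name` and `body` and sends a PUT to `/_index_template/{name}`. A sketch reusing the client and async context from the previous example; the template definition is illustrative:

```js
// PUT /_index_template/my-template?create=true
await client.indices.putIndexTemplate({
  name: 'my-template',
  create: true, // fail instead of replacing an existing template
  body: {
    index_patterns: ['my-index-*'],
    template: {
      settings: { number_of_shards: 1 }
    }
  }
})
```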
@ -18,6 +18,7 @@ function buildIndicesPutMapping (opts) {
    'ignore_unavailable',
    'allow_no_indices',
    'expand_wildcards',
    'write_index_only',
    'pretty',
    'human',
    'error_trace',
@ -31,6 +32,7 @@ function buildIndicesPutMapping (opts) {
    ignoreUnavailable: 'ignore_unavailable',
    allowNoIndices: 'allow_no_indices',
    expandWildcards: 'expand_wildcards',
    writeIndexOnly: 'write_index_only',
    errorTrace: 'error_trace',
    filterPath: 'filter_path'
  }
85
api/api/indices.resolve_index.js
Normal file
@ -0,0 +1,85 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesResolveIndex (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'expand_wildcards',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
expandWildcards: 'expand_wildcards',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.resolve_index request
|
||||
* Returns information about any matching indices, aliases, and data streams
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-index-api.html
|
||||
*/
|
||||
return function indicesResolveIndex (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_resolve' + '/' + 'index' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesResolveIndex
|
||||
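`indices.resolveIndex` takes a required `name` (wildcards allowed) and reports the concrete indices, aliases, and data streams it matches; camelCase options such as `expandWildcards` are converted to their snake_case query-string form. A sketch under the same assumptions as the earlier examples:

```js
// GET /_resolve/index/my-index-*?expand_wildcards=open
const { body } = await client.indices.resolveIndex({
  name: 'my-index-*',
  expandWildcards: 'open'
})
console.log(body.indices, body.aliases, body.data_streams)
```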
87
api/api/indices.simulate_index_template.js
Normal file
@ -0,0 +1,87 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesSimulateIndexTemplate (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'create',
|
||||
'cause',
|
||||
'master_timeout',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.simulate_index_template request
|
||||
* Simulate matching the given index name against the index templates in the system
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
|
||||
*/
|
||||
return function indicesSimulateIndexTemplate (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_index_template' + '/' + '_simulate_index' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesSimulateIndexTemplate
|
||||
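`indices.simulateIndexTemplate` asks Elasticsearch which composable template would apply to a given index name without creating anything. A sketch; the index name is illustrative:

```js
// POST /_index_template/_simulate_index/my-index-000001
const { body } = await client.indices.simulateIndexTemplate({ name: 'my-index-000001' })
console.log(body)
```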
86
api/api/indices.simulate_template.js
Normal file
@ -0,0 +1,86 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildIndicesSimulateTemplate (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'create',
|
||||
'cause',
|
||||
'master_timeout',
|
||||
'pretty',
|
||||
'human',
|
||||
'error_trace',
|
||||
'source',
|
||||
'filter_path'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a indices.simulate_template request
|
||||
* Simulate resolving the given template name or body
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html
|
||||
*/
|
||||
return function indicesSimulateTemplate (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if ((name) != null) {
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_index_template' + '/' + '_simulate' + '/' + encodeURIComponent(name)
|
||||
} else {
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_index_template' + '/' + '_simulate'
|
||||
}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildIndicesSimulateTemplate
|
||||
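`indices.simulateTemplate` resolves either an existing template (when `name` is given) or an inline definition passed in `body`. A sketch with an illustrative inline definition:

```js
// POST /_index_template/_simulate
const { body } = await client.indices.simulateTemplate({
  body: {
    index_patterns: ['my-index-*'],
    template: { settings: { number_of_shards: 1 } }
  }
})
console.log(body)
```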
@ -12,7 +12,8 @@ function buildMlDeleteDataFrameAnalytics (opts) {
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

  const acceptedQuerystring = [
    'force'
    'force',
    'timeout'
  ]

  const snakeCase = {
@ -12,10 +12,12 @@ function buildMlDeleteExpiredData (opts) {
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

  const acceptedQuerystring = [

    'requests_per_second',
    'timeout'
  ]

  const snakeCase = {
    requestsPerSecond: 'requests_per_second'

  }

@ -43,7 +45,7 @@ function buildMlDeleteExpiredData (opts) {
    }

    var warnings = []
    var { method, body, ...querystring } = params
    var { method, body, jobId, job_id, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)

    var ignore = options.ignore
@ -53,8 +55,13 @@ function buildMlDeleteExpiredData (opts) {

    var path = ''

    if (method == null) method = 'DELETE'
    path = '/' + '_ml' + '/' + '_delete_expired_data'
    if ((job_id || jobId) != null) {
      if (method == null) method = 'DELETE'
      path = '/' + '_ml' + '/' + '_delete_expired_data' + '/' + encodeURIComponent(job_id || jobId)
    } else {
      if (method == null) method = 'DELETE'
      path = '/' + '_ml' + '/' + '_delete_expired_data'
    }

    // build request object
    const request = {
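The `ml.deleteExpiredData` change adds an optional job id path segment plus `requests_per_second` and `timeout` query parameters. A sketch; the job id and values are illustrative:

```js
// DELETE /_ml/_delete_expired_data/my-job?requests_per_second=100&timeout=1h
await client.ml.deleteExpiredData({
  jobId: 'my-job',        // optional; omit to clean up expired data for all jobs
  requestsPerSecond: 100, // sent as requests_per_second
  timeout: '1h'
})
```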
@ -13,11 +13,13 @@ function buildMlForecast (opts) {

  const acceptedQuerystring = [
    'duration',
    'expires_in'
    'expires_in',
    'max_model_memory'
  ]

  const snakeCase = {
    expiresIn: 'expires_in'
    expiresIn: 'expires_in',
    maxModelMemory: 'max_model_memory'
  }

  /**
@ -13,11 +13,12 @@ function buildMlGetCategories (opts) {

  const acceptedQuerystring = [
    'from',
    'size'
    'size',
    'partition_field_value'
  ]

  const snakeCase = {

    partitionFieldValue: 'partition_field_value'
  }

  /**
@ -17,14 +17,15 @@ function buildMlGetTrainedModels (opts) {
    'decompress_definition',
    'from',
    'size',
    'tags'
    'tags',
    'for_export'
  ]

  const snakeCase = {
    allowNoMatch: 'allow_no_match',
    includeModelDefinition: 'include_model_definition',
    decompressDefinition: 'decompress_definition'

    decompressDefinition: 'decompress_definition',
    forExport: 'for_export'
  }

  /**
82
api/api/ml.update_data_frame_analytics.js
Normal file
@ -0,0 +1,82 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildMlUpdateDataFrameAnalytics (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a ml.update_data_frame_analytics request
|
||||
* Updates certain properties of a data frame analytics job.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/update-dfanalytics.html
|
||||
*/
|
||||
return function mlUpdateDataFrameAnalytics (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['id'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: id')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params['body'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: body')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, id, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_ml' + '/' + 'data_frame' + '/' + 'analytics' + '/' + encodeURIComponent(id) + '/' + '_update'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildMlUpdateDataFrameAnalytics
|
||||
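`ml.updateDataFrameAnalytics` requires `id` and `body` and posts to `/_ml/data_frame/analytics/{id}/_update`. A sketch; the job id and updated fields are illustrative:

```js
// POST /_ml/data_frame/analytics/my-analytics-job/_update
await client.ml.updateDataFrameAnalytics({
  id: 'my-analytics-job',
  body: {
    description: 'refreshed description',
    model_memory_limit: '128mb'
  }
})
```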
@ -22,6 +22,7 @@ function buildMlValidate (opts) {
  /**
   * Perform a ml.validate request
   * Validates an anomaly detection job.
   * https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html
   */
  return function mlValidate (params, options, callback) {
    options = options || {}
@ -22,6 +22,7 @@ function buildMlValidateDetector (opts) {
  /**
   * Perform a ml.validate_detector request
   * Validates an anomaly detection detector.
   * https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html
   */
  return function mlValidateDetector (params, options, callback) {
    options = options || {}
83
api/api/searchable_snapshots.clear_cache.js
Normal file
@ -0,0 +1,83 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildSearchableSnapshotsClearCache (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'ignore_unavailable',
|
||||
'allow_no_indices',
|
||||
'expand_wildcards',
|
||||
'index'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
ignoreUnavailable: 'ignore_unavailable',
|
||||
allowNoIndices: 'allow_no_indices',
|
||||
expandWildcards: 'expand_wildcards'
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a searchable_snapshots.clear_cache request
|
||||
* Clear the cache of searchable snapshots.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-api-clear-cache.html
|
||||
*/
|
||||
return function searchableSnapshotsClearCache (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, index, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if ((index) != null) {
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_searchable_snapshots' + '/' + 'cache' + '/' + 'clear'
|
||||
} else {
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_searchable_snapshots' + '/' + 'cache' + '/' + 'clear'
|
||||
}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildSearchableSnapshotsClearCache
|
||||
94
api/api/searchable_snapshots.mount.js
Normal file
@ -0,0 +1,94 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildSearchableSnapshotsMount (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'master_timeout',
|
||||
'wait_for_completion'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
waitForCompletion: 'wait_for_completion'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a searchable_snapshots.mount request
|
||||
* Mount a snapshot as a searchable index.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-api-mount-snapshot.html
|
||||
*/
|
||||
return function searchableSnapshotsMount (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['repository'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: repository')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params['snapshot'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: snapshot')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params['body'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: body')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// check required url components
|
||||
if (params['snapshot'] != null && (params['repository'] == null)) {
|
||||
const err = new ConfigurationError('Missing required parameter of the url: repository')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, repository, snapshot, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_snapshot' + '/' + encodeURIComponent(repository) + '/' + encodeURIComponent(snapshot) + '/' + '_mount'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildSearchableSnapshotsMount
|
||||
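`searchableSnapshots.mount` requires `repository`, `snapshot`, and a `body` naming the index to mount, and posts to `/_snapshot/{repository}/{snapshot}/_mount`. A sketch with illustrative names:

```js
// POST /_snapshot/my-repository/my-snapshot/_mount?wait_for_completion=true
await client.searchableSnapshots.mount({
  repository: 'my-repository',
  snapshot: 'my-snapshot',
  waitForCompletion: true, // sent as wait_for_completion
  body: {
    index: 'my-index' // the index inside the snapshot to mount
  }
})
```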
78
api/api/searchable_snapshots.repository_stats.js
Normal file
@ -0,0 +1,78 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildSearchableSnapshotsRepositoryStats (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a searchable_snapshots.repository_stats request
|
||||
* Retrieve usage statistics about a snapshot repository.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-repository-stats.html
|
||||
*/
|
||||
return function searchableSnapshotsRepositoryStats (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['repository'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: repository')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, repository, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_snapshot' + '/' + encodeURIComponent(repository) + '/' + '_stats'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildSearchableSnapshotsRepositoryStats
|
||||
77
api/api/searchable_snapshots.stats.js
Normal file
@ -0,0 +1,77 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildSearchableSnapshotsStats (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a searchable_snapshots.stats request
|
||||
* Retrieve various statistics about searchable snapshots.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/searchable-snapshots-api-stats.html
|
||||
*/
|
||||
return function searchableSnapshotsStats (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, index, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if ((index) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_searchable_snapshots' + '/' + 'stats'
|
||||
} else {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_searchable_snapshots' + '/' + 'stats'
|
||||
}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildSearchableSnapshotsStats
|
||||
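`searchableSnapshots.stats` works either cluster-wide (`GET /_searchable_snapshots/stats`) or scoped to a single index. A sketch; the index name is illustrative:

```js
// GET /my-index/_searchable_snapshots/stats
const { body } = await client.searchableSnapshots.stats({ index: 'my-index' })
console.log(body)
```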
78
api/api/security.clear_cached_privileges.js
Normal file
@ -0,0 +1,78 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildSecurityClearCachedPrivileges (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a security.clear_cached_privileges request
|
||||
* Evicts application privileges from the native application privileges cache.
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-privilege-cache.html
|
||||
*/
|
||||
return function securityClearCachedPrivileges (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['application'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: application')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, application, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_security' + '/' + 'privilege' + '/' + encodeURIComponent(application) + '/' + '_clear_cache'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildSecurityClearCachedPrivileges
|
||||
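`security.clearCachedPrivileges` requires an `application` name and posts to `/_security/privilege/{application}/_clear_cache`. A sketch; the application name is illustrative:

```js
// POST /_security/privilege/my-app/_clear_cache
await client.security.clearCachedPrivileges({ application: 'my-app' })
```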
@ -30,7 +30,7 @@ function buildSnapshotCleanupRepository (opts) {
  /**
   * Perform a snapshot.cleanup_repository request
   * Removes stale data from repository.
   * https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html
   * https://www.elastic.co/guide/en/elasticsearch/reference/master/clean-up-snapshot-repo-api.html
   */
  return function snapshotCleanupRepository (params, options, callback) {
    options = options || {}
@ -15,6 +15,7 @@ function buildTasksCancel (opts) {
    'nodes',
    'actions',
    'parent_task_id',
    'wait_for_completion',
    'pretty',
    'human',
    'error_trace',
@ -24,6 +25,7 @@ function buildTasksCancel (opts) {

  const snakeCase = {
    parentTaskId: 'parent_task_id',
    waitForCompletion: 'wait_for_completion',
    errorTrace: 'error_trace',
    filterPath: 'filter_path'
  }
@ -12,11 +12,12 @@ function buildXpackInfo (opts) {
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

  const acceptedQuerystring = [
    'categories'
    'categories',
    'accept_enterprise'
  ]

  const snakeCase = {

    acceptEnterprise: 'accept_enterprise'
  }

  /**
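With `accept_enterprise` now accepted, `xpack.info` can report the enterprise license type on 7.x clusters. A sketch under the same assumptions as the earlier examples:

```js
// GET /_xpack?accept_enterprise=true
const { body } = await client.xpack.info({ acceptEnterprise: true })
console.log(body.license)
```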
74
api/index.js
@ -27,8 +27,14 @@ function ESAPI (opts) {
|
||||
submit: lazyLoad('async_search.submit', opts)
|
||||
},
|
||||
autoscaling: {
|
||||
delete_autoscaling_policy: lazyLoad('autoscaling.delete_autoscaling_policy', opts),
|
||||
deleteAutoscalingPolicy: lazyLoad('autoscaling.delete_autoscaling_policy', opts),
|
||||
get_autoscaling_decision: lazyLoad('autoscaling.get_autoscaling_decision', opts),
|
||||
getAutoscalingDecision: lazyLoad('autoscaling.get_autoscaling_decision', opts)
|
||||
getAutoscalingDecision: lazyLoad('autoscaling.get_autoscaling_decision', opts),
|
||||
get_autoscaling_policy: lazyLoad('autoscaling.get_autoscaling_policy', opts),
|
||||
getAutoscalingPolicy: lazyLoad('autoscaling.get_autoscaling_policy', opts),
|
||||
put_autoscaling_policy: lazyLoad('autoscaling.put_autoscaling_policy', opts),
|
||||
putAutoscalingPolicy: lazyLoad('autoscaling.put_autoscaling_policy', opts)
|
||||
},
|
||||
bulk: lazyLoad('bulk', opts),
|
||||
cat: {
|
||||
@ -96,6 +102,10 @@ function ESAPI (opts) {
|
||||
allocationExplain: lazyLoad('cluster.allocation_explain', opts),
|
||||
delete_component_template: lazyLoad('cluster.delete_component_template', opts),
|
||||
deleteComponentTemplate: lazyLoad('cluster.delete_component_template', opts),
|
||||
delete_voting_config_exclusions: lazyLoad('cluster.delete_voting_config_exclusions', opts),
|
||||
deleteVotingConfigExclusions: lazyLoad('cluster.delete_voting_config_exclusions', opts),
|
||||
exists_component_template: lazyLoad('cluster.exists_component_template', opts),
|
||||
existsComponentTemplate: lazyLoad('cluster.exists_component_template', opts),
|
||||
get_component_template: lazyLoad('cluster.get_component_template', opts),
|
||||
getComponentTemplate: lazyLoad('cluster.get_component_template', opts),
|
||||
get_settings: lazyLoad('cluster.get_settings', opts),
|
||||
@ -103,6 +113,8 @@ function ESAPI (opts) {
|
||||
health: lazyLoad('cluster.health', opts),
|
||||
pending_tasks: lazyLoad('cluster.pending_tasks', opts),
|
||||
pendingTasks: lazyLoad('cluster.pending_tasks', opts),
|
||||
post_voting_config_exclusions: lazyLoad('cluster.post_voting_config_exclusions', opts),
|
||||
postVotingConfigExclusions: lazyLoad('cluster.post_voting_config_exclusions', opts),
|
||||
put_component_template: lazyLoad('cluster.put_component_template', opts),
|
||||
putComponentTemplate: lazyLoad('cluster.put_component_template', opts),
|
||||
put_settings: lazyLoad('cluster.put_settings', opts),
|
||||
@ -115,6 +127,22 @@ function ESAPI (opts) {
|
||||
},
|
||||
count: lazyLoad('count', opts),
|
||||
create: lazyLoad('create', opts),
|
||||
dangling_indices: {
|
||||
delete_dangling_index: lazyLoad('dangling_indices.delete_dangling_index', opts),
|
||||
deleteDanglingIndex: lazyLoad('dangling_indices.delete_dangling_index', opts),
|
||||
import_dangling_index: lazyLoad('dangling_indices.import_dangling_index', opts),
|
||||
importDanglingIndex: lazyLoad('dangling_indices.import_dangling_index', opts),
|
||||
list_dangling_indices: lazyLoad('dangling_indices.list_dangling_indices', opts),
|
||||
listDanglingIndices: lazyLoad('dangling_indices.list_dangling_indices', opts)
|
||||
},
|
||||
danglingIndices: {
|
||||
delete_dangling_index: lazyLoad('dangling_indices.delete_dangling_index', opts),
|
||||
deleteDanglingIndex: lazyLoad('dangling_indices.delete_dangling_index', opts),
|
||||
import_dangling_index: lazyLoad('dangling_indices.import_dangling_index', opts),
|
||||
importDanglingIndex: lazyLoad('dangling_indices.import_dangling_index', opts),
|
||||
list_dangling_indices: lazyLoad('dangling_indices.list_dangling_indices', opts),
|
||||
listDanglingIndices: lazyLoad('dangling_indices.list_dangling_indices', opts)
|
||||
},
|
||||
delete: lazyLoad('delete', opts),
|
||||
delete_by_query: lazyLoad('delete_by_query', opts),
|
||||
deleteByQuery: lazyLoad('delete_by_query', opts),
|
||||
@ -134,6 +162,8 @@ function ESAPI (opts) {
|
||||
stats: lazyLoad('enrich.stats', opts)
|
||||
},
|
||||
eql: {
|
||||
delete: lazyLoad('eql.delete', opts),
|
||||
get: lazyLoad('eql.get', opts),
|
||||
search: lazyLoad('eql.search', opts)
|
||||
},
|
||||
exists: lazyLoad('exists', opts),
|
||||
@ -175,6 +205,8 @@ function ESAPI (opts) {
|
||||
},
|
||||
index: lazyLoad('index', opts),
|
||||
indices: {
|
||||
add_block: lazyLoad('indices.add_block', opts),
|
||||
addBlock: lazyLoad('indices.add_block', opts),
|
||||
analyze: lazyLoad('indices.analyze', opts),
|
||||
clear_cache: lazyLoad('indices.clear_cache', opts),
|
||||
clearCache: lazyLoad('indices.clear_cache', opts),
|
||||
@ -183,16 +215,22 @@ function ESAPI (opts) {
|
||||
create: lazyLoad('indices.create', opts),
|
||||
create_data_stream: lazyLoad('indices.create_data_stream', opts),
|
||||
createDataStream: lazyLoad('indices.create_data_stream', opts),
|
||||
data_streams_stats: lazyLoad('indices.data_streams_stats', opts),
|
||||
dataStreamsStats: lazyLoad('indices.data_streams_stats', opts),
|
||||
delete: lazyLoad('indices.delete', opts),
|
||||
delete_alias: lazyLoad('indices.delete_alias', opts),
|
||||
deleteAlias: lazyLoad('indices.delete_alias', opts),
|
||||
delete_data_stream: lazyLoad('indices.delete_data_stream', opts),
|
||||
deleteDataStream: lazyLoad('indices.delete_data_stream', opts),
|
||||
delete_index_template: lazyLoad('indices.delete_index_template', opts),
|
||||
deleteIndexTemplate: lazyLoad('indices.delete_index_template', opts),
|
||||
delete_template: lazyLoad('indices.delete_template', opts),
|
||||
deleteTemplate: lazyLoad('indices.delete_template', opts),
|
||||
exists: lazyLoad('indices.exists', opts),
|
||||
exists_alias: lazyLoad('indices.exists_alias', opts),
|
||||
existsAlias: lazyLoad('indices.exists_alias', opts),
|
||||
exists_index_template: lazyLoad('indices.exists_index_template', opts),
|
||||
existsIndexTemplate: lazyLoad('indices.exists_index_template', opts),
|
||||
exists_template: lazyLoad('indices.exists_template', opts),
|
||||
existsTemplate: lazyLoad('indices.exists_template', opts),
|
||||
exists_type: lazyLoad('indices.exists_type', opts),
|
||||
@ -205,10 +243,12 @@ function ESAPI (opts) {
|
||||
get: lazyLoad('indices.get', opts),
|
||||
get_alias: lazyLoad('indices.get_alias', opts),
|
||||
getAlias: lazyLoad('indices.get_alias', opts),
|
||||
get_data_streams: lazyLoad('indices.get_data_streams', opts),
|
||||
getDataStreams: lazyLoad('indices.get_data_streams', opts),
|
||||
get_data_stream: lazyLoad('indices.get_data_stream', opts),
|
||||
getDataStream: lazyLoad('indices.get_data_stream', opts),
|
||||
get_field_mapping: lazyLoad('indices.get_field_mapping', opts),
|
||||
getFieldMapping: lazyLoad('indices.get_field_mapping', opts),
|
||||
get_index_template: lazyLoad('indices.get_index_template', opts),
|
||||
getIndexTemplate: lazyLoad('indices.get_index_template', opts),
|
||||
get_mapping: lazyLoad('indices.get_mapping', opts),
|
||||
getMapping: lazyLoad('indices.get_mapping', opts),
|
||||
get_settings: lazyLoad('indices.get_settings', opts),
|
||||
@ -220,6 +260,8 @@ function ESAPI (opts) {
|
||||
open: lazyLoad('indices.open', opts),
|
||||
put_alias: lazyLoad('indices.put_alias', opts),
|
||||
putAlias: lazyLoad('indices.put_alias', opts),
|
||||
put_index_template: lazyLoad('indices.put_index_template', opts),
|
||||
putIndexTemplate: lazyLoad('indices.put_index_template', opts),
|
||||
put_mapping: lazyLoad('indices.put_mapping', opts),
|
||||
putMapping: lazyLoad('indices.put_mapping', opts),
|
||||
put_settings: lazyLoad('indices.put_settings', opts),
|
||||
@ -230,11 +272,17 @@ function ESAPI (opts) {
|
||||
refresh: lazyLoad('indices.refresh', opts),
|
||||
reload_search_analyzers: lazyLoad('indices.reload_search_analyzers', opts),
|
||||
reloadSearchAnalyzers: lazyLoad('indices.reload_search_analyzers', opts),
|
||||
resolve_index: lazyLoad('indices.resolve_index', opts),
|
||||
resolveIndex: lazyLoad('indices.resolve_index', opts),
|
||||
rollover: lazyLoad('indices.rollover', opts),
|
||||
segments: lazyLoad('indices.segments', opts),
|
||||
shard_stores: lazyLoad('indices.shard_stores', opts),
|
||||
shardStores: lazyLoad('indices.shard_stores', opts),
|
||||
shrink: lazyLoad('indices.shrink', opts),
|
||||
simulate_index_template: lazyLoad('indices.simulate_index_template', opts),
|
||||
simulateIndexTemplate: lazyLoad('indices.simulate_index_template', opts),
|
||||
simulate_template: lazyLoad('indices.simulate_template', opts),
|
||||
simulateTemplate: lazyLoad('indices.simulate_template', opts),
|
||||
split: lazyLoad('indices.split', opts),
|
||||
stats: lazyLoad('indices.stats', opts),
|
||||
unfreeze: lazyLoad('indices.unfreeze', opts),
|
||||
@ -378,6 +426,8 @@ function ESAPI (opts) {
|
||||
stopDataFrameAnalytics: lazyLoad('ml.stop_data_frame_analytics', opts),
|
||||
stop_datafeed: lazyLoad('ml.stop_datafeed', opts),
|
||||
stopDatafeed: lazyLoad('ml.stop_datafeed', opts),
|
||||
update_data_frame_analytics: lazyLoad('ml.update_data_frame_analytics', opts),
|
||||
updateDataFrameAnalytics: lazyLoad('ml.update_data_frame_analytics', opts),
|
||||
update_datafeed: lazyLoad('ml.update_datafeed', opts),
|
||||
updateDatafeed: lazyLoad('ml.update_datafeed', opts),
|
||||
update_filter: lazyLoad('ml.update_filter', opts),
|
||||
@ -442,10 +492,28 @@ function ESAPI (opts) {
|
||||
searchShards: lazyLoad('search_shards', opts),
|
||||
search_template: lazyLoad('search_template', opts),
|
||||
searchTemplate: lazyLoad('search_template', opts),
|
||||
searchable_snapshots: {
|
||||
clear_cache: lazyLoad('searchable_snapshots.clear_cache', opts),
|
||||
clearCache: lazyLoad('searchable_snapshots.clear_cache', opts),
|
||||
mount: lazyLoad('searchable_snapshots.mount', opts),
|
||||
repository_stats: lazyLoad('searchable_snapshots.repository_stats', opts),
|
||||
repositoryStats: lazyLoad('searchable_snapshots.repository_stats', opts),
|
||||
stats: lazyLoad('searchable_snapshots.stats', opts)
|
||||
},
|
||||
searchableSnapshots: {
|
||||
clear_cache: lazyLoad('searchable_snapshots.clear_cache', opts),
|
||||
clearCache: lazyLoad('searchable_snapshots.clear_cache', opts),
|
||||
mount: lazyLoad('searchable_snapshots.mount', opts),
|
||||
repository_stats: lazyLoad('searchable_snapshots.repository_stats', opts),
|
||||
repositoryStats: lazyLoad('searchable_snapshots.repository_stats', opts),
|
||||
stats: lazyLoad('searchable_snapshots.stats', opts)
|
||||
},
|
||||
security: {
|
||||
authenticate: lazyLoad('security.authenticate', opts),
|
||||
change_password: lazyLoad('security.change_password', opts),
|
||||
changePassword: lazyLoad('security.change_password', opts),
|
||||
clear_cached_privileges: lazyLoad('security.clear_cached_privileges', opts),
|
||||
clearCachedPrivileges: lazyLoad('security.clear_cached_privileges', opts),
|
||||
clear_cached_realms: lazyLoad('security.clear_cached_realms', opts),
|
||||
clearCachedRealms: lazyLoad('security.clear_cached_realms', opts),
|
||||
clear_cached_roles: lazyLoad('security.clear_cached_roles', opts),
|
||||
|
||||
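As the `api/index.js` wiring above shows, every method is registered under both its snake_case and camelCase name, so either spelling resolves to the same lazily loaded implementation. For example, these two calls are equivalent (template name illustrative):

```js
await client.indices.put_index_template({ name: 'my-template', body: { index_patterns: ['my-index-*'] } })
await client.indices.putIndexTemplate({ name: 'my-template', body: { index_patterns: ['my-index-*'] } })
```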
459
api/kibana.d.ts
vendored
Normal file
@ -0,0 +1,459 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
/// <reference types="node" />
|
||||
|
||||
import {
|
||||
ClientOptions,
|
||||
ConnectionPool,
|
||||
Serializer,
|
||||
Transport,
|
||||
errors,
|
||||
RequestEvent,
|
||||
ResurrectEvent,
|
||||
ApiError
|
||||
} from '../index'
|
||||
import Helpers from '../lib/Helpers'
|
||||
import {
|
||||
ApiResponse,
|
||||
RequestBody,
|
||||
RequestNDBody,
|
||||
TransportRequestParams,
|
||||
TransportRequestOptions,
|
||||
TransportRequestPromise,
|
||||
Context
|
||||
} from '../lib/Transport'
|
||||
import * as RequestParams from './requestParams'
|
||||
|
||||
// Extend API
|
||||
interface ClientExtendsCallbackOptions {
|
||||
ConfigurationError: errors.ConfigurationError,
|
||||
makeRequest(params: TransportRequestParams, options?: TransportRequestOptions): Promise<void> | void;
|
||||
result: {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
headers: null,
|
||||
warnings: null
|
||||
}
|
||||
}
|
||||
declare type extendsCallback = (options: ClientExtendsCallbackOptions) => any;
|
||||
// /Extend API
|
||||
|
||||
interface KibanaClient {
|
||||
connectionPool: ConnectionPool
|
||||
transport: Transport
|
||||
serializer: Serializer
|
||||
extend(method: string, fn: extendsCallback): void
|
||||
extend(method: string, opts: { force: boolean }, fn: extendsCallback): void;
|
||||
helpers: Helpers
|
||||
child(opts?: ClientOptions): KibanaClient
|
||||
close(): Promise<void>;
|
||||
emit(event: string | symbol, ...args: any[]): boolean;
|
||||
on(event: 'request', listener: (err: ApiError, meta: RequestEvent) => void): this;
|
||||
on(event: 'response', listener: (err: ApiError, meta: RequestEvent) => void): this;
|
||||
on(event: 'sniff', listener: (err: ApiError, meta: RequestEvent) => void): this;
|
||||
on(event: 'resurrect', listener: (err: null, meta: ResurrectEvent) => void): this;
|
||||
once(event: 'request', listener: (err: ApiError, meta: RequestEvent) => void): this;
|
||||
once(event: 'response', listener: (err: ApiError, meta: RequestEvent) => void): this;
|
||||
once(event: 'sniff', listener: (err: ApiError, meta: RequestEvent) => void): this;
|
||||
once(event: 'resurrect', listener: (err: null, meta: ResurrectEvent) => void): this;
|
||||
off(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
/* GENERATED */
|
||||
asyncSearch: {
|
||||
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchDelete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchGet, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
submit<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.AsyncSearchSubmit<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
autoscaling: {
|
||||
deleteAutoscalingPolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingDeleteAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getAutoscalingDecision<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingDecision, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getAutoscalingPolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingGetAutoscalingPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
putAutoscalingPolicy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.AutoscalingPutAutoscalingPolicy<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
bulk<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.Bulk<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
cat: {
|
||||
aliases<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatAliases, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
allocation<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatAllocation, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
count<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatCount, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
fielddata<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatFielddata, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
health<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatHealth, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
help<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatHelp, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
indices<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatIndices, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
master<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatMaster, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
mlDataFrameAnalytics<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatMlDataFrameAnalytics, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
mlDatafeeds<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatMlDatafeeds, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
mlJobs<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatMlJobs, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
mlTrainedModels<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatMlTrainedModels, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
nodeattrs<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatNodeattrs, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
nodes<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatNodes, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
pendingTasks<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatPendingTasks, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
plugins<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatPlugins, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
recovery<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatRecovery, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
repositories<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatRepositories, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
segments<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatSegments, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
shards<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatShards, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
snapshots<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatSnapshots, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
tasks<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatTasks, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
templates<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatTemplates, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
threadPool<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatThreadPool, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
transforms<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CatTransforms, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
ccr: {
|
||||
deleteAutoFollowPattern<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrDeleteAutoFollowPattern, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
follow<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.CcrFollow<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
followInfo<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrFollowInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
followStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrFollowStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
forgetFollower<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.CcrForgetFollower<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getAutoFollowPattern<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrGetAutoFollowPattern, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
pauseAutoFollowPattern<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrPauseAutoFollowPattern, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
pauseFollow<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrPauseFollow, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
putAutoFollowPattern<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.CcrPutAutoFollowPattern<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
resumeAutoFollowPattern<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrResumeAutoFollowPattern, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
resumeFollow<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.CcrResumeFollow<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
unfollow<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.CcrUnfollow, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
clearScroll<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.ClearScroll<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
cluster: {
|
||||
allocationExplain<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterAllocationExplain<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteComponentTemplate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterDeleteComponentTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteVotingConfigExclusions<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterDeleteVotingConfigExclusions, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsComponentTemplate<TResponse = boolean, TContext = Context>(params?: RequestParams.ClusterExistsComponentTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getComponentTemplate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterGetComponentTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getSettings<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterGetSettings, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
health<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterHealth, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
pendingTasks<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterPendingTasks, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
postVotingConfigExclusions<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterPostVotingConfigExclusions, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putComponentTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterPutComponentTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putSettings<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterPutSettings<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
remoteInfo<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterRemoteInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
reroute<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterReroute<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
state<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterState, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ClusterStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
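// Usage note (illustrative comment, not part of the generated declarations): every
// method above takes optional type arguments so the caller can type the response
// body instead of receiving Record<string, any>. A minimal sketch, assuming a
// `client` instance and an interface you define yourself:
//
//   interface ClusterHealthBody { status: 'green' | 'yellow' | 'red' }
//   const { body } = await client.cluster.health<ClusterHealthBody>()
//   // body is typed as ClusterHealthBody rather than Record<string, any>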
count<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Count<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
create<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Create<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
danglingIndices: {
deleteDanglingIndex<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.DanglingIndicesDeleteDanglingIndex, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
importDanglingIndex<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.DanglingIndicesImportDanglingIndex, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
listDanglingIndices<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.DanglingIndicesListDanglingIndices, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.Delete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteByQuery<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.DeleteByQuery<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteByQueryRethrottle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.DeleteByQueryRethrottle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteScript<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.DeleteScript, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
enrich: {
deletePolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.EnrichDeletePolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
executePolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.EnrichExecutePolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getPolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.EnrichGetPolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putPolicy<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.EnrichPutPolicy<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.EnrichStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
eql: {
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.EqlDelete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.EqlGet, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.EqlSearch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
exists<TResponse = boolean, TContext = Context>(params?: RequestParams.Exists, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsSource<TResponse = boolean, TContext = Context>(params?: RequestParams.ExistsSource, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
explain<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Explain<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
fieldCaps<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.FieldCaps<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.Get, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getScript<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.GetScript, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getScriptContext<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.GetScriptContext, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getScriptLanguages<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.GetScriptLanguages, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getSource<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.GetSource, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
graph: {
explore<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.GraphExplore<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
ilm: {
deleteLifecycle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmDeleteLifecycle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
explainLifecycle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmExplainLifecycle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getLifecycle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmGetLifecycle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getStatus<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmGetStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
moveToStep<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IlmMoveToStep<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putLifecycle<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IlmPutLifecycle<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
removePolicy<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmRemovePolicy, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
retry<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmRetry, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
start<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmStart, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stop<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IlmStop, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
index<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Index<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
indices: {
addBlock<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesAddBlock, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
analyze<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesAnalyze<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clearCache<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesClearCache, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clone<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesClone<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
close<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesClose, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
create<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesCreate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
createDataStream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesCreateDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
dataStreamsStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDataStreamsStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDelete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteAlias<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDeleteAlias, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteDataStream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDeleteDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteIndexTemplate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDeleteIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteTemplate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDeleteTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
exists<TResponse = boolean, TContext = Context>(params?: RequestParams.IndicesExists, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsAlias<TResponse = boolean, TContext = Context>(params?: RequestParams.IndicesExistsAlias, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsIndexTemplate<TResponse = boolean, TContext = Context>(params?: RequestParams.IndicesExistsIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsTemplate<TResponse = boolean, TContext = Context>(params?: RequestParams.IndicesExistsTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
existsType<TResponse = boolean, TContext = Context>(params?: RequestParams.IndicesExistsType, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
flush<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesFlush, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
flushSynced<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesFlushSynced, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
forcemerge<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesForcemerge, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
freeze<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesFreeze, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGet, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getAlias<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetAlias, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getDataStream<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetDataStream, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getFieldMapping<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetFieldMapping, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getIndexTemplate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetIndexTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getMapping<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetMapping, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getSettings<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetSettings, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getTemplate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetTemplate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getUpgrade<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesGetUpgrade, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
open<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesOpen, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putAlias<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutAlias<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutIndexTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putMapping<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutMapping<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putSettings<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutSettings<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesPutTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
recovery<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesRecovery, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
refresh<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesRefresh, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
reloadSearchAnalyzers<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesReloadSearchAnalyzers, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
resolveIndex<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesResolveIndex, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rollover<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesRollover<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
segments<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesSegments, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
shardStores<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesShardStores, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
shrink<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesShrink<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
simulateIndexTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesSimulateIndexTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
simulateTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesSimulateTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
split<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesSplit<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
unfreeze<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesUnfreeze, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateAliases<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesUpdateAliases<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
upgrade<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesUpgrade, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
validateQuery<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesValidateQuery<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
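// Usage note (illustrative comment, not part of the generated declarations): the
// exists-style methods above default TResponse to boolean, so the result arrives as
// a true/false body rather than a JSON document. A minimal sketch, assuming an index
// named 'my-index':
//
//   const { body: indexExists } = await client.indices.exists({ index: 'my-index' })
//   if (!indexExists) await client.indices.create({ index: 'my-index' })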
info<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.Info, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
ingest: {
deletePipeline<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IngestDeletePipeline, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getPipeline<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IngestGetPipeline, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
processorGrok<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IngestProcessorGrok, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putPipeline<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IngestPutPipeline<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
simulate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.IngestSimulate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
license: {
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.LicenseDelete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.LicenseGet, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getBasicStatus<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.LicenseGetBasicStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getTrialStatus<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.LicenseGetTrialStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
post<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.LicensePost<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
postStartBasic<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.LicensePostStartBasic, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
postStartTrial<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.LicensePostStartTrial, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
mget<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Mget<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
migration: {
deprecations<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MigrationDeprecations, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
ml: {
closeJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlCloseJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteCalendar<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteCalendar, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteCalendarEvent<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteCalendarEvent, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteCalendarJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteCalendarJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteDataFrameAnalytics<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteDataFrameAnalytics, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteDatafeed<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteDatafeed, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteExpiredData<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteExpiredData<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteFilter<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteFilter, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteForecast<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteForecast, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteModelSnapshot<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteModelSnapshot, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteTrainedModel<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlDeleteTrainedModel, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
estimateModelMemory<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlEstimateModelMemory<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
evaluateDataFrame<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlEvaluateDataFrame<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
explainDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlExplainDataFrameAnalytics<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
findFileStructure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.MlFindFileStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
flushJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlFlushJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
forecast<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlForecast, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getBuckets<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetBuckets<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getCalendarEvents<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetCalendarEvents, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getCalendars<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetCalendars<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getCategories<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetCategories<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getDataFrameAnalytics<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetDataFrameAnalytics, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getDataFrameAnalyticsStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetDataFrameAnalyticsStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getDatafeedStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetDatafeedStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getDatafeeds<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetDatafeeds, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getFilters<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetFilters, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getInfluencers<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetInfluencers<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getJobStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetJobStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getJobs<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetJobs, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getModelSnapshots<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetModelSnapshots<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getOverallBuckets<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetOverallBuckets<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getRecords<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetRecords<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getTrainedModels<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetTrainedModels, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getTrainedModelsStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetTrainedModelsStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
info<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
openJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlOpenJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
postCalendarEvents<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPostCalendarEvents<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
postData<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPostData<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
previewDatafeed<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlPreviewDatafeed, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putCalendar<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPutCalendar<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putCalendarJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlPutCalendarJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPutDataFrameAnalytics<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putDatafeed<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPutDatafeed<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putFilter<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPutFilter<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPutJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putTrainedModel<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlPutTrainedModel<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
revertModelSnapshot<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlRevertModelSnapshot<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
setUpgradeMode<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlSetUpgradeMode, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
startDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlStartDataFrameAnalytics<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
startDatafeed<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlStartDatafeed<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stopDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlStopDataFrameAnalytics<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stopDatafeed<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlStopDatafeed, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateDataFrameAnalytics<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateDatafeed<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateDatafeed<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateFilter<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateFilter<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
updateModelSnapshot<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlUpdateModelSnapshot<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
validate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlValidate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
validateDetector<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlValidateDetector<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
monitoring: {
bulk<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.MonitoringBulk<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
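// Usage note (illustrative comment, not part of the generated declarations): methods
// typed with RequestNDBody (msearch, msearchTemplate, monitoring.bulk, ...) accept the
// body as an array of objects that the client serializes as newline-delimited JSON.
// A minimal sketch, assuming an index named 'my-index':
//
//   const { body } = await client.msearch({
//     body: [
//       { index: 'my-index' },
//       { query: { match_all: {} } }
//     ]
//   })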
msearch<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.Msearch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
msearchTemplate<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.MsearchTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
mtermvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Mtermvectors<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
nodes: {
hotThreads<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesHotThreads, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
info<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
reloadSecureSettings<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.NodesReloadSecureSettings<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
usage<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesUsage, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
ping<TResponse = boolean, TContext = Context>(params?: RequestParams.Ping, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putScript<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.PutScript<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rankEval<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RankEval<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
reindex<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Reindex<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
reindexRethrottle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.ReindexRethrottle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
renderSearchTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RenderSearchTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rollup: {
deleteJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupDeleteJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getJobs<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupGetJobs, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getRollupCaps<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupGetRollupCaps, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getRollupIndexCaps<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupGetRollupIndexCaps, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RollupPutJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
rollupSearch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.RollupRollupSearch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
startJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupStartJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stopJob<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.RollupStopJob, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
scriptsPainlessExecute<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.ScriptsPainlessExecute<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
scroll<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Scroll<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Search<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
searchShards<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SearchShards, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
searchTemplate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SearchTemplate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
searchableSnapshots: {
clearCache<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SearchableSnapshotsClearCache, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
mount<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SearchableSnapshotsMount<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
repositoryStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SearchableSnapshotsRepositoryStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SearchableSnapshotsStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
security: {
authenticate<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityAuthenticate, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
changePassword<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityChangePassword<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clearCachedPrivileges<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityClearCachedPrivileges, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clearCachedRealms<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityClearCachedRealms, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clearCachedRoles<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityClearCachedRoles, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
createApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityCreateApiKey<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deletePrivileges<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityDeletePrivileges, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteRole<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityDeleteRole, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteRoleMapping<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityDeleteRoleMapping, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
deleteUser<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityDeleteUser, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
disableUser<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityDisableUser, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
enableUser<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityEnableUser, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getApiKey<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetApiKey, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getBuiltinPrivileges<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetBuiltinPrivileges, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getPrivileges<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetPrivileges, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getRole<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetRole, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getRoleMapping<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetRoleMapping, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getToken<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetToken<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getUser<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetUser, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getUserPrivileges<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetUserPrivileges, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
hasPrivileges<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityHasPrivileges<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
invalidateApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityInvalidateApiKey<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
invalidateToken<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityInvalidateToken<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putPrivileges<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityPutPrivileges<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putRole<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityPutRole<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putRoleMapping<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityPutRoleMapping<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
putUser<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityPutUser<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
slm: {
deleteLifecycle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmDeleteLifecycle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
executeLifecycle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmExecuteLifecycle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
executeRetention<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmExecuteRetention, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getLifecycle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmGetLifecycle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmGetStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getStatus<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmGetStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
putLifecycle<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SlmPutLifecycle<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
start<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmStart, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
stop<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SlmStop, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
snapshot: {
|
||||
cleanupRepository<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotCleanupRepository, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
create<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotCreate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
createRepository<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotCreateRepository<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotDelete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
deleteRepository<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotDeleteRepository, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotGet, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getRepository<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotGetRepository, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
restore<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotRestore<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
status<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotStatus, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
verifyRepository<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotVerifyRepository, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
sql: {
|
||||
clearCursor<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SqlClearCursor<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
query<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SqlQuery<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
translate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SqlTranslate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
ssl: {
|
||||
certificates<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SslCertificates, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
tasks: {
|
||||
cancel<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TasksCancel, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
get<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TasksGet, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
list<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TasksList, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
termvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Termvectors<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
transform: {
|
||||
deleteTransform<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformDeleteTransform, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getTransform<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformGetTransform, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getTransformStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformGetTransformStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
previewTransform<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TransformPreviewTransform<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
putTransform<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TransformPutTransform<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
startTransform<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformStartTransform, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
stopTransform<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformStopTransform, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
updateTransform<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TransformUpdateTransform<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
update<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Update<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
updateByQuery<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.UpdateByQuery<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
updateByQueryRethrottle<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.UpdateByQueryRethrottle, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
watcher: {
|
||||
ackWatch<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherAckWatch, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
activateWatch<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherActivateWatch, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
deactivateWatch<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherDeactivateWatch, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
deleteWatch<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherDeleteWatch, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
executeWatch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherExecuteWatch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getWatch<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherGetWatch, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
putWatch<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherPutWatch<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
start<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherStart, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
stop<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.WatcherStop, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
xpack: {
|
||||
info<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.XpackInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
usage<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.XpackUsage, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
}
|
||||
/* /GENERATED */
|
||||
}
|
||||
|
||||
export { KibanaClient }
|
||||
224 api/requestParams.d.ts vendored
@ -6,7 +6,6 @@ import { RequestBody, RequestNDBody } from '../lib/Transport'
|
||||
|
||||
export interface Generic {
|
||||
method?: string;
|
||||
ignore?: number | number[];
|
||||
filter_path?: string | string[];
|
||||
pretty?: boolean;
|
||||
human?: boolean;
|
||||
@ -20,7 +19,7 @@ export interface Bulk<T = RequestNDBody> extends Generic {
|
||||
_source_exclude?: string | string[];
|
||||
_source_include?: string | string[];
|
||||
wait_for_active_shards?: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
routing?: string;
|
||||
timeout?: string;
|
||||
_source?: string | string[];
|
||||
@ -269,6 +268,16 @@ export interface ClusterDeleteComponentTemplate extends Generic {
|
||||
master_timeout?: string;
|
||||
}
|
||||
|
||||
export interface ClusterDeleteVotingConfigExclusions extends Generic {
|
||||
wait_for_removal?: boolean;
|
||||
}
|
||||
|
||||
export interface ClusterExistsComponentTemplate extends Generic {
|
||||
name: string;
|
||||
master_timeout?: string;
|
||||
local?: boolean;
|
||||
}
|
||||
|
||||
export interface ClusterGetComponentTemplate extends Generic {
|
||||
name?: string | string[];
|
||||
master_timeout?: string;
|
||||
@ -302,6 +311,12 @@ export interface ClusterPendingTasks extends Generic {
|
||||
master_timeout?: string;
|
||||
}
|
||||
|
||||
export interface ClusterPostVotingConfigExclusions extends Generic {
|
||||
node_ids?: string;
|
||||
node_names?: string;
|
||||
timeout?: string;
|
||||
}
|
||||
|
||||
export interface ClusterPutComponentTemplate<T = RequestBody> extends Generic {
|
||||
name: string;
|
||||
create?: boolean;
|
||||
@ -374,7 +389,7 @@ export interface Create<T = RequestBody> extends Generic {
|
||||
index: string;
|
||||
type?: string;
|
||||
wait_for_active_shards?: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
routing?: string;
|
||||
timeout?: string;
|
||||
version?: number;
|
||||
@ -383,12 +398,29 @@ export interface Create<T = RequestBody> extends Generic {
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface DanglingIndicesDeleteDanglingIndex extends Generic {
|
||||
index_uuid: string;
|
||||
accept_data_loss?: boolean;
|
||||
timeout?: string;
|
||||
master_timeout?: string;
|
||||
}
|
||||
|
||||
export interface DanglingIndicesImportDanglingIndex extends Generic {
|
||||
index_uuid: string;
|
||||
accept_data_loss?: boolean;
|
||||
timeout?: string;
|
||||
master_timeout?: string;
|
||||
}
|
||||
|
||||
export interface DanglingIndicesListDanglingIndices extends Generic {
|
||||
}
|
||||
|
||||
export interface Delete extends Generic {
|
||||
id: string;
|
||||
index: string;
|
||||
type?: string;
|
||||
wait_for_active_shards?: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
routing?: string;
|
||||
timeout?: string;
|
||||
if_seq_no?: number;
|
||||
@ -505,13 +537,14 @@ export interface Explain<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface FieldCaps extends Generic {
|
||||
export interface FieldCaps<T = RequestBody> extends Generic {
|
||||
index?: string | string[];
|
||||
fields?: string | string[];
|
||||
ignore_unavailable?: boolean;
|
||||
allow_no_indices?: boolean;
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
include_unmapped?: boolean;
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface Get extends Generic {
|
||||
@ -566,7 +599,7 @@ export interface Index<T = RequestBody> extends Generic {
|
||||
type?: string;
|
||||
wait_for_active_shards?: string;
|
||||
op_type?: 'index' | 'create';
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
routing?: string;
|
||||
timeout?: string;
|
||||
version?: number;
|
||||
@ -577,6 +610,16 @@ export interface Index<T = RequestBody> extends Generic {
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface IndicesAddBlock extends Generic {
|
||||
index: string | string[];
|
||||
block: string;
|
||||
timeout?: string;
|
||||
master_timeout?: string;
|
||||
ignore_unavailable?: boolean;
|
||||
allow_no_indices?: boolean;
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
}
|
||||
|
||||
export interface IndicesAnalyze<T = RequestBody> extends Generic {
|
||||
index?: string;
|
||||
body?: T;
|
||||
@ -621,11 +664,6 @@ export interface IndicesCreate<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface IndicesCreateDataStream<T = RequestBody> extends Generic {
|
||||
name: string;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface IndicesDelete extends Generic {
|
||||
index: string | string[];
|
||||
timeout?: string;
|
||||
@ -642,8 +680,10 @@ export interface IndicesDeleteAlias extends Generic {
|
||||
master_timeout?: string;
|
||||
}
|
||||
|
||||
export interface IndicesDeleteDataStream extends Generic {
|
||||
export interface IndicesDeleteIndexTemplate extends Generic {
|
||||
name: string;
|
||||
timeout?: string;
|
||||
master_timeout?: string;
|
||||
}
|
||||
|
||||
export interface IndicesDeleteTemplate extends Generic {
|
||||
@ -671,6 +711,13 @@ export interface IndicesExistsAlias extends Generic {
|
||||
local?: boolean;
|
||||
}
|
||||
|
||||
export interface IndicesExistsIndexTemplate extends Generic {
|
||||
name: string;
|
||||
flat_settings?: boolean;
|
||||
master_timeout?: string;
|
||||
local?: boolean;
|
||||
}
|
||||
|
||||
export interface IndicesExistsTemplate extends Generic {
|
||||
name: string | string[];
|
||||
flat_settings?: boolean;
|
||||
@ -734,10 +781,6 @@ export interface IndicesGetAlias extends Generic {
|
||||
local?: boolean;
|
||||
}
|
||||
|
||||
export interface IndicesGetDataStreams extends Generic {
|
||||
name?: string | string[];
|
||||
}
|
||||
|
||||
export interface IndicesGetFieldMapping extends Generic {
|
||||
fields: string | string[];
|
||||
index?: string | string[];
|
||||
@ -750,6 +793,13 @@ export interface IndicesGetFieldMapping extends Generic {
|
||||
local?: boolean;
|
||||
}
|
||||
|
||||
export interface IndicesGetIndexTemplate extends Generic {
|
||||
name?: string | string[];
|
||||
flat_settings?: boolean;
|
||||
master_timeout?: string;
|
||||
local?: boolean;
|
||||
}
|
||||
|
||||
export interface IndicesGetMapping extends Generic {
|
||||
index?: string | string[];
|
||||
type?: string | string[];
|
||||
@ -806,6 +856,14 @@ export interface IndicesPutAlias<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface IndicesPutIndexTemplate<T = RequestBody> extends Generic {
|
||||
name: string;
|
||||
create?: boolean;
|
||||
cause?: string;
|
||||
master_timeout?: string;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface IndicesPutMapping<T = RequestBody> extends Generic {
|
||||
index?: string | string[];
|
||||
type?: string;
|
||||
@ -815,6 +873,7 @@ export interface IndicesPutMapping<T = RequestBody> extends Generic {
|
||||
ignore_unavailable?: boolean;
|
||||
allow_no_indices?: boolean;
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
write_index_only?: boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
@ -852,6 +911,11 @@ export interface IndicesRefresh extends Generic {
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
}
|
||||
|
||||
export interface IndicesResolveIndex extends Generic {
|
||||
name: string | string[];
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
}
|
||||
|
||||
export interface IndicesRollover<T = RequestBody> extends Generic {
|
||||
alias: string;
|
||||
new_index?: string;
|
||||
@ -889,6 +953,22 @@ export interface IndicesShrink<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface IndicesSimulateIndexTemplate<T = RequestBody> extends Generic {
|
||||
name: string;
|
||||
create?: boolean;
|
||||
cause?: string;
|
||||
master_timeout?: string;
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface IndicesSimulateTemplate<T = RequestBody> extends Generic {
|
||||
name?: string;
|
||||
create?: boolean;
|
||||
cause?: string;
|
||||
master_timeout?: string;
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface IndicesSplit<T = RequestBody> extends Generic {
|
||||
index: string;
|
||||
target: string;
|
||||
@ -1284,6 +1364,7 @@ export interface TasksCancel extends Generic {
|
||||
nodes?: string | string[];
|
||||
actions?: string | string[];
|
||||
parent_task_id?: string;
|
||||
wait_for_completion?: boolean;
|
||||
}
|
||||
|
||||
export interface TasksGet extends Generic {
|
||||
@ -1331,7 +1412,7 @@ export interface Update<T = RequestBody> extends Generic {
|
||||
_source_excludes?: string | string[];
|
||||
_source_includes?: string | string[];
|
||||
lang?: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
retry_on_conflict?: number;
|
||||
routing?: string;
|
||||
timeout?: string;
|
||||
@ -1447,9 +1528,22 @@ export interface AsyncSearchSubmit<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface AutoscalingDeleteAutoscalingPolicy extends Generic {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface AutoscalingGetAutoscalingDecision extends Generic {
|
||||
}
|
||||
|
||||
export interface AutoscalingGetAutoscalingPolicy extends Generic {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface AutoscalingPutAutoscalingPolicy<T = RequestBody> extends Generic {
|
||||
name: string;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface CatMlDataFrameAnalytics extends Generic {
|
||||
id?: string;
|
||||
allow_no_match?: boolean;
|
||||
@ -1589,8 +1683,21 @@ export interface EnrichPutPolicy<T = RequestBody> extends Generic {
|
||||
export interface EnrichStats extends Generic {
|
||||
}
|
||||
|
||||
export interface EqlDelete extends Generic {
|
||||
id: string;
|
||||
}
|
||||
|
||||
export interface EqlGet extends Generic {
|
||||
id: string;
|
||||
wait_for_completion_timeout?: string;
|
||||
keep_alive?: string;
|
||||
}
|
||||
|
||||
export interface EqlSearch<T = RequestBody> extends Generic {
|
||||
index: string;
|
||||
wait_for_completion_timeout?: string;
|
||||
keep_on_completion?: boolean;
|
||||
keep_alive?: string;
|
||||
body: T;
|
||||
}
|
||||
|
||||
@ -1643,6 +1750,18 @@ export interface IlmStart extends Generic {
|
||||
export interface IlmStop extends Generic {
|
||||
}
|
||||
|
||||
export interface IndicesCreateDataStream extends Generic {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface IndicesDataStreamsStats extends Generic {
|
||||
name?: string | string[];
|
||||
}
|
||||
|
||||
export interface IndicesDeleteDataStream extends Generic {
|
||||
name: string | string[];
|
||||
}
|
||||
|
||||
export interface IndicesFreeze extends Generic {
|
||||
index: string;
|
||||
timeout?: string;
|
||||
@ -1653,6 +1772,10 @@ export interface IndicesFreeze extends Generic {
|
||||
wait_for_active_shards?: string;
|
||||
}
|
||||
|
||||
export interface IndicesGetDataStream extends Generic {
|
||||
name?: string | string[];
|
||||
}
|
||||
|
||||
export interface IndicesReloadSearchAnalyzers extends Generic {
|
||||
index: string | string[];
|
||||
ignore_unavailable?: boolean;
|
||||
@ -1727,6 +1850,7 @@ export interface MlDeleteCalendarJob extends Generic {
|
||||
export interface MlDeleteDataFrameAnalytics extends Generic {
|
||||
id: string;
|
||||
force?: boolean;
|
||||
timeout?: string;
|
||||
}
|
||||
|
||||
export interface MlDeleteDatafeed extends Generic {
|
||||
@ -1734,7 +1858,11 @@ export interface MlDeleteDatafeed extends Generic {
|
||||
force?: boolean;
|
||||
}
|
||||
|
||||
export interface MlDeleteExpiredData extends Generic {
|
||||
export interface MlDeleteExpiredData<T = RequestBody> extends Generic {
|
||||
job_id?: string;
|
||||
requests_per_second?: number;
|
||||
timeout?: string;
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface MlDeleteFilter extends Generic {
|
||||
@ -1808,6 +1936,7 @@ export interface MlForecast extends Generic {
|
||||
job_id: string;
|
||||
duration?: string;
|
||||
expires_in?: string;
|
||||
max_model_memory?: string;
|
||||
}
|
||||
|
||||
export interface MlGetBuckets<T = RequestBody> extends Generic {
|
||||
@ -1846,6 +1975,7 @@ export interface MlGetCategories<T = RequestBody> extends Generic {
|
||||
category_id?: number;
|
||||
from?: number;
|
||||
size?: number;
|
||||
partition_field_value?: string;
|
||||
body?: T;
|
||||
}
|
||||
|
||||
@ -1947,6 +2077,7 @@ export interface MlGetTrainedModels extends Generic {
|
||||
from?: number;
|
||||
size?: number;
|
||||
tags?: string | string[];
|
||||
for_export?: boolean;
|
||||
}
|
||||
|
||||
export interface MlGetTrainedModelsStats extends Generic {
|
||||
@ -2059,6 +2190,11 @@ export interface MlStopDatafeed extends Generic {
|
||||
timeout?: string;
|
||||
}
|
||||
|
||||
export interface MlUpdateDataFrameAnalytics<T = RequestBody> extends Generic {
|
||||
id: string;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface MlUpdateDatafeed<T = RequestBody> extends Generic {
|
||||
datafeed_id: string;
|
||||
ignore_unavailable?: boolean;
|
||||
@ -2139,15 +2275,42 @@ export interface RollupStopJob extends Generic {
|
||||
timeout?: string;
|
||||
}
|
||||
|
||||
export interface SearchableSnapshotsClearCache extends Generic {
|
||||
index?: string | string[];
|
||||
ignore_unavailable?: boolean;
|
||||
allow_no_indices?: boolean;
|
||||
expand_wildcards?: 'open' | 'closed' | 'none' | 'all';
|
||||
}
|
||||
|
||||
export interface SearchableSnapshotsMount<T = RequestBody> extends Generic {
|
||||
repository: string;
|
||||
snapshot: string;
|
||||
master_timeout?: string;
|
||||
wait_for_completion?: boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface SearchableSnapshotsRepositoryStats extends Generic {
|
||||
repository: string;
|
||||
}
|
||||
|
||||
export interface SearchableSnapshotsStats extends Generic {
|
||||
index?: string | string[];
|
||||
}
|
||||
|
||||
export interface SecurityAuthenticate extends Generic {
|
||||
}
|
||||
|
||||
export interface SecurityChangePassword<T = RequestBody> extends Generic {
|
||||
username?: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface SecurityClearCachedPrivileges extends Generic {
|
||||
application: string | string[];
|
||||
}
|
||||
|
||||
export interface SecurityClearCachedRealms extends Generic {
|
||||
realms: string | string[];
|
||||
usernames?: string | string[];
|
||||
@ -2158,39 +2321,39 @@ export interface SecurityClearCachedRoles extends Generic {
|
||||
}
|
||||
|
||||
export interface SecurityCreateApiKey<T = RequestBody> extends Generic {
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface SecurityDeletePrivileges extends Generic {
|
||||
application: string;
|
||||
name: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
}
|
||||
|
||||
export interface SecurityDeleteRole extends Generic {
|
||||
name: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
}
|
||||
|
||||
export interface SecurityDeleteRoleMapping extends Generic {
|
||||
name: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
}
|
||||
|
||||
export interface SecurityDeleteUser extends Generic {
|
||||
username: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
}
|
||||
|
||||
export interface SecurityDisableUser extends Generic {
|
||||
username: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
}
|
||||
|
||||
export interface SecurityEnableUser extends Generic {
|
||||
username: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
}
|
||||
|
||||
export interface SecurityGetApiKey extends Generic {
|
||||
@ -2242,25 +2405,25 @@ export interface SecurityInvalidateToken<T = RequestBody> extends Generic {
|
||||
}
|
||||
|
||||
export interface SecurityPutPrivileges<T = RequestBody> extends Generic {
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface SecurityPutRole<T = RequestBody> extends Generic {
|
||||
name: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface SecurityPutRoleMapping<T = RequestBody> extends Generic {
|
||||
name: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface SecurityPutUser<T = RequestBody> extends Generic {
|
||||
username: string;
|
||||
refresh?: 'true' | 'false' | 'wait_for';
|
||||
refresh?: 'wait_for' | boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
@ -2410,6 +2573,7 @@ export interface WatcherStop extends Generic {
|
||||
|
||||
export interface XpackInfo extends Generic {
|
||||
categories?: string | string[];
|
||||
accept_enterprise?: boolean;
|
||||
}
|
||||
|
||||
export interface XpackUsage extends Generic {
|
||||
|
||||
@ -20,6 +20,8 @@ override this option by configuring them.
|
||||
|
||||
IMPORTANT: Do not enable sniffing when using Elastic Cloud. Since the nodes are
behind a load balancer, Elastic Cloud takes care of everything for you.
|
||||
Take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here]
|
||||
to know more.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -106,27 +108,22 @@ const client = new Client({
|
||||
|
||||
=== SSL configuration
|
||||
|
||||
Without any additional configuration you can specify `https://` node urls, but
|
||||
the certificates used to sign these requests will not verified
|
||||
(`rejectUnauthorized: false`). To turn on certificate verification, you must
|
||||
specify an `ssl` object either in the top level config or in each host config
|
||||
object and set `rejectUnauthorized: true`. The ssl config object can contain
|
||||
many of the same configuration options that
|
||||
https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[tls.connect()]
|
||||
accepts.
|
||||
Without any additional configuration you can specify `https://` node urls, and
|
||||
the certificates used to sign these requests will be verified. To turn off certificate verification, you must specify an `ssl` object in the top level config and set `rejectUnauthorized: false`. The default `ssl` values are the same ones that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
uses.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
},
|
||||
ssl: {
|
||||
ca: fs.readFileSync('./cacert.pem'),
|
||||
rejectUnauthorized: true
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
@ -95,6 +95,16 @@ error, such as `statusCode`, `headers`, `body`, and `message`.
|
||||
version, you can specify the host and port in a variety of ways. With the new
|
||||
client, there is only one way to do it, via the `node` parameter.
|
||||
|
||||
* Certificates are verified by default. If you want to disable certificate verification, set the `rejectUnauthorized` option to `false` inside the `ssl` configuration:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
ssl: { rejectUnauthorized: false }
|
||||
})
|
||||
----
|
||||
|
||||
* The `plugins` option has been removed. If you want to extend the client now,
|
||||
you should use the `client.extend` API.
|
||||
|
||||
|
||||
@ -1,6 +1,78 @@
|
||||
[[changelog-client]]
|
||||
== Changelog
|
||||
|
||||
=== 7.8.0
|
||||
|
||||
==== Features
|
||||
|
||||
===== Support for Elasticsearch `v7.8`.
|
||||
|
||||
You can find all the API changes https://www.elastic.co/guide/en/elasticsearch/reference/7.8/release-notes-7.8.0.html[here].
|
||||
|
||||
===== Added multi search helper https://github.com/elastic/elasticsearch-js/pull/1186[#1186]
|
||||
|
||||
If you are sending search requests at a high rate, this helper might be useful for you.
It will use the multi search API under the hood to batch the requests and improve the overall performance of your application. +
The `result` exposes a `documents` property as well, which allows you to access the hits sources directly.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
// promise style API
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body)) // or result.documents
|
||||
.catch(err => console.error(err))
|
||||
|
||||
// callback style API
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'ruby' } } },
|
||||
(err, result) => {
|
||||
if (err) console.error(err)
|
||||
console.log(result.body) // or result.documents
|
||||
}
|
||||
)
|
||||
----
|
||||
|
||||
===== Added timeout support in bulk and msearch helpers https://github.com/elastic/elasticsearch-js/pull/1206[#1206]
|
||||
|
||||
If there is a slow producer, the bulk helper might wait a long time before sending the data, and if the process crashes for any reason, that data would be lost.
This PR introduces a `flushInterval` option in the bulk helper to avoid this issue. By default, the bulk helper will flush the data automatically every 30 seconds, unless the threshold has been reached before.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
flushInterval: 30000
|
||||
})
|
||||
----
|
||||
|
||||
The same problem might happen with the multi search helper, where the user is not sending search requests fast enough. A `flushInterval` option has been added as well, with a default value of 500 milliseconds.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
flushInterval: 500
|
||||
})
|
||||
----
|
||||
|
||||
==== Internals
|
||||
|
||||
===== Use filter_path for improving the search helpers performances https://github.com/elastic/elasticsearch-js/pull/1199[#1199]
|
||||
|
||||
From now on, all the search helpers will use the `filter_path` option automatically when needed to retrieve only the hits source. This change will result in less network traffic and improved deserialization performance.
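For illustration, this is roughly the kind of request the helpers issue under the hood (a minimal sketch; the exact set of filters used internally may differ):

[source,js]
----
// Only the hits sources are returned, everything else is filtered
// out by Elasticsearch before the response is serialized.
client.search({
  index: 'stackoverflow',
  filter_path: 'hits.hits._source',
  body: {
    query: { match: { title: 'javascript' } }
  }
}, (err, result) => {
  if (err) console.error(err)
  console.log(result.body.hits.hits.map(hit => hit._source))
})
----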
|
||||
|
||||
===== Search helpers documents getter https://github.com/elastic/elasticsearch-js/pull/1186[#1186]
|
||||
|
||||
Before this, the `documents` key that you can access in any search helper was computed as soon as we got the search result from Elasticsearch. With this change the `documents` key is now a getter, which makes this process lazy, resulting in better performance and a lower memory impact.
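A rough sketch of the idea (hypothetical code, not the helper's actual implementation):

[source,js]
----
// The sources are extracted from the raw response only when
// `documents` is read for the first time, instead of eagerly.
function withLazyDocuments (result) {
  Object.defineProperty(result, 'documents', {
    get () {
      return result.body.hits.hits.map(hit => hit._source)
    }
  })
  return result
}
----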
|
||||
|
||||
|
||||
=== 7.7.1
|
||||
|
||||
==== Fixes
|
||||
|
||||
@ -76,19 +76,19 @@ auth: {
|
||||
_Default:_ `3`
|
||||
|
||||
|`requestTimeout`
|
||||
|`number` - Max request timeout for each request. +
|
||||
|`number` - Max request timeout in milliseconds for each request. +
|
||||
_Default:_ `30000`
|
||||
|
||||
|`pingTimeout`
|
||||
|`number` - Max ping request timeout for each request. +
|
||||
|`number` - Max ping request timeout in milliseconds for each request. +
|
||||
_Default:_ `3000`
|
||||
|
||||
|`sniffInterval`
|
||||
|`number, boolean` - Perform a sniff operation every `n` milliseconds. +
|
||||
|`number, boolean` - Perform a sniff operation every `n` milliseconds. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`sniffOnStart`
|
||||
|`boolean` - Perform a sniff once the client is started. +
|
||||
|`boolean` - Perform a sniff once the client is started. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`sniffEndpoint`
|
||||
@ -96,7 +96,7 @@ _Default:_ `false`
|
||||
_Default:_ `'_nodes/_all/http'`
|
||||
|
||||
|`sniffOnConnectionFault`
|
||||
|`boolean` - Perform a sniff on connection fault. +
|
||||
|`boolean` - Perform a sniff on connection fault. Sniffing might not be the best solution for you, take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here] to know more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`resurrectStrategy`
|
||||
@ -119,7 +119,8 @@ _Default:_ `null`
|
||||
|
||||
|`agent`
|
||||
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options],
|
||||
or a function that returns an actual http agent instance. +
|
||||
or a function that returns an actual http agent instance. If you want to disable the http agent entirely
(and disable the `keep-alive` feature), set the agent to `false`. +
|
||||
_Default:_ `null`
|
||||
[source,js]
|
||||
----
|
||||
@ -132,6 +133,12 @@ const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
agent: () => new CustomAgent()
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// Disable agent and keep-alive
|
||||
agent: false
|
||||
})
|
||||
----
|
||||
|
||||
|`nodeFilter`
|
||||
@ -178,7 +185,7 @@ function generateRequestId (params, options) {
|
||||
----
|
||||
|
||||
|`name`
|
||||
|`string` - The name to identify the client instance in the events. +
|
||||
|`string | symbol` - The name to identify the client instance in the events. +
|
||||
_Default:_ `elasticsearch-js`
|
||||
|
||||
|`opaqueIdPrefix`
|
||||
@ -190,6 +197,11 @@ _Default:_ `null`
|
||||
|`object` - A set of custom headers to send in every request. +
|
||||
_Default:_ `{}`
|
||||
|
||||
|`context`
|
||||
|`object` - A custom object that you can use for observability in your events.
|
||||
It will be merged with the API level context option. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`cloud`
|
||||
a|`object` - Custom configuration for connecting to
|
||||
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
|
||||
@ -7,6 +7,9 @@ CAUTION: The client helpers are experimental, and the API may change in the next
|
||||
The helpers will not work in any Node.js version lower than 10.
|
||||
|
||||
=== Bulk Helper
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
Running Bulk requests can be complex due to the shape of the API; this helper aims to provide a nicer developer experience around the Bulk API.
|
||||
|
||||
==== Usage
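A minimal invocation sketch (the node url, index name, and inline datasource here are only illustrative):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const result = await client.helpers.bulk({
    // any array or async iterable of documents works as datasource
    datasource: [{ hello: 'world' }, { hello: 'elastic' }],
    onDocument (doc) {
      // tell the helper how each document should be indexed
      return { index: { _index: 'my-index' } }
    }
  })
  // result contains the stats of the whole bulk operation
  console.log(result)
}

run().catch(console.log)
----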
|
||||
@ -93,6 +96,16 @@ const b = client.helpers.bulk({
|
||||
})
|
||||
----
|
||||
|
||||
|`flushInterval`
|
||||
a|How much time (in milliseconds) the helper will wait, after reading the last document, before flushing the body. +
|
||||
_Default:_ `30000`
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
flushInterval: 30000
|
||||
})
|
||||
----
|
||||
|
||||
|`concurrency`
|
||||
a|How many requests will be executed at the same time. +
|
||||
_Default:_ `5`
|
||||
@ -114,7 +127,7 @@ const b = client.helpers.bulk({
|
||||
----
|
||||
|
||||
|`wait`
|
||||
a|How much time to wait before retries in milliseconds.+
|
||||
a|How much time to wait before retries in milliseconds. +
|
||||
_Default:_ 5000.
|
||||
[source,js]
|
||||
----
|
||||
@ -137,6 +150,64 @@ const b = client.helpers.bulk({
|
||||
|
||||
|===
|
||||
|
||||
==== Supported operations
|
||||
|
||||
===== Index
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
===== Create
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
create: { _index: 'my-index', _id: doc.id }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
===== Update
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
// Note that the update operation requires you to return
|
||||
// an array, where the first element is the action, while
// the second is the document options
|
||||
return [
|
||||
{ update: { _index: 'my-index', _id: doc.id } },
|
||||
{ doc_as_upsert: true }
|
||||
]
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
===== Delete
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
delete: { _index: 'my-index', _id: doc.id }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
==== Abort a bulk operation
|
||||
If needed, you can abort a bulk operation at any time. The bulk helper returns a https://promisesaplus.com/[thenable], which has an `abort` method.
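For example (a minimal sketch; the node url, index name, datasource, and abort condition are only illustrative):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({ node: 'http://localhost:9200' })

const b = client.helpers.bulk({
  datasource: [{ hello: 'world' }, { hello: 'elastic' }],
  onDocument (doc) {
    return { index: { _index: 'my-index' } }
  }
})

// Abort the whole bulk operation as soon as some external condition is met
setImmediate(() => b.abort())

b.then(console.log).catch(console.error)
----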
|
||||
|
||||
@ -211,8 +282,135 @@ const result = await client.helpers.bulk({
|
||||
console.log(result)
|
||||
----
|
||||
|
||||
=== Multi Search Helper
|
||||
|
||||
~Added~ ~in~ ~`v7.8.0`~
|
||||
|
||||
If you are sending search requests at a high rate, this helper might be useful for you.
It will use the multi search API under the hood to batch the requests and improve the overall performance of your application. +
The `result` exposes a `documents` property as well, which allows you to access the hits sources directly.
|
||||
|
||||
==== Usage
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
// promise style API
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body)) // or result.documents
|
||||
.catch(err => console.error(err))
|
||||
|
||||
// callback style API
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'ruby' } } },
|
||||
(err, result) => {
|
||||
if (err) console.error(err)
|
||||
console.log(result.body) // or result.documents
|
||||
}
|
||||
)
|
||||
----
|
||||
|
||||
To create a new instance of the Msearch helper, access it as shown in the example above. The configuration options are:
|
||||
[cols=2*]
|
||||
|===
|
||||
|`operations`
|
||||
a|How many search operations should be sent in a single msearch request. +
|
||||
_Default:_ `5`
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
operations: 10
|
||||
})
|
||||
----
|
||||
|
||||
|`flushInterval`
|
||||
a|How much time (in milliseconds) the helper will wait, after reading the last operation, before flushing the operations. +
|
||||
_Default:_ `500`
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
flushInterval: 500
|
||||
})
|
||||
----
|
||||
|
||||
|`concurrency`
|
||||
a|How many requests will be executed at the same time. +
|
||||
_Default:_ `5`
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
concurrency: 10
|
||||
})
|
||||
----
|
||||
|
||||
|`retries`
|
||||
a|How many times an operation will be retried before resolving the request. An operation will be retried only in case of a 429 error. +
|
||||
_Default:_ Client max retries.
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
retries: 3
|
||||
})
|
||||
----
|
||||
|
||||
|`wait`
|
||||
a|How much time to wait before retries in milliseconds. +
|
||||
_Default:_ 5000.
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
wait: 3000
|
||||
})
|
||||
----
|
||||
|
||||
|===
|
||||
|
||||
==== Stopping the Msearch Helper
|
||||
If needed, you can stop a msearch processor at any time. The msearch helper returns a https://promisesaplus.com/[thenable], which has a `stop` method.
|
||||
|
||||
If you are creating multiple msearch helper instances and using them for a limited period of time, remember to always call the `stop` method once you have finished using them, otherwise your application will start leaking memory.
|
||||
|
||||
The `stop` method accepts an optional error that will be dispatched to every subsequent search request.
|
||||
|
||||
NOTE: The stop method will stop the execution of the msearch processor, but if you are using a concurrency higher than one, the operations that are already running will not be stopped.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body))
|
||||
.catch(err => console.error(err))
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'ruby' } } }
|
||||
)
|
||||
.then(result => console.log(result.body))
|
||||
.catch(err => console.error(err))
|
||||
|
||||
setImmediate(() => m.stop())
|
||||
----
|
||||
|
||||
=== Search Helper
|
||||
A simple wrapper around the search API. Instead of returning the entire `result` object it will return only the search documents result.
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
A simple wrapper around the search API. Instead of returning the entire `result` object it will return only the search documents source.
|
||||
To improve performance, this helper automatically adds `filter_path=hits.hits._source` to the querystring.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -233,12 +431,15 @@ for (const doc of documents) {
|
||||
----
|
||||
|
||||
=== Scroll Search Helper
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
This helper offers a simple and intuitive way to use the scroll search API. Once called, it returns an https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of[async iterator] which can be used in conjunction with a for-await...of loop. +
It automatically handles the `429` error and uses the client's `maxRetries` option.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const scrollSearch = await client.helpers.scrollSearch({
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: 'stackoverflow',
|
||||
body: {
|
||||
query: {
|
||||
@ -280,11 +481,14 @@ for await (const result of scrollSearch) {
|
||||
|
||||
=== Scroll Documents Helper
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
It works in the same way as the scroll search helper, but it returns only the documents instead. Note, every loop cycle will return a single document, and you can't use the `clear` method.
To improve performance, this helper automatically adds `filter_path=hits.hits._source` to the querystring.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const scrollSearch = await client.helpers.scrollDocuments({
|
||||
const scrollSearch = client.helpers.scrollDocuments({
|
||||
index: 'stackoverflow',
|
||||
body: {
|
||||
query: {
|
||||
|
||||
@ -27,12 +27,12 @@ npm install @elastic/elasticsearch
|
||||
|
||||
The minimum supported version of Node.js is `v8`.
|
||||
|
||||
The library is compatible with all {es} versions since 5.x. We recommend you to
|
||||
use the same major version of the client as the {es} instance that you are
|
||||
The library is compatible with all {es} versions since 5.x. We recommend you to
|
||||
use the same major version of the client as the {es} instance that you are
|
||||
using.
|
||||
|
||||
|
||||
[%header,cols=2*]
|
||||
[%header,cols=2*]
|
||||
|===
|
||||
|{es} Version
|
||||
|Client Version
|
||||
@ -59,8 +59,8 @@ npm install @elastic/elasticsearch@<major>
|
||||
|
||||
==== Browser
|
||||
|
||||
WARNING: There is no official support for the browser environment. It exposes
|
||||
your {es} instance to everyone, which could lead to security issues. We
|
||||
WARNING: There is no official support for the browser environment. It exposes
|
||||
your {es} instance to everyone, which could lead to security issues. We
|
||||
recommend you to write a lightweight proxy that uses this client instead.
|
||||
|
||||
|
||||
@ -82,13 +82,21 @@ You can use both the callback API and the promise API, both behave the same way.
|
||||
// promise API
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
@ -171,8 +179,8 @@ run().catch(console.log)
|
||||
|
||||
==== Install multiple versions
|
||||
|
||||
If you are using multiple versions of {es}, you need to use multiple versions of
|
||||
the client as well. In the past, installing multiple versions of the same
|
||||
If you are using multiple versions of {es}, you need to use multiple versions of
|
||||
the client as well. In the past, installing multiple versions of the same
|
||||
package was not possible, but with `npm v6.9`, you can do it via aliasing.
|
||||
|
||||
To install different version of the client, run the following command:
|
||||
@ -218,12 +226,12 @@ client7.info(console.log)
|
||||
----
|
||||
|
||||
|
||||
Finally, if you want to install the client for the next version of {es} (the one
|
||||
Finally, if you want to install the client for the next version of {es} (the one
|
||||
that lives in the {es} master branch), use the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install esmaster@github:elastic/elasticsearch-js
|
||||
----
|
||||
WARNING: This command installs the master branch of the client which is not
|
||||
considered stable.
|
||||
WARNING: This command installs the master branch of the client which is not
|
||||
considered stable.
|
||||
|
||||
@ -229,6 +229,44 @@ client.search({
|
||||
})
|
||||
----
|
||||
|
||||
The context object can also be configured as a global option in the client
|
||||
configuration. If you provide both, the two context objects will be shallow merged,
and the API level object will take precedence.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
context: { winter: 'is coming' }
|
||||
})
|
||||
|
||||
client.on('request', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { context } = result.meta
|
||||
if (err) {
|
||||
console.log({ error: err, reqId: id, context })
|
||||
}
|
||||
})
|
||||
|
||||
client.on('response', (err, result) => {
|
||||
const { id } = result.meta.request
|
||||
const { winter } = result.meta.context
|
||||
if (err) {
|
||||
console.log({ error: err, reqId: id, winter })
|
||||
}
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, {
|
||||
context: { winter: 'has come' }
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== Client name
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@ -10,7 +10,7 @@ to define the API parameters instead of _camelCase_.
|
||||
|
||||
By default the event API uses https://www.typescriptlang.org/docs/handbook/generics.html[generics] to specify the request and response bodies and the `meta.context`. Currently we can't provide those definitions, but we are working to improve this situation.
|
||||
|
||||
You can't fid a partial definition of the request types by importing `RequestParams`, which it is used by default in the client and accepts a body (when needed) as a generic to provide a better specification.
|
||||
You can find a partial definition of the request types by importing `RequestParams`, which is used by default in the client and accepts a body (when needed) as a generic to provide a better specification.
|
||||
|
||||
The body defaults to `RequestBody` and `RequestNDBody`, which are defined as follows:
|
||||
|
||||
|
||||
@ -13,13 +13,21 @@ const client = new Client({ node: 'http://localhost:9200' })
|
||||
// promise API
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
@ -52,13 +60,21 @@ options, and the connection that has been used.
|
||||
// promise API
|
||||
const { body } = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, (err, { body }) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
@ -76,7 +92,11 @@ CAUTION: If you abort a request, the request will fail with a `RequestAbortedErr
|
||||
----
|
||||
const request = client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
@ -96,7 +116,11 @@ The same behavior is valid for the promise style API as well.
|
||||
----
|
||||
const request = client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
@ -118,16 +142,24 @@ If needed you can pass request specific options in a second object:
|
||||
// promise API
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
})
|
||||
|
||||
// calback API
|
||||
// callback API
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
@ -145,7 +177,7 @@ The supported request specific options are:
|
||||
_Default:_ `null`
|
||||
|
||||
|`requestTimeout`
|
||||
|`number` - Max request timeout for the request, it overrides the client default. +
|
||||
|`number` - Max request timeout for the request in milliseconds, it overrides the client default. +
|
||||
_Default:_ `30000`
|
||||
|
||||
|`maxRetries`
|
||||
@ -194,32 +226,64 @@ console.log(errors)
|
||||
|
||||
You can find the errors exported by the client in the table below.

[cols=2*]
[cols=3*]
|===
|*Error*
|*Description*
|*Properties*

|`ElasticsearchClientError`
|Every error inherits from this class, it is the basic error generated by the client.
a|* `name` - `string`
* `message` - `string`

|`TimeoutError`
|Generated when a request exceeds the `requestTimeout` option.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`ConnectionError`
|Generated when an error occurs during the request, it can be a connection error or a malformed stream of data.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`RequestAbortedError`
|Generated if the user calls the `request.abort()` method.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`NoLivingConnectionsError`
|Given the configuration, the ConnectionPool was not able to find a usable Connection for this request.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request

|`SerializationError`
|Generated if the serialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `object`, the object to serialize

|`DeserializationError`
|Generated if the deserialization fails.
a|* `name` - `string`
* `message` - `string`
* `data` - `string`, the string to deserialize

|`ConfigurationError`
|Generated if there is a malformed configuration or parameter.
a|* `name` - `string`
* `message` - `string`

|`ResponseError`
|Generated in case of a `4xx` or `5xx` response.
a|* `name` - `string`
* `message` - `string`
* `meta` - `object`, contains all the information about the request
* `body` - `object`, the response body
* `statusCode` - `number`, the response status code
* `headers` - `object`, the response headers
|===

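As a rough companion to the table above (not part of the diff), a sketch of inspecting these errors at runtime; the node URL and index name are placeholders:

----
'use strict'

const { Client, errors } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  try {
    const result = await client.search({
      index: 'my-index',
      body: { query: { match: { hello: 'world' } } }
    })
    console.log(result.body.hits.hits)
  } catch (err) {
    if (err instanceof errors.ResponseError) {
      // 4xx/5xx responses expose statusCode, headers and body on the error
      console.log(err.statusCode, err.body)
    } else if (err instanceof errors.TimeoutError) {
      // err.meta contains all the information about the failed request
      console.log(err.name, err.message)
    } else {
      console.log(err)
    }
  }
}

run()
----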
index.d.ts (vendored, 4569 lines changed)
File diff suppressed because it is too large

index.js (100 lines changed)
@ -13,7 +13,7 @@ const Transport = require('./lib/Transport')
|
||||
const Connection = require('./lib/Connection')
|
||||
const { ConnectionPool, CloudConnectionPool } = require('./lib/pool')
|
||||
// Helpers works only in Node.js >= 10
|
||||
const Helpers = nodeMajor < 10 ? null : require('./lib/Helpers')
|
||||
const Helpers = nodeMajor < 10 ? /* istanbul ignore next */ null : require('./lib/Helpers')
|
||||
const Serializer = require('./lib/Serializer')
|
||||
const errors = require('./lib/errors')
|
||||
const { ConfigurationError } = errors
|
||||
@ -21,12 +21,12 @@ const { ConfigurationError } = errors
|
||||
const kInitialOptions = Symbol('elasticsearchjs-initial-options')
|
||||
const kChild = Symbol('elasticsearchjs-child')
|
||||
const kExtensions = Symbol('elasticsearchjs-extensions')
|
||||
const kEventEmitter = Symbol('elasticsearchjs-event-emitter')
|
||||
|
||||
const buildApi = require('./api')
|
||||
|
||||
class Client extends EventEmitter {
|
||||
class Client {
|
||||
constructor (opts = {}) {
|
||||
super()
|
||||
if (opts.cloud) {
|
||||
const { id, username, password } = opts.cloud
|
||||
// the cloud id is `cluster-name:base64encodedurl`
|
||||
@ -84,34 +84,39 @@ class Client extends EventEmitter {
|
||||
generateRequestId: null,
|
||||
name: 'elasticsearch-js',
|
||||
auth: null,
|
||||
opaqueIdPrefix: null
|
||||
opaqueIdPrefix: null,
|
||||
context: null
|
||||
}, opts)
|
||||
|
||||
this[kInitialOptions] = options
|
||||
this[kExtensions] = []
|
||||
|
||||
this.name = options.name
|
||||
this.serializer = new options.Serializer()
|
||||
this.connectionPool = new options.ConnectionPool({
|
||||
pingTimeout: options.pingTimeout,
|
||||
resurrectStrategy: options.resurrectStrategy,
|
||||
ssl: options.ssl,
|
||||
agent: options.agent,
|
||||
Connection: options.Connection,
|
||||
auth: options.auth,
|
||||
emit: this.emit.bind(this),
|
||||
sniffEnabled: options.sniffInterval !== false ||
|
||||
options.sniffOnStart !== false ||
|
||||
options.sniffOnConnectionFault !== false
|
||||
})
|
||||
|
||||
// Add the connections before initialize the Transport
|
||||
if (opts[kChild] !== true) {
|
||||
if (opts[kChild] !== undefined) {
|
||||
this.serializer = options[kChild].serializer
|
||||
this.connectionPool = options[kChild].connectionPool
|
||||
this[kEventEmitter] = options[kChild].eventEmitter
|
||||
} else {
|
||||
this[kEventEmitter] = new EventEmitter()
|
||||
this.serializer = new options.Serializer()
|
||||
this.connectionPool = new options.ConnectionPool({
|
||||
pingTimeout: options.pingTimeout,
|
||||
resurrectStrategy: options.resurrectStrategy,
|
||||
ssl: options.ssl,
|
||||
agent: options.agent,
|
||||
Connection: options.Connection,
|
||||
auth: options.auth,
|
||||
emit: this[kEventEmitter].emit.bind(this[kEventEmitter]),
|
||||
sniffEnabled: options.sniffInterval !== false ||
|
||||
options.sniffOnStart !== false ||
|
||||
options.sniffOnConnectionFault !== false
|
||||
})
|
||||
// Add the connections before initialize the Transport
|
||||
this.connectionPool.addConnection(options.node || options.nodes)
|
||||
}
|
||||
|
||||
this.transport = new options.Transport({
|
||||
emit: this.emit.bind(this),
|
||||
emit: this[kEventEmitter].emit.bind(this[kEventEmitter]),
|
||||
connectionPool: this.connectionPool,
|
||||
serializer: this.serializer,
|
||||
maxRetries: options.maxRetries,
|
||||
@ -127,9 +132,11 @@ class Client extends EventEmitter {
|
||||
nodeSelector: options.nodeSelector,
|
||||
generateRequestId: options.generateRequestId,
|
||||
name: options.name,
|
||||
opaqueIdPrefix: options.opaqueIdPrefix
|
||||
opaqueIdPrefix: options.opaqueIdPrefix,
|
||||
context: options.context
|
||||
})
|
||||
|
||||
/* istanbul ignore else */
|
||||
if (Helpers !== null) {
|
||||
this.helpers = new Helpers({ client: this, maxRetries: options.maxRetries })
|
||||
}
|
||||
@ -140,9 +147,26 @@ class Client extends EventEmitter {
|
||||
ConfigurationError
|
||||
})
|
||||
|
||||
Object.keys(apis).forEach(api => {
|
||||
this[api] = apis[api]
|
||||
})
|
||||
const apiNames = Object.keys(apis)
|
||||
for (var i = 0, len = apiNames.length; i < len; i++) {
|
||||
this[apiNames[i]] = apis[apiNames[i]]
|
||||
}
|
||||
}
|
||||
|
||||
get emit () {
|
||||
return this[kEventEmitter].emit.bind(this[kEventEmitter])
|
||||
}
|
||||
|
||||
get on () {
|
||||
return this[kEventEmitter].on.bind(this[kEventEmitter])
|
||||
}
|
||||
|
||||
get once () {
|
||||
return this[kEventEmitter].once.bind(this[kEventEmitter])
|
||||
}
|
||||
|
||||
get off () {
|
||||
return this[kEventEmitter].off.bind(this[kEventEmitter])
|
||||
}
|
||||
|
||||
extend (name, opts, fn) {
|
||||
@ -186,23 +210,20 @@ class Client extends EventEmitter {
|
||||
child (opts) {
|
||||
// Merge the new options with the initial ones
|
||||
const initialOptions = Object.assign({}, this[kInitialOptions], opts)
|
||||
// Tell to the client that we are creating a child client
|
||||
initialOptions[kChild] = true
|
||||
// Pass to the child client the parent instances that cannot be overridden
initialOptions[kChild] = {
|
||||
connectionPool: this.connectionPool,
|
||||
serializer: this.serializer,
|
||||
eventEmitter: this[kEventEmitter]
|
||||
}
|
||||
|
||||
const client = new Client(initialOptions)
|
||||
// Reuse the same connection pool
|
||||
client.connectionPool = this.connectionPool
|
||||
client.transport.connectionPool = this.connectionPool
|
||||
// Share event listener
|
||||
const emitter = this.emit.bind(this)
|
||||
client.emit = emitter
|
||||
client.connectionPool.emit = emitter
|
||||
client.transport.emit = emitter
|
||||
client.on = this.on.bind(this)
|
||||
// Add parent extensions
|
||||
this[kExtensions].forEach(({ name, opts, fn }) => {
|
||||
client.extend(name, opts, fn)
|
||||
})
|
||||
if (this[kExtensions].length > 0) {
|
||||
this[kExtensions].forEach(({ name, opts, fn }) => {
|
||||
client.extend(name, opts, fn)
|
||||
})
|
||||
}
|
||||
return client
|
||||
}
|
||||
|
||||
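The `child()` method above now hands the parent's connection pool, serializer and event emitter to the child through the `kChild` options instead of patching them afterwards. A minimal usage sketch; every option value is illustrative:

----
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// The child shares the parent's connection pool, serializer and event emitter,
// but can override request defaults such as headers or maxRetries
const child = client.child({
  headers: { 'x-foo': 'bar' },
  maxRetries: 10
})

child.search({ index: 'my-index' }, (err, result) => {
  if (err) console.log(err)
})
----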
@ -237,6 +258,7 @@ function getAuth (node) {
|
||||
return null
|
||||
|
||||
function getUsernameAndPassword (node) {
|
||||
/* istanbul ignore else */
|
||||
if (typeof node === 'string') {
|
||||
const { username, password } = new URL(node)
|
||||
return {
|
||||
|
||||
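Since `emit`, `on`, `once` and `off` are now getters bound to the shared internal `EventEmitter`, existing listeners keep working. A short sketch; node URL and index are placeholders:

----
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// The lifecycle events are forwarded from the shared internal EventEmitter
client.on('request', (err, event) => {
  if (err) console.log('request error', err)
})
client.on('response', (err, event) => {
  // event.meta.request.params describes the HTTP call that was performed
  if (err) console.log('response error', err)
})

client.search({ index: 'my-index' }, (err, result) => {
  if (err) console.log(err)
})
----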
index.mjs (new file, 10 lines)
@ -0,0 +1,10 @@
|
||||
import mod from './index.js'
|
||||
|
||||
export default mod
|
||||
export const Client = mod.Client
|
||||
export const Transport = mod.Transport
|
||||
export const ConnectionPool = mod.ConnectionPool
|
||||
export const Connection = mod.Connection
|
||||
export const Serializer = mod.Serializer
|
||||
export const events = mod.events
|
||||
export const errors = mod.errors
|
||||
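The new `index.mjs` wrapper, together with the `"exports"` map added to `package.json` further down, lets ESM consumers import named bindings. A small sketch of both module styles; the node URL is a placeholder:

----
// CommonJS, unchanged:
// const { Client } = require('@elastic/elasticsearch')

// ESM, enabled by index.mjs and the "exports" field:
import { Client, errors } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })
console.log(Object.keys(errors))
----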
@ -20,7 +20,7 @@ const {
|
||||
} = require('./errors')
|
||||
|
||||
class Connection {
|
||||
constructor (opts = {}) {
|
||||
constructor (opts) {
|
||||
this.url = opts.url
|
||||
this.ssl = opts.ssl || null
|
||||
this.id = opts.id || stripAuth(opts.url.href)
|
||||
@ -38,13 +38,15 @@ class Connection {
|
||||
|
||||
if (typeof opts.agent === 'function') {
|
||||
this.agent = opts.agent()
|
||||
} else if (opts.agent === false) {
|
||||
this.agent = undefined
|
||||
} else {
|
||||
const keepAliveFalse = opts.agent && opts.agent.keepAlive === false
|
||||
const agentOptions = Object.assign({}, {
|
||||
keepAlive: true,
|
||||
keepAliveMsecs: 1000,
|
||||
maxSockets: keepAliveFalse ? Infinity : 256,
|
||||
maxFreeSockets: 256
|
||||
maxSockets: 256,
|
||||
maxFreeSockets: 256,
|
||||
scheduling: 'lifo'
|
||||
}, opts.agent)
|
||||
this.agent = this.url.protocol === 'http:'
|
||||
? new http.Agent(agentOptions)
|
||||
@ -64,6 +66,7 @@ class Connection {
|
||||
// https://github.com/nodejs/node/commit/b961d9fd83
|
||||
if (INVALID_PATH_REGEX.test(requestParams.path) === true) {
|
||||
callback(new TypeError(`ERR_UNESCAPED_CHARACTERS: ${requestParams.path}`), null)
|
||||
/* istanbul ignore next */
|
||||
return { abort: () => {} }
|
||||
}
|
||||
|
||||
@ -73,6 +76,7 @@ class Connection {
|
||||
// listen for the response event
|
||||
// TODO: handle redirects?
|
||||
request.on('response', response => {
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
@ -87,6 +91,7 @@ class Connection {
|
||||
|
||||
// handles request timeout
|
||||
request.on('timeout', () => {
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
@ -97,6 +102,7 @@ class Connection {
|
||||
|
||||
// handles request error
|
||||
request.on('error', err => {
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
@ -107,6 +113,7 @@ class Connection {
|
||||
// updates the ended state
|
||||
request.on('abort', () => {
|
||||
debug('Request aborted', params)
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
@ -121,7 +128,7 @@ class Connection {
|
||||
if (isStream(params.body) === true) {
|
||||
pump(params.body, request, err => {
|
||||
/* istanbul ignore if */
|
||||
if (err != null && ended === false) {
|
||||
if (err != null && /* istanbul ignore next */ ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
callback(err, null)
|
||||
@ -140,7 +147,9 @@ class Connection {
|
||||
if (this._openRequests > 0) {
|
||||
setTimeout(() => this.close(callback), 1000)
|
||||
} else {
|
||||
this.agent.destroy()
|
||||
if (this.agent !== undefined) {
|
||||
this.agent.destroy()
|
||||
}
|
||||
callback()
|
||||
}
|
||||
}
|
||||
@ -300,6 +309,7 @@ function resolve (host, path) {
|
||||
|
||||
function prepareHeaders (headers = {}, auth) {
|
||||
if (auth != null && headers.authorization == null) {
|
||||
/* istanbul ignore else */
|
||||
if (auth.apiKey) {
|
||||
if (typeof auth.apiKey === 'object') {
|
||||
headers.authorization = 'ApiKey ' + Buffer.from(`${auth.apiKey.id}:${auth.apiKey.api_key}`).toString('base64')
|
||||
|
||||
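The `Connection` constructor above accepts `agent: false` to skip the keep-alive agent, a function returning a custom agent, or an options object merged with the defaults (which now include `scheduling: 'lifo'`). A sketch of the three styles via the client options; values are illustrative:

----
'use strict'

const http = require('http')
const { Client } = require('@elastic/elasticsearch')

// 1. Tune the built-in agent options
const tuned = new Client({
  node: 'http://localhost:9200',
  agent: { keepAlive: true, maxSockets: 64 }
})

// 2. Provide your own agent instance
const custom = new Client({
  node: 'http://localhost:9200',
  agent: () => new http.Agent({ keepAlive: true })
})

// 3. Disable the agent entirely (a new socket per request)
const noAgent = new Client({
  node: 'http://localhost:9200',
  agent: false
})
----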
lib/Helpers.d.ts (vendored, 31 lines changed)
@ -3,17 +3,18 @@
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
import { Readable as ReadableStream } from 'stream'
|
||||
import { TransportRequestOptions, ApiResponse, RequestBody } from './Transport'
|
||||
import { Search, Bulk } from '../api/requestParams'
|
||||
import { TransportRequestOptions, ApiError, ApiResponse, RequestBody, Context } from './Transport'
|
||||
import { Search, Msearch, Bulk } from '../api/requestParams'
|
||||
|
||||
export default class Helpers {
|
||||
search<TDocument = unknown, TRequestBody extends RequestBody = Record<string, any>>(params: Search<TRequestBody>, options?: TransportRequestOptions): Promise<TDocument[]>
|
||||
scrollSearch<TDocument = unknown, TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = unknown>(params: Search<TRequestBody>, options?: TransportRequestOptions): AsyncIterable<ScrollSearchResponse<TDocument, TResponse, TContext>>
|
||||
scrollSearch<TDocument = unknown, TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: Search<TRequestBody>, options?: TransportRequestOptions): AsyncIterable<ScrollSearchResponse<TDocument, TResponse, TContext>>
|
||||
scrollDocuments<TDocument = unknown, TRequestBody extends RequestBody = Record<string, any>>(params: Search<TRequestBody>, options?: TransportRequestOptions): AsyncIterable<TDocument>
|
||||
msearch(options?: MsearchHelperOptions): MsearchHelper
|
||||
bulk<TDocument = unknown>(options: BulkHelperOptions<TDocument>): BulkHelper<BulkStats>
|
||||
}
|
||||
|
||||
export interface ScrollSearchResponse<TDocument = unknown, TResponse = Record<string, any>, TContext = unknown> extends ApiResponse<TResponse, TContext> {
|
||||
export interface ScrollSearchResponse<TDocument = unknown, TResponse = Record<string, any>, TContext = Context> extends ApiResponse<TResponse, TContext> {
|
||||
clear: () => Promise<void>
|
||||
documents: TDocument[]
|
||||
}
|
||||
@ -68,10 +69,11 @@ export interface BulkHelperOptions<TDocument = unknown> extends Omit<Bulk, 'body
|
||||
datasource: TDocument[] | Buffer | ReadableStream | AsyncIterator<TDocument>
|
||||
onDocument: (doc: TDocument) => Action
|
||||
flushBytes?: number
|
||||
flushInterval?: number
|
||||
concurrency?: number
|
||||
retries?: number
|
||||
wait?: number,
|
||||
onDrop?: (doc: OnDropDocument<TDocument>) => void,
|
||||
wait?: number
|
||||
onDrop?: (doc: OnDropDocument<TDocument>) => void
|
||||
refreshOnCompletion?: boolean | string
|
||||
}
|
||||
|
||||
@ -87,4 +89,19 @@ export interface OnDropDocument<TDocument = unknown> {
|
||||
}
|
||||
document: TDocument
|
||||
retried: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export interface MsearchHelperOptions extends Omit<Msearch, 'body'> {
|
||||
operations?: number
|
||||
flushInterval?: number
|
||||
concurrency?: number
|
||||
retries?: number
|
||||
wait?: number
|
||||
}
|
||||
|
||||
declare type callbackFn<Response, Context> = (err: ApiError, result: ApiResponse<Response, Context>) => void;
|
||||
export interface MsearchHelper extends Promise<void> {
|
||||
stop(error?: Error): void
|
||||
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(header: Omit<Search, 'body'>, body: TRequestBody): Promise<ApiResponse<TResponse, TContext>>
|
||||
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(header: Omit<Search, 'body'>, body: TRequestBody, callback: callbackFn<TResponse, TContext>): void
|
||||
}
|
||||
|
||||
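The helper typings above also cover the scroll helpers. A minimal sketch of `scrollSearch` and `scrollDocuments`, assuming Node.js >= 10 (where `client.helpers` is available); the node URL and index are placeholders:

----
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  // scrollSearch yields one response per page, exposing `documents` and `clear()`
  for await (const response of client.helpers.scrollSearch({
    index: 'my-index',
    body: { query: { match_all: {} } }
  })) {
    console.log(response.documents.length)
    // call `await response.clear()` to stop early and release the scroll context
  }

  // scrollDocuments yields the `_source` of each hit directly
  for await (const doc of client.helpers.scrollDocuments({
    index: 'my-index',
    body: { query: { match_all: {} } }
  })) {
    console.log(doc)
  }
}

run().catch(console.log)
----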
lib/Helpers.js (360 lines changed)
@ -4,13 +4,16 @@
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
|
||||
const { Readable } = require('stream')
|
||||
const { promisify } = require('util')
|
||||
const { ResponseError, ConfigurationError } = require('./errors')
|
||||
|
||||
const pImmediate = promisify(setImmediate)
|
||||
const sleep = promisify(setTimeout)
|
||||
const kGetHits = Symbol('elasticsearch-get-hits')
|
||||
const kClient = Symbol('elasticsearch-client')
|
||||
/* istanbul ignore next */
|
||||
const noop = () => {}
|
||||
|
||||
class Helpers {
|
||||
@ -19,23 +22,22 @@ class Helpers {
|
||||
this.maxRetries = opts.maxRetries
|
||||
}
|
||||
|
||||
[kGetHits] (body) {
|
||||
if (body.hits && body.hits.hits) {
|
||||
return body.hits.hits.map(d => d._source)
|
||||
}
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs a search operation. The only difference between client.search and this utility,
|
||||
* is that we are only returning the hits to the user and not the full ES response.
|
||||
* This helper automatically adds `filter_path=hits.hits._source` to the querystring,
|
||||
* as it will only need the documents source.
|
||||
* @param {object} params - The Elasticsearch's search parameters.
|
||||
* @param {object} options - The client optional configuration for this request.
|
||||
* @return {array} The documents that matched the request.
|
||||
*/
|
||||
async search (params, options) {
|
||||
const response = await this[kClient].search(params, options)
|
||||
return this[kGetHits](response.body)
|
||||
appendFilterPath('hits.hits._source', params, true)
|
||||
const { body } = await this[kClient].search(params, options)
|
||||
if (body.hits && body.hits.hits) {
|
||||
return body.hits.hits.map(d => d._source)
|
||||
}
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
@ -63,6 +65,8 @@ class Helpers {
|
||||
options.ignore = [429]
|
||||
}
|
||||
params.scroll = params.scroll || '1m'
|
||||
appendFilterPath('_scroll_id', params, false)
|
||||
const { method, body, index, ...querystring } = params
|
||||
|
||||
let response = null
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
@ -74,33 +78,31 @@ class Helpers {
|
||||
throw new ResponseError(response)
|
||||
}
|
||||
|
||||
let scrollId = response.body._scroll_id
|
||||
let scroll_id = response.body._scroll_id
|
||||
let stop = false
|
||||
const clear = async () => {
|
||||
stop = true
|
||||
await this[kClient].clearScroll(
|
||||
{ body: { scroll_id: scrollId } },
|
||||
{ body: { scroll_id } },
|
||||
{ ignore: [400] }
|
||||
)
|
||||
}
|
||||
|
||||
while (response.body.hits.hits.length > 0) {
|
||||
scrollId = response.body._scroll_id
|
||||
while (response.body.hits && response.body.hits.hits.length > 0) {
|
||||
scroll_id = response.body._scroll_id
|
||||
response.clear = clear
|
||||
response.documents = this[kGetHits](response.body)
|
||||
addDocumentsGetter(response)
|
||||
|
||||
yield response
|
||||
|
||||
if (!scrollId || stop === true) {
|
||||
if (!scroll_id || stop === true) {
|
||||
break
|
||||
}
|
||||
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
response = await this[kClient].scroll({
|
||||
scroll: params.scroll,
|
||||
body: {
|
||||
scroll_id: scrollId
|
||||
}
|
||||
...querystring,
|
||||
body: { scroll_id }
|
||||
}, options)
|
||||
if (response.statusCode !== 429) break
|
||||
await sleep(wait)
|
||||
@ -120,11 +122,14 @@ class Helpers {
|
||||
* }
|
||||
* ```
|
||||
* Each document is what you will find by running a scrollSearch and iterating on the hits array.
|
||||
* This helper automatically adds `filter_path=hits.hits._source` to the querystring,
|
||||
* as it will only need the documents source.
|
||||
* @param {object} params - The Elasticsearch's search parameters.
|
||||
* @param {object} options - The client optional configuration for this request.
|
||||
* @return {iterator} the async iterator
|
||||
*/
|
||||
async * scrollDocuments (params, options) {
|
||||
appendFilterPath('hits.hits._source', params, true)
|
||||
for await (const { documents } of this.scrollSearch(params)) {
|
||||
for (const document of documents) {
|
||||
yield document
|
||||
@ -132,21 +137,266 @@ class Helpers {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a msearch helper instance. Once you configure it, you can use the provided
|
||||
* `search` method to add new searches in the queue.
|
||||
* @param {object} options - The configuration of the msearch operations.
|
||||
* @return {object} The possible operations to run.
|
||||
*/
|
||||
msearch (options = {}) {
|
||||
const client = this[kClient]
|
||||
const {
|
||||
operations = 5,
|
||||
concurrency = 5,
|
||||
flushInterval = 500,
|
||||
retries = this.maxRetries,
|
||||
wait = 5000,
|
||||
...msearchOptions
|
||||
} = options
|
||||
|
||||
let stopReading = false
|
||||
let stopError = null
|
||||
let timeoutRef = null
|
||||
const operationsStream = new Readable({
|
||||
objectMode: true,
|
||||
read (size) {}
|
||||
})
|
||||
|
||||
const p = iterate()
|
||||
const helper = {
|
||||
then (onFulfilled, onRejected) {
|
||||
return p.then(onFulfilled, onRejected)
|
||||
},
|
||||
catch (onRejected) {
|
||||
return p.catch(onRejected)
|
||||
},
|
||||
stop (error = null) {
|
||||
if (stopReading === true) return
|
||||
stopReading = true
|
||||
stopError = error
|
||||
operationsStream.push(null)
|
||||
},
|
||||
// TODO: support abort a single search?
|
||||
// NOTE: the validation checks are synchronous and the callback/promise will
|
||||
// be resolved in the same tick. We might want to fix this in the future.
|
||||
search (header, body, callback) {
|
||||
if (stopReading === true) {
|
||||
const error = stopError === null
|
||||
? new ConfigurationError('The msearch processor has been stopped')
|
||||
: stopError
|
||||
return callback ? callback(error, {}) : Promise.reject(error)
|
||||
}
|
||||
|
||||
if (!(typeof header === 'object' && header !== null && !Array.isArray(header))) {
|
||||
const error = new ConfigurationError('The header should be an object')
|
||||
return callback ? callback(error, {}) : Promise.reject(error)
|
||||
}
|
||||
|
||||
if (!(typeof body === 'object' && body !== null && !Array.isArray(body))) {
|
||||
const error = new ConfigurationError('The body should be an object')
|
||||
return callback ? callback(error, {}) : Promise.reject(error)
|
||||
}
|
||||
|
||||
let promise = null
|
||||
if (callback === undefined) {
|
||||
let onFulfilled = null
|
||||
let onRejected = null
|
||||
promise = new Promise((resolve, reject) => {
|
||||
onFulfilled = resolve
|
||||
onRejected = reject
|
||||
})
|
||||
callback = function callback (err, result) {
|
||||
err ? onRejected(err) : onFulfilled(result)
|
||||
}
|
||||
}
|
||||
|
||||
operationsStream.push([header, body, callback])
|
||||
|
||||
if (promise !== null) {
|
||||
return promise
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return helper
|
||||
|
||||
async function iterate () {
|
||||
const { semaphore, finish } = buildSemaphore()
|
||||
const msearchBody = []
|
||||
const callbacks = []
|
||||
let loadedOperations = 0
|
||||
timeoutRef = setTimeout(onFlushTimeout, flushInterval)
|
||||
|
||||
for await (const operation of operationsStream) {
|
||||
timeoutRef.refresh()
|
||||
loadedOperations += 1
|
||||
msearchBody.push(operation[0], operation[1])
|
||||
callbacks.push(operation[2])
|
||||
if (loadedOperations >= operations) {
|
||||
const send = await semaphore()
|
||||
send(msearchBody.slice(), callbacks.slice())
|
||||
msearchBody.length = 0
|
||||
callbacks.length = 0
|
||||
loadedOperations = 0
|
||||
}
|
||||
}
|
||||
|
||||
clearTimeout(timeoutRef)
|
||||
// In some cases the previous http call has not finished yet,
// or we didn't reach the flush threshold, so we force one last operation.
if (loadedOperations > 0) {
|
||||
const send = await semaphore()
|
||||
send(msearchBody, callbacks)
|
||||
}
|
||||
|
||||
await finish()
|
||||
|
||||
if (stopError !== null) {
|
||||
throw stopError
|
||||
}
|
||||
|
||||
async function onFlushTimeout () {
|
||||
if (loadedOperations === 0) return
|
||||
const msearchBodyCopy = msearchBody.slice()
|
||||
const callbacksCopy = callbacks.slice()
|
||||
msearchBody.length = 0
|
||||
callbacks.length = 0
|
||||
loadedOperations = 0
|
||||
try {
|
||||
const send = await semaphore()
|
||||
send(msearchBodyCopy, callbacksCopy)
|
||||
} catch (err) {
|
||||
/* istanbul ignore next */
|
||||
helper.stop(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This function builds a semaphore using the concurrency
// option of the msearch helper. It is used inside the iterator
// to guarantee that no more than the allowed number of operations
// run at the same time.
// It returns a semaphore function which resolves in the next tick
// if we didn't reach the maximum concurrency yet, otherwise it returns
// a promise that resolves as soon as one of the running requests has finished.
// The semaphore function resolves a send function, which will be used
// to send the actual msearch request.
// It also returns a finish function, which returns a promise that is resolved
// when there are no longer requests running.
function buildSemaphore () {
|
||||
let resolveSemaphore = null
|
||||
let resolveFinish = null
|
||||
let running = 0
|
||||
|
||||
return { semaphore, finish }
|
||||
|
||||
function finish () {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (running === 0) {
|
||||
resolve()
|
||||
} else {
|
||||
resolveFinish = resolve
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function semaphore () {
|
||||
if (running < concurrency) {
|
||||
running += 1
|
||||
return pImmediate(send)
|
||||
} else {
|
||||
return new Promise((resolve, reject) => {
|
||||
resolveSemaphore = resolve
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function send (msearchBody, callbacks) {
|
||||
/* istanbul ignore if */
|
||||
if (running > concurrency) {
|
||||
throw new Error('Max concurrency reached')
|
||||
}
|
||||
msearchOperation(msearchBody, callbacks, () => {
|
||||
running -= 1
|
||||
if (resolveSemaphore) {
|
||||
running += 1
|
||||
resolveSemaphore(send)
|
||||
resolveSemaphore = null
|
||||
} else if (resolveFinish && running === 0) {
|
||||
resolveFinish()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function msearchOperation (msearchBody, callbacks, done) {
|
||||
let retryCount = retries
|
||||
|
||||
// Instead of going full on async-await, which would make the code easier to read,
// we have decided to use callback style instead.
// This is because every time we use async await, V8 will create multiple promises
// behind the scenes, making the code slightly slower.
tryMsearch(msearchBody, callbacks, retrySearch)
|
||||
function retrySearch (msearchBody, callbacks) {
|
||||
if (msearchBody.length > 0 && retryCount > 0) {
|
||||
retryCount -= 1
|
||||
setTimeout(tryMsearch, wait, msearchBody, callbacks, retrySearch)
|
||||
return
|
||||
}
|
||||
|
||||
done()
|
||||
}
|
||||
|
||||
// This function never returns an error, if the msearch operation fails,
|
||||
// the error is dispatched to all search executors.
|
||||
function tryMsearch (msearchBody, callbacks, done) {
|
||||
client.msearch(Object.assign({}, msearchOptions, { body: msearchBody }), (err, results) => {
|
||||
const retryBody = []
|
||||
const retryCallbacks = []
|
||||
if (err) {
|
||||
addDocumentsGetter(results)
|
||||
for (const callback of callbacks) {
|
||||
callback(err, results)
|
||||
}
|
||||
return done(retryBody, retryCallbacks)
|
||||
}
|
||||
const { responses } = results.body
|
||||
for (let i = 0, len = responses.length; i < len; i++) {
|
||||
const response = responses[i]
|
||||
if (response.status === 429 && retryCount > 0) {
|
||||
retryBody.push(msearchBody[i * 2])
|
||||
retryBody.push(msearchBody[(i * 2) + 1])
|
||||
retryCallbacks.push(callbacks[i])
|
||||
continue
|
||||
}
|
||||
const result = { ...results, body: response }
|
||||
addDocumentsGetter(result)
|
||||
if (response.status >= 400) {
|
||||
callbacks[i](new ResponseError(result), result)
|
||||
} else {
|
||||
callbacks[i](null, result)
|
||||
}
|
||||
}
|
||||
done(retryBody, retryCallbacks)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
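The msearch helper above queues individual searches and flushes them as a single `_msearch` call once `operations` searches are collected or `flushInterval` expires. A minimal usage sketch; index and query are placeholders:

----
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// Collect up to 5 searches (or wait 500 ms) before sending one msearch request
const m = client.helpers.msearch({ operations: 5, flushInterval: 500 })

// Promise style
m.search(
  { index: 'my-index' },
  { query: { match: { hello: 'world' } } }
)
  .then(result => console.log(result.body.hits.hits))
  .catch(err => console.log(err))

// Callback style
m.search(
  { index: 'my-index' },
  { query: { match: { hello: 'world' } } },
  (err, result) => {
    if (err) console.log(err)
  }
)

// Flush anything still queued and stop accepting new searches
setTimeout(() => m.stop(), 1000)
----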
/**
|
||||
* Creates a bulk helper instance. Once you configure it, you can pick which operation
|
||||
* to execute with the given dataset, index, create, update, and delete.
|
||||
* @param {object} options - The configuration of the bulk operation.
|
||||
* @return {object} The possible orations to run with the datasource.
|
||||
* @return {object} The possible operations to run with the datasource.
|
||||
*/
|
||||
bulk (options) {
|
||||
// TODO: add an interval to force flush the body
|
||||
// to handle the slow producer problem
|
||||
const client = this[kClient]
|
||||
const { serialize, deserialize } = client.serializer
|
||||
const {
|
||||
datasource,
|
||||
onDocument,
|
||||
flushBytes = 5000000,
|
||||
flushInterval = 30000,
|
||||
concurrency = 5,
|
||||
retries = this.maxRetries,
|
||||
wait = 5000,
|
||||
@ -166,6 +416,7 @@ class Helpers {
|
||||
}
|
||||
|
||||
let shouldAbort = false
|
||||
let timeoutRef = null
|
||||
const stats = {
|
||||
total: 0,
|
||||
failed: 0,
|
||||
@ -177,8 +428,7 @@ class Helpers {
|
||||
}
|
||||
|
||||
const p = iterate()
|
||||
|
||||
return {
|
||||
const helper = {
|
||||
then (onFulfilled, onRejected) {
|
||||
return p.then(onFulfilled, onRejected)
|
||||
},
|
||||
@ -186,12 +436,15 @@ class Helpers {
|
||||
return p.catch(onRejected)
|
||||
},
|
||||
abort () {
|
||||
clearTimeout(timeoutRef)
|
||||
shouldAbort = true
|
||||
stats.aborted = true
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
return helper
|
||||
|
||||
/**
|
||||
* Function that iterates over the given datasource and start a bulk operation as soon
|
||||
* as it reaches the configured bulk size. It's designed to use the Node.js asynchronous
|
||||
@ -208,9 +461,11 @@ class Helpers {
|
||||
let actionBody = ''
|
||||
let payloadBody = ''
|
||||
let chunkBytes = 0
|
||||
timeoutRef = setTimeout(onFlushTimeout, flushInterval)
|
||||
|
||||
for await (const chunk of datasource) {
|
||||
if (shouldAbort === true) break
|
||||
timeoutRef.refresh()
|
||||
const action = onDocument(chunk)
|
||||
const operation = Array.isArray(action)
|
||||
? Object.keys(action[0])[0]
|
||||
@ -219,21 +474,20 @@ class Helpers {
|
||||
actionBody = serialize(action)
|
||||
payloadBody = typeof chunk === 'string' ? chunk : serialize(chunk)
|
||||
chunkBytes += Buffer.byteLength(actionBody) + Buffer.byteLength(payloadBody)
|
||||
bulkBody.push(actionBody)
|
||||
bulkBody.push(payloadBody)
|
||||
bulkBody.push(actionBody, payloadBody)
|
||||
} else if (operation === 'update') {
|
||||
actionBody = serialize(action[0])
|
||||
payloadBody = typeof chunk === 'string'
|
||||
? `{doc:${chunk}}`
|
||||
? `{"doc":${chunk}}`
|
||||
: serialize({ doc: chunk, ...action[1] })
|
||||
chunkBytes += Buffer.byteLength(actionBody) + Buffer.byteLength(payloadBody)
|
||||
bulkBody.push(actionBody)
|
||||
bulkBody.push(payloadBody)
|
||||
bulkBody.push(actionBody, payloadBody)
|
||||
} else if (operation === 'delete') {
|
||||
actionBody = serialize(action)
|
||||
chunkBytes += Buffer.byteLength(actionBody)
|
||||
bulkBody.push(actionBody)
|
||||
} else {
|
||||
clearTimeout(timeoutRef)
|
||||
throw new ConfigurationError(`Bulk helper invalid action: '${operation}'`)
|
||||
}
|
||||
|
||||
@ -246,6 +500,7 @@ class Helpers {
|
||||
}
|
||||
}
|
||||
|
||||
clearTimeout(timeoutRef)
|
||||
// In some cases the previous http call has not finished yet,
// or we didn't reach the flush bytes threshold, so we force one last operation.
if (shouldAbort === false && chunkBytes > 0) {
|
||||
@ -268,6 +523,21 @@ class Helpers {
|
||||
stats.total = stats.successful + stats.failed
|
||||
|
||||
return stats
|
||||
|
||||
async function onFlushTimeout () {
|
||||
if (chunkBytes === 0) return
|
||||
stats.bytes += chunkBytes
|
||||
const bulkBodyCopy = bulkBody.slice()
|
||||
bulkBody.length = 0
|
||||
chunkBytes = 0
|
||||
try {
|
||||
const send = await semaphore()
|
||||
send(bulkBodyCopy)
|
||||
} catch (err) {
|
||||
/* istanbul ignore next */
|
||||
helper.abort()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This function builds a semaphore using the concurrency
|
||||
@ -308,6 +578,7 @@ class Helpers {
|
||||
|
||||
function semaphore () {
|
||||
if (running < concurrency) {
|
||||
running += 1
|
||||
return pImmediate(send)
|
||||
} else {
|
||||
return new Promise((resolve, reject) => {
|
||||
@ -318,10 +589,9 @@ class Helpers {
|
||||
|
||||
function send (bulkBody) {
|
||||
/* istanbul ignore if */
|
||||
if (running >= concurrency) {
|
||||
if (running > concurrency) {
|
||||
throw new Error('Max concurrency reached')
|
||||
}
|
||||
running += 1
|
||||
bulkOperation(bulkBody, err => {
|
||||
running -= 1
|
||||
if (err) {
|
||||
@ -329,6 +599,7 @@ class Helpers {
|
||||
error = err
|
||||
}
|
||||
if (resolveSemaphore) {
|
||||
running += 1
|
||||
resolveSemaphore(send)
|
||||
resolveSemaphore = null
|
||||
} else if (resolveFinish && running === 0) {
|
||||
@ -371,6 +642,7 @@ class Helpers {
|
||||
operation: deserialize(bulkBody[i]),
|
||||
document: operation !== 'delete'
|
||||
? deserialize(bulkBody[i + 1])
|
||||
/* istanbul ignore next */
|
||||
: null,
|
||||
retried: isRetrying
|
||||
})
|
||||
@ -402,6 +674,7 @@ class Helpers {
|
||||
// but the ES node were handling too many operations.
|
||||
if (status === 429) {
|
||||
retry.push(bulkBody[indexSlice])
|
||||
/* istanbul ignore next */
|
||||
if (operation !== 'delete') {
|
||||
retry.push(bulkBody[indexSlice + 1])
|
||||
}
|
||||
@ -428,4 +701,27 @@ class Helpers {
|
||||
}
|
||||
}
|
||||
|
||||
// Using a getter will improve the overall performance of the code,
// as we will read the documents only if needed.
function addDocumentsGetter (result) {
|
||||
Object.defineProperty(result, 'documents', {
|
||||
get () {
|
||||
if (this.body.hits && this.body.hits.hits) {
|
||||
return this.body.hits.hits.map(d => d._source)
|
||||
}
|
||||
return []
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function appendFilterPath (filter, params, force) {
|
||||
if (params.filter_path !== undefined) {
|
||||
params.filter_path += ',' + filter
|
||||
} else if (params.filterPath !== undefined) {
|
||||
params.filterPath += ',' + filter
|
||||
} else if (force === true) {
|
||||
params.filter_path = filter
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Helpers
|
||||
|
||||
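To close out the Helpers changes, a small sketch of the bulk helper with the options shown above (`flushBytes`, `flushInterval`, `onDrop`); the dataset and index are illustrative:

----
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const dataset = [
    { hello: 'world' },
    { hello: 'node' }
  ]

  const stats = await client.helpers.bulk({
    datasource: dataset,
    flushBytes: 5000000,
    flushInterval: 30000,
    // Decide the bulk action for each document
    onDocument (doc) {
      return { index: { _index: 'my-index' } }
    },
    // Called for documents that could not be indexed after the retries
    onDrop (doc) {
      console.log('dropped', doc.document)
    }
  })

  // stats includes counters such as total, successful, failed, bytes and aborted
  console.log(stats)
}

run().catch(console.log)
----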
lib/Transport.d.ts (vendored, 14 lines changed)
@ -13,6 +13,8 @@ export type ApiError = errors.ConfigurationError | errors.ConnectionError |
|
||||
errors.NoLivingConnectionsError | errors.ResponseError |
|
||||
errors.TimeoutError | errors.RequestAbortedError
|
||||
|
||||
export type Context = Record<string, unknown> | null
|
||||
|
||||
export interface nodeSelectorFn {
|
||||
(connections: Connection[]): Connection;
|
||||
}
|
||||
@ -45,14 +47,14 @@ interface TransportOptions {
|
||||
opaqueIdPrefix?: string;
|
||||
}
|
||||
|
||||
export interface RequestEvent<TResponse = Record<string, any>, TContext = unknown> {
|
||||
export interface RequestEvent<TResponse = Record<string, any>, TContext = Context> {
|
||||
body: TResponse;
|
||||
statusCode: number | null;
|
||||
headers: Record<string, any> | null;
|
||||
warnings: string[] | null;
|
||||
meta: {
|
||||
context: TContext;
|
||||
name: string;
|
||||
name: string | symbol;
|
||||
request: {
|
||||
params: TransportRequestParams;
|
||||
options: TransportRequestOptions;
|
||||
@ -70,7 +72,7 @@ export interface RequestEvent<TResponse = Record<string, any>, TContext = unknow
|
||||
|
||||
// ApiResponse and RequestEvent are the same thing,
// we are doing this to have clearer names
export interface ApiResponse<TResponse = Record<string, any>, TContext = unknown> extends RequestEvent<TResponse, TContext> {}
|
||||
export interface ApiResponse<TResponse = Record<string, any>, TContext = Context> extends RequestEvent<TResponse, TContext> {}
|
||||
|
||||
export type RequestBody<T = Record<string, any>> = T | string | Buffer | ReadableStream
|
||||
export type RequestNDBody<T = Record<string, any>[]> = T | string | string[] | Buffer | ReadableStream
|
||||
@ -80,7 +82,7 @@ export interface TransportRequestParams {
|
||||
path: string;
|
||||
body?: RequestBody;
|
||||
bulkBody?: RequestNDBody;
|
||||
querystring?: Record<string, any>;
|
||||
querystring?: Record<string, any> | string;
|
||||
}
|
||||
|
||||
export interface TransportRequestOptions {
|
||||
@ -92,7 +94,7 @@ export interface TransportRequestOptions {
|
||||
querystring?: Record<string, any>;
|
||||
compression?: 'gzip';
|
||||
id?: any;
|
||||
context?: any;
|
||||
context?: Context;
|
||||
warnings?: string[];
|
||||
opaqueId?: string;
|
||||
}
|
||||
@ -136,7 +138,7 @@ export default class Transport {
|
||||
_nextSniff: number;
|
||||
_isSniffing: boolean;
|
||||
constructor(opts: TransportOptions);
|
||||
request(params: TransportRequestParams, options?: TransportRequestOptions): Promise<ApiResponse>;
|
||||
request(params: TransportRequestParams, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse>;
|
||||
request(params: TransportRequestParams, options?: TransportRequestOptions, callback?: (err: ApiError, result: ApiResponse) => void): TransportRequestCallback;
|
||||
getConnection(opts: TransportGetConnectionOptions): Connection | null;
|
||||
sniff(opts?: TransportSniffOptions, callback?: (...args: any[]) => void): void;
|
||||
|
||||
@ -22,7 +22,7 @@ const clientVersion = require('../package.json').version
|
||||
const userAgent = `elasticsearch-js/${clientVersion} (${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})`
|
||||
|
||||
class Transport {
|
||||
constructor (opts = {}) {
|
||||
constructor (opts) {
|
||||
if (typeof opts.compression === 'string' && opts.compression !== 'gzip') {
|
||||
throw new ConfigurationError(`Invalid compression: '${opts.compression}'`)
|
||||
}
|
||||
@ -33,6 +33,7 @@ class Transport {
|
||||
this.requestTimeout = toMs(opts.requestTimeout)
|
||||
this.suggestCompression = opts.suggestCompression === true
|
||||
this.compression = opts.compression || false
|
||||
this.context = opts.context || null
|
||||
this.headers = Object.assign({},
|
||||
{ 'user-agent': userAgent },
|
||||
opts.suggestCompression === true ? { 'accept-encoding': 'gzip,deflate' } : null,
|
||||
@ -51,7 +52,6 @@ class Transport {
|
||||
} else if (opts.nodeSelector === 'round-robin') {
|
||||
this.nodeSelector = roundRobinSelector()
|
||||
} else if (opts.nodeSelector === 'random') {
|
||||
/* istanbul ignore next */
|
||||
this.nodeSelector = randomSelector
|
||||
} else {
|
||||
this.nodeSelector = roundRobinSelector()
|
||||
@ -88,7 +88,7 @@ class Transport {
|
||||
}
|
||||
|
||||
const meta = {
|
||||
context: options.context || null,
|
||||
context: null,
|
||||
request: {
|
||||
params: null,
|
||||
options: null,
|
||||
@ -100,6 +100,14 @@ class Transport {
|
||||
aborted: false
|
||||
}
|
||||
|
||||
if (this.context != null && options.context != null) {
|
||||
meta.context = Object.assign({}, this.context, options.context)
|
||||
} else if (this.context != null) {
|
||||
meta.context = this.context
|
||||
} else if (options.context != null) {
|
||||
meta.context = options.context
|
||||
}
|
||||
|
||||
const result = {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
@ -385,7 +393,7 @@ class Transport {
|
||||
}
|
||||
|
||||
debug('Sniffing ended successfully', result.body)
|
||||
const protocol = result.meta.connection.url.protocol || 'http:'
|
||||
const protocol = result.meta.connection.url.protocol || /* istanbul ignore next */ 'http:'
|
||||
const hosts = this.connectionPool.nodesToHost(result.body.nodes, protocol)
|
||||
this.connectionPool.update(hosts)
|
||||
|
||||
|
||||
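The Transport changes above merge the client-level `context` with the per-request one, with the request-level keys winning on conflict. A sketch of how the two layers combine; the keys are illustrative:

----
'use strict'

const { Client } = require('@elastic/elasticsearch')

// Client-level context, attached to the meta of every event
const client = new Client({
  node: 'http://localhost:9200',
  context: { winter: 'is coming' }
})

client.on('request', (err, event) => {
  if (err) return console.log(err)
  // For the call below the two objects are merged:
  // { winter: 'is coming', winter_is: 'here' }
  console.log(event.meta.context)
})

client.search(
  { index: 'my-index' },
  { context: { winter_is: 'here' } },
  (err, result) => {
    if (err) console.log(err)
  }
)
----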
lib/errors.d.ts (vendored, 14 lines changed)
@ -2,28 +2,28 @@
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
import { ApiResponse } from './Transport'
|
||||
import { ApiResponse, Context } from './Transport'
|
||||
|
||||
export declare class ElasticsearchClientError extends Error {
|
||||
name: string;
|
||||
message: string;
|
||||
}
|
||||
|
||||
export declare class TimeoutError<TResponse = Record<string, any>, TContext = unknown> extends ElasticsearchClientError {
|
||||
export declare class TimeoutError<TResponse = Record<string, any>, TContext = Context> extends ElasticsearchClientError {
|
||||
name: string;
|
||||
message: string;
|
||||
meta: ApiResponse<TResponse, TContext>;
|
||||
constructor(message: string, meta: ApiResponse);
|
||||
}
|
||||
|
||||
export declare class ConnectionError<TResponse = Record<string, any>, TContext = unknown> extends ElasticsearchClientError {
|
||||
export declare class ConnectionError<TResponse = Record<string, any>, TContext = Context> extends ElasticsearchClientError {
|
||||
name: string;
|
||||
message: string;
|
||||
meta: ApiResponse<TResponse, TContext>;
|
||||
constructor(message: string, meta: ApiResponse);
|
||||
}
|
||||
|
||||
export declare class NoLivingConnectionsError<TResponse = Record<string, any>, TContext = unknown> extends ElasticsearchClientError {
|
||||
export declare class NoLivingConnectionsError<TResponse = Record<string, any>, TContext = Context> extends ElasticsearchClientError {
|
||||
name: string;
|
||||
message: string;
|
||||
meta: ApiResponse<TResponse, TContext>;
|
||||
@ -50,7 +50,7 @@ export declare class ConfigurationError extends ElasticsearchClientError {
|
||||
constructor(message: string);
|
||||
}
|
||||
|
||||
export declare class ResponseError<TResponse = Record<string, any>, TContext = unknown> extends ElasticsearchClientError {
|
||||
export declare class ResponseError<TResponse = Record<string, any>, TContext = Context> extends ElasticsearchClientError {
|
||||
name: string;
|
||||
message: string;
|
||||
meta: ApiResponse<TResponse, TContext>;
|
||||
@ -60,9 +60,9 @@ export declare class ResponseError<TResponse = Record<string, any>, TContext = u
|
||||
constructor(meta: ApiResponse);
|
||||
}
|
||||
|
||||
export declare class RequestAbortedError<TResponse = Record<string, any>, TContext = unknown> extends ElasticsearchClientError {
|
||||
export declare class RequestAbortedError<TResponse = Record<string, any>, TContext = Context> extends ElasticsearchClientError {
|
||||
name: string;
|
||||
message: string;
|
||||
meta: ApiResponse<TResponse, TContext>;
|
||||
constructor(message: string, meta: ApiResponse);
|
||||
}
|
||||
}
|
||||
|
||||
@ -52,6 +52,7 @@ class BaseConnectionPool {
|
||||
}
|
||||
|
||||
if (opts.ssl == null) opts.ssl = this._ssl
|
||||
/* istanbul ignore else */
|
||||
if (opts.agent == null) opts.agent = this._agent
|
||||
|
||||
const connection = new this.Connection(opts)
|
||||
@ -201,6 +202,7 @@ class BaseConnectionPool {
|
||||
}
|
||||
|
||||
address = address.slice(0, 4) === 'http'
|
||||
/* istanbul ignore next */
|
||||
? address
|
||||
: `${protocol}//${address}`
|
||||
const roles = node.roles.reduce((acc, role) => {
|
||||
|
||||
@ -7,7 +7,7 @@
|
||||
const BaseConnectionPool = require('./BaseConnectionPool')
|
||||
|
||||
class CloudConnectionPool extends BaseConnectionPool {
|
||||
constructor (opts = {}) {
|
||||
constructor (opts) {
|
||||
super(opts)
|
||||
this.cloudConnection = null
|
||||
}
|
||||
|
||||
@ -11,7 +11,7 @@ const Connection = require('../Connection')
|
||||
const noop = () => {}
|
||||
|
||||
class ConnectionPool extends BaseConnectionPool {
|
||||
constructor (opts = {}) {
|
||||
constructor (opts) {
|
||||
super(opts)
|
||||
|
||||
this.dead = []
|
||||
|
||||
package.json (40 lines changed)
@ -3,8 +3,15 @@
|
||||
"description": "The official Elasticsearch client for Node.js",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"require": "./index.js",
|
||||
"import": "./index.mjs"
|
||||
},
|
||||
"./": "./"
|
||||
},
|
||||
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
|
||||
"version": "7.7.1",
|
||||
"version": "7.9.0",
|
||||
"keywords": [
|
||||
"elasticsearch",
|
||||
"elastic",
|
||||
@ -16,21 +23,20 @@
|
||||
"index"
|
||||
],
|
||||
"scripts": {
|
||||
"test": "npm run lint && npm run test:unit && npm run test:behavior && npm run test:types",
|
||||
"test:node8": "npm run lint && tap test/unit/*.test.js -t 300 --no-coverage && npm run test:behavior && npm run test:types",
|
||||
"test:unit": "tap test/unit/*.test.js test/unit/**/*.test.js -t 300 --no-coverage",
|
||||
"test:behavior": "tap test/behavior/*.test.js -t 300 --no-coverage",
|
||||
"test": "npm run lint && tap test/{unit,acceptance}/{*,**/*}.test.js && npm run test:types",
|
||||
"test:node8": "npm run lint && tap test/{unit,acceptance}/*.test.js",
|
||||
"test:unit": "tap test/unit/{*,**/*}.test.js",
|
||||
"test:acceptance": "tap test/acceptance/*.test.js",
|
||||
"test:integration": "node test/integration/index.js",
|
||||
"test:integration:helpers": "tap test/integration/helpers/*.test.js --no-coverage -J",
|
||||
"test:integration:helpers": "tap test/integration/helpers/*.test.js",
|
||||
"test:types": "tsd",
|
||||
"test:coverage": "tap test/unit/*.test.js test/unit/**/*.test.js test/behavior/*.test.js -t 300 && nyc report --reporter=text-lcov > coverage.lcov",
|
||||
"test:coverage-ui": "tap test/unit/*.test.js test/unit/**/*.test.js test/behavior/*.test.js -t 300 --coverage-report=html",
|
||||
"test:coverage-100": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --100 --nyc-arg=\"--exclude=api\"",
|
||||
"test:coverage-report": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --nyc-arg=\"--exclude=api\" && nyc report --reporter=text-lcov > coverage.lcov",
|
||||
"test:coverage-ui": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --coverage-report=html --nyc-arg=\"--exclude=api\"",
|
||||
"lint": "standard",
|
||||
"lint:fix": "standard --fix",
|
||||
"ci": "npm run license-checker && npm test && npm run test:integration:helpers && npm run test:integration && npm run test:coverage",
|
||||
"license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause'",
|
||||
"elasticsearch": "./scripts/es-docker.sh",
|
||||
"elasticsearch:xpack": "./scripts/es-docker-platinum.sh"
|
||||
"build-esm": "npx gen-esm-wrapper . index.mjs && standard --fix index.mjs"
|
||||
},
|
||||
"author": {
|
||||
"name": "Tomas Della Vedova",
|
||||
@ -41,6 +47,7 @@
|
||||
"company": "Elasticsearch BV"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1",
|
||||
"@types/node": "^12.6.2",
|
||||
"convert-hrtime": "^3.0.0",
|
||||
"dedent": "^0.7.0",
|
||||
@ -50,7 +57,6 @@
|
||||
"into-stream": "^5.1.1",
|
||||
"js-yaml": "^3.13.1",
|
||||
"license-checker": "^25.0.1",
|
||||
"lolex": "^4.0.1",
|
||||
"minimist": "^1.2.0",
|
||||
"ora": "^3.4.0",
|
||||
"pretty-hrtime": "^1.0.3",
|
||||
@ -62,7 +68,7 @@
|
||||
"standard": "^13.0.2",
|
||||
"stoppable": "^1.1.0",
|
||||
"tap": "^14.4.1",
|
||||
"tsd": "^0.11.0",
|
||||
"tsd": "^0.13.1",
|
||||
"workq": "^2.1.0",
|
||||
"xmlbuilder2": "^2.1.2"
|
||||
},
|
||||
@ -86,5 +92,13 @@
|
||||
},
|
||||
"tsd": {
|
||||
"directory": "test/types"
|
||||
},
|
||||
"tap": {
|
||||
"esm": false,
|
||||
"ts": false,
|
||||
"jsx": false,
|
||||
"flow": false,
|
||||
"coverage": false,
|
||||
"jobs-auto": true
|
||||
}
|
||||
}
|
||||
|
||||
@ -33,6 +33,7 @@ function start (opts) {
|
||||
const apiOutputFolder = join(packageFolder, 'api')
|
||||
const mainOutputFile = join(packageFolder, 'index.js')
|
||||
const typeDefFile = join(__dirname, '..', 'index.d.ts')
|
||||
const kibanaTypeDefFile = join(packageFolder, 'kibana.d.ts')
|
||||
const docOutputFile = join(__dirname, '..', 'docs', 'reference.asciidoc')
|
||||
const requestParamsOutputFile = join(packageFolder, 'requestParams.d.ts')
|
||||
const allSpec = []
|
||||
@ -59,23 +60,33 @@ function start (opts) {
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
const { fn: factory, types } = genFactory(apiOutputFolder, [apiFolder, xPackFolder])
|
||||
const { fn: factory, types, kibanaTypes } = genFactory(apiOutputFolder, [apiFolder, xPackFolder])
|
||||
writeFileSync(
|
||||
mainOutputFile,
|
||||
factory,
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
const oldTypeDefString = readFileSync(typeDefFile, 'utf8')
|
||||
const start = oldTypeDefString.indexOf('/* GENERATED */')
|
||||
const end = oldTypeDefString.indexOf('/* /GENERATED */')
|
||||
const newTypeDefString = oldTypeDefString.slice(0, start + 15) + '\n' + types + '\n ' + oldTypeDefString.slice(end)
|
||||
let oldTypeDefString = readFileSync(typeDefFile, 'utf8')
|
||||
let start = oldTypeDefString.indexOf('/* GENERATED */')
|
||||
let end = oldTypeDefString.indexOf('/* /GENERATED */')
|
||||
let newTypeDefString = oldTypeDefString.slice(0, start + 15) + '\n' + types + '\n ' + oldTypeDefString.slice(end)
|
||||
writeFileSync(
|
||||
typeDefFile,
|
||||
newTypeDefString,
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
oldTypeDefString = readFileSync(kibanaTypeDefFile, 'utf8')
|
||||
start = oldTypeDefString.indexOf('/* GENERATED */')
|
||||
end = oldTypeDefString.indexOf('/* /GENERATED */')
|
||||
newTypeDefString = oldTypeDefString.slice(0, start + 15) + '\n' + kibanaTypes + '\n ' + oldTypeDefString.slice(end)
|
||||
writeFileSync(
|
||||
kibanaTypeDefFile,
|
||||
newTypeDefString,
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
lintFiles(log, () => {
|
||||
log.text = 'Generating documentation'
|
||||
const allSpec = apiFolderContents.filter(f => f !== '_common.json')
|
||||
|
||||
@ -35,8 +35,10 @@ function genFactory (folder, paths) {
|
||||
.split('.')
|
||||
.reverse()
|
||||
.reduce((acc, val) => {
|
||||
const body = hasBody(paths, file.slice(0, -3))
|
||||
const methods = acc === null ? buildMethodDefinition(val, name, body) : null
|
||||
const spec = readSpec(paths, file.slice(0, -3))
|
||||
const isHead = isHeadMethod(spec, file.slice(0, -3))
|
||||
const body = hasBody(spec, file.slice(0, -3))
|
||||
const methods = acc === null ? buildMethodDefinition({ kibana: false }, val, name, body, isHead) : null
|
||||
const obj = {}
|
||||
if (methods) {
|
||||
for (const m of methods) {
|
||||
@ -53,6 +55,35 @@ function genFactory (folder, paths) {
|
||||
})
|
||||
.reduce((acc, val) => deepmerge(acc, val), {})
|
||||
|
||||
const kibanaTypes = apiFiles
|
||||
.map(file => {
|
||||
const name = file
|
||||
.slice(0, -3)
|
||||
.replace(/\.([a-z])/g, k => k[1].toUpperCase())
|
||||
.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
|
||||
return file
|
||||
.slice(0, -3) // remove `.js` extension
|
||||
.split('.')
|
||||
.reverse()
|
||||
.reduce((acc, val) => {
|
||||
const spec = readSpec(paths, file.slice(0, -3))
|
||||
const isHead = isHeadMethod(spec, file.slice(0, -3))
|
||||
const body = hasBody(spec, file.slice(0, -3))
|
||||
const methods = acc === null ? buildMethodDefinition({ kibana: true }, val, name, body, isHead) : null
|
||||
const obj = {}
|
||||
if (methods) {
|
||||
for (const m of methods) {
|
||||
obj[m.key] = m.val
|
||||
}
|
||||
} else {
|
||||
obj[camelify(val)] = acc
|
||||
}
|
||||
return obj
|
||||
}, null)
|
||||
})
|
||||
.reduce((acc, val) => deepmerge(acc, val), {})
|
||||
|
||||
const apis = apiFiles
|
||||
.map(file => {
|
||||
// const name = format(file.slice(0, -3))
|
||||
@ -97,6 +128,18 @@ function genFactory (folder, paths) {
|
||||
// remove useless quotes and commas
|
||||
.replace(/"/g, '')
|
||||
.replace(/,$/gm, '')
|
||||
const kibanaTypesStr = Object.keys(kibanaTypes)
|
||||
.map(key => {
|
||||
const line = ` ${key}: ${JSON.stringify(kibanaTypes[key], null, 4)}`
|
||||
if (line.slice(-1) === '}') {
|
||||
return line.slice(0, -1) + ' }'
|
||||
}
|
||||
return line
|
||||
})
|
||||
.join('\n')
|
||||
// remove useless quotes and commas
|
||||
.replace(/"/g, '')
|
||||
.replace(/,$/gm, '')
|
||||
|
||||
const fn = dedent`
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
@ -161,7 +204,7 @@ function genFactory (folder, paths) {
|
||||
`
|
||||
|
||||
// new line at the end of file
|
||||
return { fn: fn + '\n', types: typesStr }
|
||||
return { fn: fn + '\n', types: typesStr, kibanaTypes: kibanaTypesStr }
|
||||
}
|
||||
|
||||
// from snake_case to camelCase
|
||||
@ -177,61 +220,86 @@ function toPascalCase (str) {
|
||||
return str[0].toUpperCase() + str.slice(1)
|
||||
}
|
||||
|
||||
function buildMethodDefinition (api, name, hasBody) {
|
||||
function buildMethodDefinition (opts, api, name, hasBody, isHead) {
|
||||
const Name = toPascalCase(name)
|
||||
const bodyType = ndjsonApiKey.includes(Name) ? 'RequestNDBody' : 'RequestBody'
|
||||
const responseType = isHead ? 'boolean' : 'Record<string, any>'
|
||||
const defaultBodyType = ndjsonApiKey.includes(Name) ? 'Record<string, any>[]' : 'Record<string, any>'
|
||||
|
||||
if (opts.kibana) {
|
||||
if (hasBody) {
|
||||
return [
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` }
|
||||
]
|
||||
} else {
|
||||
return [
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` }
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
if (hasBody) {
|
||||
let methods = [
|
||||
{ key: `${api}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${api}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
]
|
||||
if (isSnakeCased(api)) {
|
||||
methods = methods.concat([
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = unknown>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
])
|
||||
}
|
||||
return methods
|
||||
} else {
|
||||
let methods = [
|
||||
{ key: `${api}<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${api}<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
]
|
||||
if (isSnakeCased(api)) {
|
||||
methods = methods.concat([
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TContext = unknown>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TContext = unknown>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = Record<string, any>, TContext = unknown>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
|
||||
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
|
||||
])
|
||||
}
|
||||
return methods
|
||||
}
|
||||
}
|
||||
|
||||
function hasBody (paths, file) {
|
||||
const spec = readSpec()
|
||||
return !!spec[file].body
|
||||
function hasBody (spec, api) {
|
||||
return !!spec[api].body
|
||||
}
|
||||
|
||||
function readSpec () {
|
||||
try {
|
||||
return require(join(paths[0], file))
|
||||
} catch (err) {}
|
||||
|
||||
try {
|
||||
return require(join(paths[1], file))
|
||||
} catch (err) {}
|
||||
|
||||
throw new Error(`Cannot read spec file ${file}`)
|
||||
function isHeadMethod (spec, api) {
|
||||
const { paths } = spec[api].url
|
||||
const methods = []
|
||||
for (const path of paths) {
|
||||
for (const method of path.methods) {
|
||||
if (!methods.includes(method)) {
|
||||
methods.push(method)
|
||||
}
|
||||
}
|
||||
}
|
||||
return methods.length === 1 && methods[0] === 'HEAD'
|
||||
}
|
||||
|
||||
function readSpec (paths, file) {
|
||||
try {
|
||||
return require(join(paths[0], file))
|
||||
} catch (err) {}
|
||||
|
||||
try {
|
||||
return require(join(paths[1], file))
|
||||
} catch (err) {}
|
||||
|
||||
throw new Error(`Cannot read spec file ${file}`)
|
||||
}
|
||||
|
||||
module.exports = genFactory
|
||||
|
||||
@ -26,7 +26,6 @@ import { RequestBody, RequestNDBody } from '../lib/Transport'
|
||||
|
||||
export interface Generic {
|
||||
method?: string;
|
||||
ignore?: number | number[];
|
||||
filter_path?: string | string[];
|
||||
pretty?: boolean;
|
||||
human?: boolean;
|
||||
@ -118,11 +117,37 @@ export interface ${toPascalCase(name)}${body ? `<T = ${bodyGeneric}>` : ''} exte
|
||||
case 'timeout':
|
||||
return 'string'
|
||||
case 'enum':
|
||||
return options.map(k => `'${k}'`).join(' | ')
|
||||
// the following code changes 'true' | 'false' to boolean
|
||||
let foundTrue = false
|
||||
let foundFalse = false
|
||||
options = options
|
||||
.map(k => {
|
||||
if (k === 'true') {
|
||||
foundTrue = true
|
||||
return true
|
||||
} else if (k === 'false') {
|
||||
foundFalse = true
|
||||
return false
|
||||
} else {
|
||||
return `'${k}'`
|
||||
}
|
||||
})
|
||||
.filter(k => {
|
||||
if (foundTrue && foundFalse && (k === true || k === false)) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
if (foundTrue && foundFalse) {
|
||||
options.push('boolean')
|
||||
}
|
||||
return options.join(' | ')
|
||||
case 'int':
|
||||
case 'double':
|
||||
case 'long':
|
||||
return 'number'
|
||||
case 'boolean|long':
|
||||
return 'boolean | number'
|
||||
default:
|
||||
return type
|
||||
}
|
||||
|
||||
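The switch above maps REST-spec parameter types to TypeScript types; for `enum` it now folds an options list containing both `'true'` and `'false'` into a single `boolean` member of the resulting union. A minimal standalone sketch of that mapping (illustrative only, not the generator's actual helper):

```ts
// Illustrative sketch of the enum-to-TypeScript mapping described above.
// Folds the 'true'/'false' pair into one `boolean` member of the union.
function enumToTsType (options: string[]): string {
  const hasTrue = options.includes('true')
  const hasFalse = options.includes('false')
  const literals = options
    .filter(k => !(hasTrue && hasFalse && (k === 'true' || k === 'false')))
    .map(k => `'${k}'`)
  if (hasTrue && hasFalse) literals.push('boolean')
  return literals.join(' | ')
}

console.log(enumToTsType(['true', 'false', 'wait_for'])) // "'wait_for' | boolean"
console.log(enumToTsType(['open', 'closed']))            // "'open' | 'closed'"
```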
@ -1,7 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const lolex = require('lolex')
|
||||
const FakeTimers = require('@sinonjs/fake-timers')
|
||||
const { Client, Transport } = require('../../index')
|
||||
const {
|
||||
connection: { MockConnection, MockConnectionSniff }
|
||||
@ -125,7 +125,7 @@ test('Request id', t => {
|
||||
t.test('Resurrect should use the same request id of the request that starts it', t => {
|
||||
t.plan(2)
|
||||
|
||||
const clock = lolex.install({ toFake: ['Date'] })
|
||||
const clock = FakeTimers.install({ toFake: ['Date'] })
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
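The hunks above (and the similar ones that follow) migrate the test suite from `lolex` to its renamed successor `@sinonjs/fake-timers`; the install/uninstall API is unchanged, so only the dependency and the constant name change. A hedged sketch of the pattern used in these tests:

```ts
// Same API as lolex, published under the new package name.
// Assumes @sinonjs/fake-timers is installed; shown for illustration only.
import * as FakeTimers from '@sinonjs/fake-timers'

const clock = FakeTimers.install({ toFake: ['Date'] })
// ... exercise code that reads Date.now() against the fake clock ...
clock.uninstall()
```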
@ -192,6 +192,50 @@ test('Request context', t => {
|
||||
client.info({}, { context: { winter: 'is coming' } }, t.error)
|
||||
})
|
||||
|
||||
t.test('global value', t => {
|
||||
t.plan(5)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
context: { winter: 'is coming' }
|
||||
})
|
||||
|
||||
client.on('request', (err, { meta }) => {
|
||||
t.error(err)
|
||||
t.deepEqual(meta.context, { winter: 'is coming' })
|
||||
})
|
||||
|
||||
client.on('response', (err, { meta }) => {
|
||||
t.error(err)
|
||||
t.deepEqual(meta.context, { winter: 'is coming' })
|
||||
})
|
||||
|
||||
client.info(t.error)
|
||||
})
|
||||
|
||||
t.test('override global', t => {
|
||||
t.plan(5)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
context: { winter: 'is coming' }
|
||||
})
|
||||
|
||||
client.on('request', (err, { meta }) => {
|
||||
t.error(err)
|
||||
t.deepEqual(meta.context, { winter: 'has come' })
|
||||
})
|
||||
|
||||
client.on('response', (err, { meta }) => {
|
||||
t.error(err)
|
||||
t.deepEqual(meta.context, { winter: 'has come' })
|
||||
})
|
||||
|
||||
client.info({}, { context: { winter: 'has come' } }, t.error)
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
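The two tests above exercise the new client-level `context` option: a value given at construction shows up on `meta.context` of every request and response event, and a per-request `context` passed in the transport options takes precedence. A hedged usage sketch, assuming the usual `@elastic/elasticsearch` package name:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({
  node: 'http://localhost:9200',
  context: { winter: 'is coming' } // default context attached to every request
})

client.on('response', (err, { meta }) => {
  if (err) return
  console.log(meta.context) // { winter: 'is coming' } unless overridden below
})

// Per-request override via the second (options) argument.
client.info({}, { context: { winter: 'has come' } }, (err, result) => {
  if (err) console.log(err)
})
```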
@ -205,7 +249,7 @@ test('Client name', t => {
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Is present in the event metadata', t => {
|
||||
t.test('Is present in the event metadata (as string)', t => {
|
||||
t.plan(6)
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
@ -229,6 +273,31 @@ test('Client name', t => {
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Is present in the event metadata (as symbol)', t => {
|
||||
t.plan(6)
|
||||
const symbol = Symbol('cluster')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
name: symbol
|
||||
})
|
||||
|
||||
client.on('request', (err, { meta }) => {
|
||||
t.error(err)
|
||||
t.strictEqual(meta.name, symbol)
|
||||
})
|
||||
|
||||
client.on('response', (err, { meta }) => {
|
||||
t.error(err)
|
||||
t.strictEqual(meta.name, symbol)
|
||||
})
|
||||
|
||||
client.info((err, { meta }) => {
|
||||
t.error(err)
|
||||
t.strictEqual(meta.name, symbol)
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Sniff and client name', t => {
|
||||
t.test('sniffOnStart', t => {
|
||||
t.plan(2)
|
||||
@ -281,7 +350,7 @@ test('Client name', t => {
|
||||
t.test('Resurrect should have the client name configured', t => {
|
||||
t.plan(2)
|
||||
|
||||
const clock = lolex.install({ toFake: ['Date'] })
|
||||
const clock = FakeTimers.install({ toFake: ['Date'] })
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
@ -305,7 +374,7 @@ test('Client name', t => {
|
||||
t.test('Resurrect should have the client name configured (child client)', t => {
|
||||
t.plan(2)
|
||||
|
||||
const clock = lolex.install({ toFake: ['Date'] })
|
||||
const clock = FakeTimers.install({ toFake: ['Date'] })
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
@ -6,7 +6,7 @@
|
||||
|
||||
const { test } = require('tap')
|
||||
const { URL } = require('url')
|
||||
const lolex = require('lolex')
|
||||
const FakeTimers = require('@sinonjs/fake-timers')
|
||||
const workq = require('workq')
|
||||
const { buildCluster } = require('../utils')
|
||||
const { Client, events } = require('../../index')
|
||||
@ -24,7 +24,7 @@ const { Client, events } = require('../../index')
|
||||
test('Should execute the recurrect API with the ping strategy', t => {
|
||||
t.plan(8)
|
||||
|
||||
const clock = lolex.install({ toFake: ['Date'] })
|
||||
const clock = FakeTimers.install({ toFake: ['Date'] })
|
||||
const q = workq()
|
||||
|
||||
buildCluster({ numberOfNodes: 2 }, cluster => {
|
||||
@ -77,7 +77,7 @@ test('Should execute the recurrect API with the ping strategy', t => {
|
||||
test('Resurrect a node and handle 502/3/4 status code', t => {
|
||||
t.plan(15)
|
||||
|
||||
const clock = lolex.install({ toFake: ['Date'] })
|
||||
const clock = FakeTimers.install({ toFake: ['Date'] })
|
||||
const q = workq()
|
||||
|
||||
var count = 0
|
||||
@ -146,7 +146,7 @@ test('Resurrect a node and handle 502/3/4 status code', t => {
|
||||
test('Should execute the recurrect API with the optimistic strategy', t => {
|
||||
t.plan(8)
|
||||
|
||||
const clock = lolex.install({ toFake: ['Date'] })
|
||||
const clock = FakeTimers.install({ toFake: ['Date'] })
|
||||
const q = workq()
|
||||
|
||||
buildCluster({ numberOfNodes: 2 }, cluster => {
|
||||
@ -6,7 +6,7 @@
|
||||
|
||||
const { test } = require('tap')
|
||||
const { URL } = require('url')
|
||||
const lolex = require('lolex')
|
||||
const FakeTimers = require('@sinonjs/fake-timers')
|
||||
const workq = require('workq')
|
||||
const { buildCluster } = require('../utils')
|
||||
const { Client, Connection, Transport, events, errors } = require('../../index')
|
||||
@ -115,7 +115,7 @@ test('Should handle hostnames in publish_address', t => {
|
||||
|
||||
test('Sniff interval', t => {
|
||||
t.plan(11)
|
||||
const clock = lolex.install({ toFake: ['Date'] })
|
||||
const clock = FakeTimers.install({ toFake: ['Date'] })
|
||||
const q = workq()
|
||||
|
||||
buildCluster(({ nodes, shutdown, kill }) => {
|
||||
106
test/integration/helpers/msearch.test.js
Normal file
@ -0,0 +1,106 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client, errors } = require('../../../')
|
||||
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: true,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
if (result.failed > 0) {
|
||||
throw new Error('Failed bulk indexing docs')
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('Basic', t => {
|
||||
t.plan(4)
|
||||
const m = client.helpers.msearch({ operations: 1 })
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'javascript' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.strictEqual(result.body.hits.total.value, 106)
|
||||
}
|
||||
)
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'ruby' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.strictEqual(result.body.hits.total.value, 29)
|
||||
}
|
||||
)
|
||||
|
||||
t.teardown(() => m.stop())
|
||||
})
|
||||
|
||||
test('Bad request', t => {
|
||||
t.plan(3)
|
||||
const m = client.helpers.msearch({ operations: 1 })
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'javascript' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.strictEqual(result.body.hits.total.value, 106)
|
||||
}
|
||||
)
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { foo: { title: 'ruby' } } },
|
||||
(err, result) => {
|
||||
t.true(err instanceof errors.ResponseError)
|
||||
}
|
||||
)
|
||||
|
||||
t.teardown(() => m.stop())
|
||||
})
|
||||
|
||||
test('Send multiple request concurrently over the concurrency limit', t => {
|
||||
t.plan(20)
|
||||
const m = client.helpers.msearch({ operations: 1 })
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'javascript' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.strictEqual(result.body.hits.total.value, 106)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
t.teardown(() => m.stop())
|
||||
})
|
||||
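The new integration file above drives the msearch helper end to end against a real cluster. For reference, a hedged sketch of how the helper is used from application code, mirroring the callback and promise styles exercised in these tests:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Batches individual searches into _msearch requests behind the scenes.
const m = client.helpers.msearch({ operations: 10, flushInterval: 500 })

// Callback style
m.search(
  { index: 'my-index' },
  { query: { match: { title: 'javascript' } } },
  (err, result) => {
    if (err) console.log(err)
    else console.log(result.body.hits.total.value)
  }
)

// Promise style
m.search({ index: 'my-index' }, { query: { match: { title: 'ruby' } } })
  .then(result => {
    console.log(result.documents)
    // Stop the helper once no more searches will be submitted.
    m.stop()
  })
  .catch(err => console.log(err))
```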
@ -55,6 +55,8 @@ const xPackBlackList = {
|
||||
'Attempt to open job when upgrade_mode is enabled',
|
||||
'Setting upgrade mode to disabled from enabled'
|
||||
],
|
||||
// The cleanup fails with a index not found when retrieving the jobs
|
||||
'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'],
|
||||
// investigate why this is failing
|
||||
'monitoring/bulk/10_basic.yml': ['*'],
|
||||
'monitoring/bulk/20_privileges.yml': ['*'],
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
|
||||
import { expectType, expectError } from 'tsd'
|
||||
import { Readable as ReadableStream } from 'stream';
|
||||
import { TransportRequestCallback } from '../../lib/Transport'
|
||||
import { TransportRequestCallback, Context } from '../../lib/Transport'
|
||||
import { Client, ApiError } from '../../'
|
||||
|
||||
const client = new Client({
|
||||
@ -80,7 +80,7 @@ expectError(
|
||||
})
|
||||
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Define only the response body (promise style)
|
||||
@ -95,7 +95,7 @@ expectError(
|
||||
})
|
||||
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Define response body and request body (promise style)
|
||||
@ -110,12 +110,12 @@ expectError(
|
||||
})
|
||||
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Define response body, request body and the context (promise style)
|
||||
{
|
||||
const response = await client.search<SearchResponse<Source>, SearchBody, string>({
|
||||
const response = await client.search<SearchResponse<Source>, SearchBody, Context>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
@ -125,7 +125,7 @@ expectError(
|
||||
})
|
||||
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<string>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Send request body as string (promise style)
|
||||
@ -136,7 +136,7 @@ expectError(
|
||||
})
|
||||
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Send request body as buffer (promise style)
|
||||
@ -147,7 +147,7 @@ expectError(
|
||||
})
|
||||
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Send request body as readable stream (promise style)
|
||||
@ -158,7 +158,7 @@ expectError(
|
||||
})
|
||||
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// No generics (callback style)
|
||||
@ -173,7 +173,7 @@ expectError(
|
||||
}, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
@ -190,7 +190,7 @@ expectError(
|
||||
}, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
@ -207,14 +207,14 @@ expectError(
|
||||
}, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
|
||||
// Define response body, request body and the context (callback style)
|
||||
{
|
||||
const result = client.search<SearchResponse<Source>, SearchBody, string>({
|
||||
const result = client.search<SearchResponse<Source>, SearchBody, Context>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
@ -224,7 +224,7 @@ expectError(
|
||||
}, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<string>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
@ -237,7 +237,7 @@ expectError(
|
||||
}, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
@ -250,7 +250,7 @@ expectError(
|
||||
}, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
@ -263,7 +263,7 @@ expectError(
|
||||
}, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
}
|
||||
|
||||
@ -3,7 +3,7 @@
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
import { expectType } from 'tsd'
|
||||
import { TransportRequestCallback } from '../../lib/Transport'
|
||||
import { TransportRequestCallback, Context } from '../../lib/Transport'
|
||||
import { Client, ApiError } from '../../'
|
||||
|
||||
const client = new Client({
|
||||
@ -15,7 +15,7 @@ const client = new Client({
|
||||
const response = await client.cat.count({ index: 'test' })
|
||||
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Define only the response body (promise style)
|
||||
@ -23,7 +23,7 @@ const client = new Client({
|
||||
const response = await client.cat.count<string>({ index: 'test' })
|
||||
|
||||
expectType<string>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Define response body and the context (promise style)
|
||||
@ -39,7 +39,7 @@ const client = new Client({
|
||||
const result = client.cat.count({ index: 'test' }, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
@ -49,17 +49,17 @@ const client = new Client({
|
||||
const result = client.cat.count<string>({ index: 'test' }, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<string>(response.body)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
|
||||
// Define response body and the context (callback style)
|
||||
{
|
||||
const result = client.cat.count<string, string>({ index: 'test' }, (err, response) => {
|
||||
const result = client.cat.count<string, Context>({ index: 'test' }, (err, response) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<string>(response.body)
|
||||
expectType<string>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
})
|
||||
expectType<TransportRequestCallback>(result)
|
||||
}
|
||||
|
||||
@ -51,12 +51,14 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 42
|
||||
})
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: {
|
||||
url: 'http://localhost:9200',
|
||||
@ -76,6 +78,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
maxRetries: 'five'
|
||||
@ -93,6 +96,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
requestTimeout: 'five'
|
||||
@ -110,6 +114,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
pingTimeout: 'five'
|
||||
@ -134,6 +139,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
sniffInterval: 'five'
|
||||
@ -151,6 +157,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
sniffOnStart: 'no'
|
||||
@ -168,6 +175,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
sniffEndpoint: false
|
||||
@ -185,6 +193,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
sniffOnConnectionFault: 'yes'
|
||||
@ -216,6 +225,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
resurrectStrategy: 'custom'
|
||||
@ -233,6 +243,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
suggestCompression: 'no'
|
||||
@ -250,6 +261,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
compression: 'deflate'
|
||||
@ -267,6 +279,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
headers: 'foo=bar'
|
||||
@ -284,6 +297,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
opaqueIdPrefix: 42
|
||||
@ -300,7 +314,15 @@ expectType<Client>(
|
||||
})
|
||||
)
|
||||
|
||||
expectType<Client>(
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
name: Symbol('foo')
|
||||
})
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
name: 42
|
||||
@ -342,6 +364,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
auth: 'password'
|
||||
@ -360,6 +383,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
cloud: {
|
||||
id: 42
|
||||
@ -382,12 +406,22 @@ expectType<Client>(
|
||||
})
|
||||
)
|
||||
|
||||
expectType<Client>(
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
agent: false
|
||||
})
|
||||
)
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
agent: {
|
||||
// @ts-expect-error
|
||||
keepAlive: 'yes',
|
||||
// @ts-expect-error
|
||||
keepAliveMsecs: true,
|
||||
// @ts-expect-error
|
||||
maxSockets: 'all',
|
||||
maxFreeSockets: null
|
||||
}
|
||||
@ -411,7 +445,9 @@ expectError<errors.ConfigurationError>(
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
ssl: {
|
||||
// @ts-expect-error
|
||||
ca: 42,
|
||||
// @ts-expect-error
|
||||
rejectUnauthorized: 'yes'
|
||||
}
|
||||
})
|
||||
@ -430,6 +466,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
generateRequestId: 'id'
|
||||
@ -456,6 +493,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
nodeSelector (connections) {
|
||||
@ -477,6 +515,7 @@ expectType<Client>(
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
nodeFilter (connection) {
|
||||
@ -511,6 +550,7 @@ expectError<errors.ConfigurationError>(
|
||||
}
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Serializer: CustomSerializer
|
||||
@ -546,6 +586,7 @@ expectError<errors.ConfigurationError>(
|
||||
expectError<errors.ConfigurationError>(
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// @ts-expect-error
|
||||
Connection: CustomConnection
|
||||
})
|
||||
)
|
||||
@ -577,6 +618,7 @@ expectError<errors.ConfigurationError>(
|
||||
}
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
ConnectionPool: CustomConnectionPool
|
||||
@ -610,9 +652,28 @@ expectError<errors.ConfigurationError>(
|
||||
}
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Transport: CustomTransport
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* `context` option
|
||||
*/
|
||||
expectType<Client>(
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
context: { hello: 'world' }
|
||||
})
|
||||
)
|
||||
|
||||
expectError<errors.ConfigurationError>(
|
||||
// @ts-expect-error
|
||||
new Client({
|
||||
node: 'http://localhost:9200',
|
||||
context: 'hello world'
|
||||
})
|
||||
)
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
|
||||
import { expectType } from 'tsd'
|
||||
import { Client, ApiError, ApiResponse, RequestEvent, ResurrectEvent } from '../../'
|
||||
import { TransportRequestCallback, TransportRequestPromise } from '../..//lib/Transport';
|
||||
import { TransportRequestCallback, TransportRequestPromise } from '../../lib/Transport'
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
@ -120,4 +120,4 @@ client.on('resurrect', (err, meta) => {
|
||||
} catch (err) {
|
||||
expectType<any>(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -9,8 +9,10 @@ import {
|
||||
BulkStats,
|
||||
BulkHelperOptions,
|
||||
ScrollSearchResponse,
|
||||
OnDropDocument
|
||||
OnDropDocument,
|
||||
MsearchHelper
|
||||
} from '../../lib/Helpers'
|
||||
import { ApiResponse, ApiError, Context } from '../../lib/Transport'
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
@ -25,6 +27,7 @@ const b = client.helpers.bulk<Record<string, any>>({
|
||||
return { index: { _index: 'test' } }
|
||||
},
|
||||
flushBytes: 5000000,
|
||||
flushInterval: 30000,
|
||||
concurrency: 5,
|
||||
retries: 3,
|
||||
wait: 5000,
|
||||
@ -56,7 +59,7 @@ expectError(
|
||||
const options = {
|
||||
datasource: [],
|
||||
onDocument (doc: Record<string, any>) {
|
||||
return { index: { _index: 'test' } }
|
||||
return { index: { _index: 'test' } }
|
||||
}
|
||||
}
|
||||
expectAssignable<BulkHelperOptions<Record<string, any>>>(options)
|
||||
@ -131,26 +134,26 @@ expectError(
|
||||
expectAssignable<ScrollSearchResponse>(response)
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<unknown[]>(response.documents)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// with type defs
|
||||
{
|
||||
{
|
||||
interface ShardsResponse {
|
||||
total: number;
|
||||
successful: number;
|
||||
failed: number;
|
||||
skipped: number;
|
||||
}
|
||||
|
||||
|
||||
interface Explanation {
|
||||
value: number;
|
||||
description: string;
|
||||
details: Explanation[];
|
||||
}
|
||||
|
||||
|
||||
interface SearchResponse<T> {
|
||||
took: number;
|
||||
timed_out: boolean;
|
||||
@ -176,7 +179,7 @@ expectError(
|
||||
};
|
||||
aggregations?: any;
|
||||
}
|
||||
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
@ -195,7 +198,7 @@ expectError(
|
||||
expectAssignable<ScrollSearchResponse>(response)
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<Source[]>(response.documents)
|
||||
expectType<unknown>(response.meta.context)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -206,20 +209,20 @@ expectError(
|
||||
match: { foo: string }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
interface ShardsResponse {
|
||||
total: number;
|
||||
successful: number;
|
||||
failed: number;
|
||||
skipped: number;
|
||||
}
|
||||
|
||||
|
||||
interface Explanation {
|
||||
value: number;
|
||||
description: string;
|
||||
details: Explanation[];
|
||||
}
|
||||
|
||||
|
||||
interface SearchResponse<T> {
|
||||
took: number;
|
||||
timed_out: boolean;
|
||||
@ -245,13 +248,13 @@ expectError(
|
||||
};
|
||||
aggregations?: any;
|
||||
}
|
||||
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
|
||||
async function test () {
|
||||
const scrollSearch = client.helpers.scrollSearch<Source, SearchResponse<Source>, SearchBody, string>({
|
||||
const scrollSearch = client.helpers.scrollSearch<Source, SearchResponse<Source>, SearchBody, Record<string, unknown>>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
@ -264,7 +267,7 @@ expectError(
|
||||
expectAssignable<ScrollSearchResponse>(response)
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<Source[]>(response.documents)
|
||||
expectType<string>(response.meta.context)
|
||||
expectType<Record<string, unknown>>(response.meta.context)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -308,7 +311,7 @@ expectError(
|
||||
}
|
||||
|
||||
// with type defs
|
||||
{
|
||||
{
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
@ -335,7 +338,7 @@ expectError(
|
||||
match: { foo: string }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
@ -413,7 +416,7 @@ expectError(
|
||||
match: { foo: string }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
@ -429,4 +432,30 @@ expectError(
|
||||
|
||||
expectType<Promise<Source[]>>(p)
|
||||
expectType<Source[]>(await p)
|
||||
}
|
||||
}
|
||||
|
||||
/// .helpers.msearch
|
||||
|
||||
const s = client.helpers.msearch({
|
||||
operations: 5,
|
||||
flushInterval: 500,
|
||||
concurrency: 5,
|
||||
retries: 5,
|
||||
wait: 5000
|
||||
})
|
||||
|
||||
expectType<MsearchHelper>(s)
|
||||
expectType<void>(s.stop())
|
||||
expectType<void>(s.stop(new Error('kaboom')))
|
||||
|
||||
expectType<Promise<ApiResponse<Record<string, any>, unknown>>>(s.search({ index: 'foo'}, { query: {} }))
|
||||
expectType<Promise<ApiResponse<string, string>>>(s.search<string, Record<string, any>, string>({ index: 'foo'}, { query: {} }))
|
||||
|
||||
expectType<void>(s.search({ index: 'foo'}, { query: {} }, (err, result) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<ApiResponse>(result)
|
||||
}))
|
||||
expectType<void>(s.search<string, Record<string, any>, string>({ index: 'foo'}, { query: {} }, (err, result) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<ApiResponse<string, string>>(result)
|
||||
}))
|
||||
|
||||
108
test/types/kibana.test-d.ts
Normal file
@ -0,0 +1,108 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
import { expectType, expectNotType, expectError } from 'tsd'
|
||||
import { Client, RequestEvent, ResurrectEvent, ApiError, ApiResponse } from '../../'
|
||||
import { KibanaClient } from '../../api/kibana'
|
||||
import { TransportRequestPromise, Context } from '../../lib/Transport'
|
||||
|
||||
const client: KibanaClient = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
})
|
||||
|
||||
client.on('request', (err, meta) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<RequestEvent>(meta)
|
||||
})
|
||||
|
||||
client.on('response', (err, meta) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<RequestEvent>(meta)
|
||||
})
|
||||
|
||||
client.on('sniff', (err, meta) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<RequestEvent>(meta)
|
||||
})
|
||||
|
||||
client.on('resurrect', (err, meta) => {
|
||||
expectType<null>(err)
|
||||
expectType<ResurrectEvent>(meta)
|
||||
})
|
||||
|
||||
// No generics
|
||||
{
|
||||
const response = await client.cat.count({ index: 'test' })
|
||||
|
||||
expectType<Record<string, any>>(response.body)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Define only the response body
|
||||
{
|
||||
const response = await client.cat.count<string>({ index: 'test' })
|
||||
|
||||
expectType<string>(response.body)
|
||||
expectType<Context>(response.meta.context)
|
||||
}
|
||||
|
||||
// Define response body and the context
|
||||
{
|
||||
const response = await client.cat.count<string, string>({ index: 'test' })
|
||||
|
||||
expectType<string>(response.body)
|
||||
expectType<string>(response.meta.context)
|
||||
}
|
||||
|
||||
// Check API returned type and optional parameters
|
||||
{
|
||||
const promise = client.info()
|
||||
expectType<TransportRequestPromise<ApiResponse>>(promise)
|
||||
promise
|
||||
.then(result => expectType<ApiResponse>(result))
|
||||
.catch((err: ApiError) => expectType<ApiError>(err))
|
||||
expectType<void>(promise.abort())
|
||||
}
|
||||
|
||||
{
|
||||
const promise = client.info({ pretty: true })
|
||||
expectType<TransportRequestPromise<ApiResponse>>(promise)
|
||||
promise
|
||||
.then(result => expectType<ApiResponse>(result))
|
||||
.catch((err: ApiError) => expectType<ApiError>(err))
|
||||
expectType<void>(promise.abort())
|
||||
}
|
||||
|
||||
{
|
||||
const promise = client.info({ pretty: true }, { ignore: [404] })
|
||||
expectType<TransportRequestPromise<ApiResponse>>(promise)
|
||||
promise
|
||||
.then(result => expectType<ApiResponse>(result))
|
||||
.catch((err: ApiError) => expectType<ApiError>(err))
|
||||
expectType<void>(promise.abort())
|
||||
}
|
||||
|
||||
// body that does not respect the RequestBody constraint
|
||||
expectError(
|
||||
client.search({
|
||||
index: 'hello',
|
||||
body: 42
|
||||
}).then(console.log)
|
||||
)
|
||||
|
||||
// @ts-expect-error
|
||||
client.async_search.get()
|
||||
|
||||
// callback api is not supported
|
||||
expectError(client.cat.count({ index: 'test' }, {}, (err: any, result: any) => {}))
|
||||
|
||||
// close api, only promises should be supported
|
||||
// callback api is not supported
|
||||
expectType<Promise<void>>(client.close())
|
||||
expectError(client.close(() => {}))
|
||||
|
||||
// the child api should return a KibanaClient instance
|
||||
const child = client.child()
|
||||
expectType<KibanaClient>(child)
|
||||
expectNotType<Client>(child)
|
||||
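The new type-definition test above targets the promise-only `KibanaClient` interface. A hedged sketch of how downstream code could opt into it; the import path mirrors the `api/kibana` location shown here and is an assumption about the published package layout:

```ts
import { Client } from '@elastic/elasticsearch'
// Assumed public path, based on the api/kibana typings added in this changeset.
import { KibanaClient } from '@elastic/elasticsearch/api/kibana'

const client: KibanaClient = new Client({ node: 'http://localhost:9200' })

async function run () {
  // Promise API only: the KibanaClient type exposes no callback overloads.
  const response = await client.cat.count({ index: 'test' })
  console.log(response.body)

  // child() keeps the KibanaClient type, close() returns a promise.
  const child = client.child()
  await child.close()
}

run().catch(console.log)
```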
@ -14,6 +14,7 @@ import {
|
||||
TransportRequestParams,
|
||||
TransportRequestOptions,
|
||||
TransportRequestCallback,
|
||||
TransportRequestPromise,
|
||||
RequestEvent,
|
||||
ApiError,
|
||||
RequestBody,
|
||||
@ -102,6 +103,16 @@ expectType<Transport>(transport)
|
||||
|
||||
expectType<TransportRequestCallback>(transport.request(params, options, (err, result) => {}))
|
||||
|
||||
// querystring as string
|
||||
transport.request({
|
||||
method: 'GET',
|
||||
path: '/search',
|
||||
querystring: 'baz=faz'
|
||||
}, options, (err, result) => {
|
||||
expectType<ApiError>(err)
|
||||
expectType<ApiResponse>(result)
|
||||
})
|
||||
|
||||
// body as object
|
||||
transport.request(params, options, (err, result) => {
|
||||
expectType<ApiError>(err)
|
||||
@ -142,7 +153,7 @@ transport.request({
|
||||
})
|
||||
|
||||
const promise = transport.request(params, options)
|
||||
expectType<Promise<ApiResponse>>(promise)
|
||||
expectType<TransportRequestPromise<ApiResponse>>(promise)
|
||||
promise.then(result => expectType<ApiResponse>(result))
|
||||
expectType<ApiResponse>(await promise)
|
||||
|
||||
|
||||
@ -1067,3 +1067,77 @@ test('Correctly handles the same header cased differently', t => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Random selector', t => {
|
||||
t.plan(2)
|
||||
|
||||
function handler (req, res) {
|
||||
res.setHeader('Content-Type', 'application/json;utf=8')
|
||||
res.end(JSON.stringify({ hello: 'world' }))
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const client = new Client({
|
||||
node: `http://localhost:${port}`,
|
||||
nodeSelector: 'random'
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, { body }) => {
|
||||
t.error(err)
|
||||
t.deepEqual(body, { hello: 'world' })
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Disable keep alive agent', t => {
|
||||
t.plan(3)
|
||||
|
||||
function handler (req, res) {
|
||||
t.strictEqual(req.headers.connection, 'close')
|
||||
res.setHeader('Content-Type', 'application/json;utf=8')
|
||||
res.end(JSON.stringify({ hello: 'world' }))
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const client = new Client({
|
||||
node: `http://localhost:${port}`,
|
||||
agent: false
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, { body }) => {
|
||||
t.error(err)
|
||||
t.deepEqual(body, { hello: 'world' })
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
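`agent: false` is now accepted both on the `Client` (above) and directly on a `Connection`, turning off the keep-alive agent so each request is sent with a `Connection: close` header over a fresh socket. A hedged configuration sketch:

```ts
import { Client } from '@elastic/elasticsearch'

// Disable the keep-alive agent entirely.
const client = new Client({
  node: 'http://localhost:9200',
  agent: false
})

client.search({ index: 'test', q: 'foo:bar' }, (err, { body }) => {
  if (err) console.log(err)
  else console.log(body)
})
```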
test('name property as string', t => {
|
||||
t.plan(1)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
name: 'client-name'
|
||||
})
|
||||
|
||||
t.strictEqual(client.name, 'client-name')
|
||||
})
|
||||
|
||||
test('name property as symbol', t => {
|
||||
t.plan(1)
|
||||
|
||||
const symbol = Symbol('client-name')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
name: symbol
|
||||
})
|
||||
|
||||
t.strictEqual(client.name, symbol)
|
||||
})
|
||||
|
||||
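As the two tests above show, the client `name` option now accepts a `symbol` as well as a string, and the same value is echoed back as `meta.name` in events. A hedged sketch:

```ts
import { Client } from '@elastic/elasticsearch'

// A symbol guarantees the name is unique, e.g. across many child clients.
const name = Symbol('ingest-client')
const client = new Client({ node: 'http://localhost:9200', name })

client.on('request', (err, { meta }) => {
  if (err) return
  console.log(meta.name === name) // true
})
```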
@ -198,7 +198,7 @@ test('Disable keep alive', t => {
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const connection = new Connection({
|
||||
url: new URL(`http://localhost:${port}`),
|
||||
agent: { keepAlive: false }
|
||||
agent: false
|
||||
})
|
||||
connection.request({
|
||||
path: '/hello',
|
||||
@ -499,6 +499,43 @@ test('Should not close a connection if there are open requests', t => {
|
||||
})
|
||||
})
|
||||
|
||||
test('Should not close a connection if there are open requests (with agent disabled)', t => {
|
||||
t.plan(4)
|
||||
|
||||
function handler (req, res) {
|
||||
setTimeout(() => res.end('ok'), 1000)
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const connection = new Connection({
|
||||
url: new URL(`http://localhost:${port}`),
|
||||
agent: false
|
||||
})
|
||||
|
||||
setTimeout(() => {
|
||||
t.strictEqual(connection._openRequests, 1)
|
||||
connection.close()
|
||||
}, 500)
|
||||
|
||||
connection.request({
|
||||
path: '/hello',
|
||||
method: 'GET'
|
||||
}, (err, res) => {
|
||||
t.error(err)
|
||||
t.strictEqual(connection._openRequests, 0)
|
||||
|
||||
var payload = ''
|
||||
res.setEncoding('utf8')
|
||||
res.on('data', chunk => { payload += chunk })
|
||||
res.on('error', err => t.fail(err))
|
||||
res.on('end', () => {
|
||||
t.strictEqual(payload, 'ok')
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Url with auth', t => {
|
||||
t.plan(2)
|
||||
|
||||
@ -617,6 +654,14 @@ test('Connection id should not contain credentials', t => {
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Ipv6 support', t => {
|
||||
const connection = new Connection({
|
||||
url: new URL('http://[::1]:9200')
|
||||
})
|
||||
t.strictEqual(connection.buildRequestObject({}).hostname, '::1')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Should throw if the protocol is not http or https', t => {
|
||||
try {
|
||||
new Connection({ // eslint-disable-line
|
||||
@ -900,3 +945,18 @@ test('Abort a request asyncronously', t => {
|
||||
setImmediate(() => request.abort())
|
||||
})
|
||||
})
|
||||
|
||||
test('Should correctly resolve request pathname', t => {
|
||||
t.plan(1)
|
||||
|
||||
const connection = new Connection({
|
||||
url: new URL(`http://localhost:80/test`)
|
||||
})
|
||||
|
||||
t.strictEqual(
|
||||
connection.buildRequestObject({
|
||||
path: 'hello'
|
||||
}).pathname,
|
||||
'/test/hello'
|
||||
)
|
||||
})
|
||||
|
||||
8
test/unit/esm/index.mjs
Normal file
@ -0,0 +1,8 @@
import t from 'tap'
import { Client } from '../../../index.mjs'

t.test('esm support', t => {
  t.plan(1)
  const client = new Client({ node: 'http://localhost:9200' })
  t.strictEqual(client.name, 'elasticsearch-js')
})
19
test/unit/esm/index.test.js
Normal file
@ -0,0 +1,19 @@
'use strict'

const t = require('tap')
const semver = require('semver')

if (semver.lt(process.versions.node, '12.17.0')) {
  t.skip('Skip because Node version < 12.17.0')
  t.end()
} else {
  // Node v8 throws a `SyntaxError: Unexpected token import`
  // even if this branch is never touched in the code,
  // by using `new Function` we can avoid this issue.
  // eslint-disable-next-line
  new Function('module', 'return import(module)')('./index.mjs').catch((err) => {
    process.nextTick(() => {
      throw err
    })
  })
}
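The two new files above add and exercise the package's native ESM entry point, available on Node.js 12.17 and later. A hedged consumer-side sketch, assuming the usual `@elastic/elasticsearch` package name:

```ts
// In an .mjs file (or a project with "type": "module") on Node.js >= 12.17
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

client.info()
  .then(({ body }) => console.log(body))
  .catch(err => console.log(err))
```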
@ -5,6 +5,7 @@
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const semver = require('semver')
|
||||
const { Client, events } = require('../../index')
|
||||
const { TimeoutError } = require('../../lib/errors')
|
||||
const { connection: { MockConnection, MockConnectionTimeout } } = require('../utils')
|
||||
@ -54,6 +55,113 @@ test('Should emit a request event when a request is performed', t => {
|
||||
})
|
||||
})
|
||||
|
||||
test('Should emit a request event once when a request is performed', t => {
|
||||
t.plan(4)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.once(events.REQUEST, (err, request) => {
|
||||
t.error(err)
|
||||
t.match(request, {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
headers: null,
|
||||
warnings: null,
|
||||
meta: {
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
request: {
|
||||
params: {
|
||||
method: 'GET',
|
||||
path: '/test/_search',
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar'
|
||||
},
|
||||
options: {},
|
||||
id: 1
|
||||
},
|
||||
connection: {
|
||||
id: 'http://localhost:9200'
|
||||
},
|
||||
attempts: 0,
|
||||
aborted: false
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
})
|
||||
|
||||
test('Remove an event', { skip: semver.lt(process.versions.node, '10.0.0') }, t => {
|
||||
t.plan(4)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on(events.REQUEST, onRequest)
|
||||
function onRequest (err, request) {
|
||||
t.error(err)
|
||||
t.match(request, {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
headers: null,
|
||||
warnings: null,
|
||||
meta: {
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
request: {
|
||||
params: {
|
||||
method: 'GET',
|
||||
path: '/test/_search',
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar'
|
||||
},
|
||||
options: {},
|
||||
id: 1
|
||||
},
|
||||
connection: {
|
||||
id: 'http://localhost:9200'
|
||||
},
|
||||
attempts: 0,
|
||||
aborted: false
|
||||
}
|
||||
})
|
||||
|
||||
client.off('request', onRequest)
|
||||
}
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
})
|
||||
|
||||
test('Should emit a response event in case of a successful response', t => {
|
||||
t.plan(3)
|
||||
|
||||
@ -151,3 +259,19 @@ test('Should emit a response event with the error set', t => {
|
||||
t.ok(err instanceof TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
test('Emit event', t => {
|
||||
t.plan(2)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on(events.REQUEST, (err, request) => {
|
||||
t.error(err)
|
||||
t.deepEqual(request, { hello: 'world' })
|
||||
})
|
||||
|
||||
client.emit(events.REQUEST, null, { hello: 'world' })
|
||||
})
|
||||
|
||||
@ -7,6 +7,7 @@
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const FakeTimers = require('@sinonjs/fake-timers')
|
||||
const semver = require('semver')
|
||||
const { test } = require('tap')
|
||||
const { Client, errors } = require('../../../')
|
||||
@ -188,6 +189,51 @@ test('bulk index', t => {
|
||||
})
|
||||
})
|
||||
|
||||
t.test('refreshOnCompletion custom index', async t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
if (params.method === 'GET') {
|
||||
t.strictEqual(params.path, '/test/_refresh')
|
||||
return { body: { acknowledged: true } }
|
||||
} else {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.deepEqual(JSON.parse(payload), dataset[count++])
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
refreshOnCompletion: 'test',
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
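The test above shows that `refreshOnCompletion` on the bulk helper can now be a specific index name rather than only `true`, so only that index is refreshed after the run. A hedged usage sketch:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

async function run () {
  const stats = await client.helpers.bulk({
    datasource: [{ user: 'arya' }, { user: 'jon' }],
    // Refresh only the 'test' index once all documents have been indexed.
    refreshOnCompletion: 'test',
    onDocument (doc: Record<string, any>) {
      return { index: { _index: 'test' } }
    }
  })
  console.log(stats.successful)
}

run().catch(console.log)
```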
t.test('Should perform a bulk request (custom action)', async t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
@ -806,6 +852,53 @@ test('bulk update', t => {
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (dataset as string)', async t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { update: { _index: 'test', _id: count } })
|
||||
t.deepEqual(JSON.parse(payload), { doc: dataset[count++] })
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
let id = 0
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.map(d => JSON.stringify(d)),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return [{
|
||||
update: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
}
|
||||
}]
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
@ -855,10 +948,6 @@ test('bulk delete', t => {
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (failure)', async t => {
|
||||
if (semver.lt(process.versions.node, '10.0.0')) {
|
||||
t.skip('This test will not pass on Node v8')
|
||||
return
|
||||
}
|
||||
async function handler (req, res) {
|
||||
t.strictEqual(req.url, '/_bulk')
|
||||
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
|
||||
@ -987,3 +1076,118 @@ test('errors', t => {
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Flush interval', t => {
|
||||
t.test('Slow producer', async t => {
|
||||
const clock = FakeTimers.install({ toFake: ['setTimeout', 'clearTimeout'] })
|
||||
t.teardown(() => clock.uninstall())
|
||||
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.deepEqual(JSON.parse(payload), dataset[count++])
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: (async function * generator () {
|
||||
for (const chunk of dataset) {
|
||||
await clock.nextAsync()
|
||||
yield chunk
|
||||
}
|
||||
})(),
|
||||
flushBytes: 5000000,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Abort operation', async t => {
|
||||
const clock = FakeTimers.install({ toFake: ['setTimeout', 'clearTimeout'] })
|
||||
t.teardown(() => clock.uninstall())
|
||||
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.true(count < 2)
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.deepEqual(JSON.parse(payload), dataset[count++])
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
const b = client.helpers.bulk({
|
||||
datasource: (async function * generator () {
|
||||
for (const chunk of dataset) {
|
||||
await clock.nextAsync()
|
||||
if (chunk.user === 'tyrion') {
|
||||
// Needed otherwise in Node.js 10
|
||||
// the second request will never be sent
|
||||
await Promise.resolve()
|
||||
b.abort()
|
||||
}
|
||||
yield chunk
|
||||
}
|
||||
})(),
|
||||
flushBytes: 5000000,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
const result = await b
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 2,
|
||||
successful: 2,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: true
|
||||
})
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
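The `Abort operation` test above covers the other new piece of the bulk helper: calling `abort()` stops pulling from the datasource and the returned promise resolves with `aborted: true`. A hedged usage sketch; `myDocuments` is a placeholder for whatever iterable feeds the helper:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

const myDocuments: Array<Record<string, any>> = [{ hello: 'world' }] // placeholder datasource

const b = client.helpers.bulk({
  datasource: myDocuments,
  onDocument (doc: Record<string, any>) {
    return { index: { _index: 'test' } }
  }
})

// Abort from anywhere, e.g. on SIGINT; requests already in flight still complete.
process.on('SIGINT', () => b.abort())

b
  .then(stats => console.log(stats.aborted)) // true if abort() was called early
  .catch(err => console.log(err))
```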
743
test/unit/helpers/msearch.test.js
Normal file
@ -0,0 +1,743 @@
|
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information

'use strict'

const { test } = require('tap')
const { Client, errors } = require('../../../')
const { connection } = require('../../utils')
const FakeTimers = require('@sinonjs/fake-timers')

test('Basic', async t => {
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: [{
            status: 200,
            hits: {
              hits: [
                { _source: { one: 'one' } },
                { _source: { two: 'two' } },
                { _source: { three: 'three' } }
              ]
            }
          }]
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch({ operations: 1 })

  const result = await m.search(
    { index: 'test' },
    { query: { match: { foo: 'bar' } } }
  )

  t.deepEqual(result.body, {
    status: 200,
    hits: {
      hits: [
        { _source: { one: 'one' } },
        { _source: { two: 'two' } },
        { _source: { three: 'three' } }
      ]
    }
  })

  t.deepEqual(result.documents, [
    { one: 'one' },
    { two: 'two' },
    { three: 'three' }
  ])

  t.teardown(() => m.stop())
})

test('Multiple searches (inside async iterator)', t => {
  t.plan(6)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: [{
            status: 200,
            hits: {
              hits: [
                { _source: { one: 'one' } },
                { _source: { two: 'two' } },
                { _source: { three: 'three' } }
              ]
            }
          }, {
            status: 200,
            hits: {
              hits: [
                { _source: { four: 'four' } },
                { _source: { five: 'five' } },
                { _source: { six: 'six' } }
              ]
            }
          }]
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch({ operations: 2 })

  m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.error(err)
    t.deepEqual(result.body, {
      status: 200,
      hits: {
        hits: [
          { _source: { one: 'one' } },
          { _source: { two: 'two' } },
          { _source: { three: 'three' } }
        ]
      }
    })

    t.deepEqual(result.documents, [
      { one: 'one' },
      { two: 'two' },
      { three: 'three' }
    ])
  })

  m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.error(err)
    t.deepEqual(result.body, {
      status: 200,
      hits: {
        hits: [
          { _source: { four: 'four' } },
          { _source: { five: 'five' } },
          { _source: { six: 'six' } }
        ]
      }
    })

    t.deepEqual(result.documents, [
      { four: 'four' },
      { five: 'five' },
      { six: 'six' }
    ])
  })

  t.teardown(() => m.stop())
})

test('Multiple searches (async iterator exits)', t => {
  t.plan(6)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: [{
            status: 200,
            hits: {
              hits: [
                { _source: { one: 'one' } },
                { _source: { two: 'two' } },
                { _source: { three: 'three' } }
              ]
            }
          }, {
            status: 200,
            hits: {
              hits: [
                { _source: { four: 'four' } },
                { _source: { five: 'five' } },
                { _source: { six: 'six' } }
              ]
            }
          }]
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.search({ index: 'test' }, { query: {} }, (err, result) => {
    t.error(err)
    t.deepEqual(result.body, {
      status: 200,
      hits: {
        hits: [
          { _source: { one: 'one' } },
          { _source: { two: 'two' } },
          { _source: { three: 'three' } }
        ]
      }
    })

    t.deepEqual(result.documents, [
      { one: 'one' },
      { two: 'two' },
      { three: 'three' }
    ])
  })

  m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.error(err)
    t.deepEqual(result.body, {
      status: 200,
      hits: {
        hits: [
          { _source: { four: 'four' } },
          { _source: { five: 'five' } },
          { _source: { six: 'six' } }
        ]
      }
    })

    t.deepEqual(result.documents, [
      { four: 'four' },
      { five: 'five' },
      { six: 'six' }
    ])
  })

  setImmediate(() => m.stop())
})

test('Stop a msearch processor (promises)', async t => {
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {}
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch({ operations: 1 })

  m.stop()

  try {
    await m.search(
      { index: 'test' },
      { query: { match: { foo: 'bar' } } }
    )
  } catch (err) {
    t.strictEqual(err.message, 'The msearch processor has been stopped')
  }

  t.teardown(() => m.stop())
})

test('Stop a msearch processor (callbacks)', t => {
  t.plan(1)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {}
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.stop()

  m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.strictEqual(err.message, 'The msearch processor has been stopped')
  })
})

test('Bad header', t => {
  t.plan(2)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {}
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.search(null, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.strictEqual(err.message, 'The header should be an object')
  })

  m.search(null, { query: { match: { foo: 'bar' } } })
    .catch(err => {
      t.strictEqual(err.message, 'The header should be an object')
    })

  t.teardown(() => m.stop())
})

test('Bad body', t => {
  t.plan(2)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {}
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.search({ index: 'test' }, null, (err, result) => {
    t.strictEqual(err.message, 'The body should be an object')
  })

  m.search({ index: 'test' }, null)
    .catch(err => {
      t.strictEqual(err.message, 'The body should be an object')
    })

  t.teardown(() => m.stop())
})

test('Retry on 429', async t => {
  let count = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      if (count++ === 0) {
        return {
          body: {
            responses: [{
              status: 429,
              error: {}
            }]
          }
        }
      } else {
        return {
          body: {
            responses: [{
              status: 200,
              hits: {
                hits: [
                  { _source: { one: 'one' } },
                  { _source: { two: 'two' } },
                  { _source: { three: 'three' } }
                ]
              }
            }]
          }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch({ operations: 1, wait: 10 })

  const result = await m.search(
    { index: 'test' },
    { query: { match: { foo: 'bar' } } }
  )

  t.deepEqual(result.body, {
    status: 200,
    hits: {
      hits: [
        { _source: { one: 'one' } },
        { _source: { two: 'two' } },
        { _source: { three: 'three' } }
      ]
    }
  })

  t.deepEqual(result.documents, [
    { one: 'one' },
    { two: 'two' },
    { three: 'three' }
  ])

  t.teardown(() => m.stop())
})
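The 429 test above exercises the helper's retry loop: a throttled response is retried after the configured wait. A hedged sketch of tuning those options when creating the helper (option names as used in the test above; the values are placeholders, and `client` is assumed to be an existing Client instance):

// Hypothetical configuration sketch, not part of the diff.
const m = client.helpers.msearch({
  operations: 5, // flush once this many searches are buffered
  wait: 100      // milliseconds to wait before retrying throttled (429) responses
})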

test('Single search errors', async t => {
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: [{
            status: 400,
            error: { foo: 'bar' }
          }]
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch({ operations: 1 })

  try {
    await m.search(
      { index: 'test' },
      { query: { match: { foo: 'bar' } } }
    )
  } catch (err) {
    t.true(err instanceof errors.ResponseError)
  }

  t.teardown(() => m.stop())
})

test('Entire msearch fails', t => {
  t.plan(4)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        statusCode: 500,
        body: {
          status: 500,
          error: { foo: 'bar' }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch({ operations: 1 })

  m.search({ index: 'test' }, { query: {} }, (err, result) => {
    t.true(err instanceof errors.ResponseError)
    t.deepEqual(result.documents, [])
  })

  m.search({ index: 'test' }, { query: {} }, (err, result) => {
    t.true(err instanceof errors.ResponseError)
    t.deepEqual(result.documents, [])
  })

  t.teardown(() => m.stop())
})

test('Resolves the msearch helper', t => {
  t.plan(1)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {}
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.stop()

  m.then(
    () => t.pass('called'),
    e => t.fail('Should not fail')
  )

  m.catch(e => t.fail('Should not fail'))
})

test('Stop the msearch helper with an error', t => {
  t.plan(3)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {}
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.stop(new Error('kaboom'))

  m.then(
    () => t.fail('Should fail'),
    err => t.is(err.message, 'kaboom')
  )

  m.catch(err => t.is(err.message, 'kaboom'))

  m.search({ index: 'test' }, { query: {} }, (err, result) => {
    t.is(err.message, 'kaboom')
  })
})

test('Multiple searches (concurrency = 1)', t => {
  t.plan(6)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: [{
            status: 200,
            hits: {
              hits: [
                { _source: { one: 'one' } },
                { _source: { two: 'two' } },
                { _source: { three: 'three' } }
              ]
            }
          }]
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch({ operations: 1, concurrency: 1 })

  m.search({ index: 'test' }, { query: {} }, (err, result) => {
    t.error(err)
    t.deepEqual(result.body, {
      status: 200,
      hits: {
        hits: [
          { _source: { one: 'one' } },
          { _source: { two: 'two' } },
          { _source: { three: 'three' } }
        ]
      }
    })

    t.deepEqual(result.documents, [
      { one: 'one' },
      { two: 'two' },
      { three: 'three' }
    ])
  })

  m.search({ index: 'test' }, { query: {} }, (err, result) => {
    t.error(err)
    t.deepEqual(result.body, {
      status: 200,
      hits: {
        hits: [
          { _source: { one: 'one' } },
          { _source: { two: 'two' } },
          { _source: { three: 'three' } }
        ]
      }
    })

    t.deepEqual(result.documents, [
      { one: 'one' },
      { two: 'two' },
      { three: 'three' }
    ])
  })

  t.teardown(() => m.stop())
})

test('Flush interval', t => {
  t.plan(4)
  const clock = FakeTimers.install({ toFake: ['setTimeout', 'clearTimeout'] })
  t.teardown(() => clock.uninstall())

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: [{
            status: 200,
            hits: {
              hits: [
                { _source: { one: 'one' } },
                { _source: { two: 'two' } },
                { _source: { three: 'three' } }
              ]
            }
          }, {
            status: 200,
            hits: {
              hits: [
                { _source: { four: 'four' } },
                { _source: { five: 'five' } },
                { _source: { six: 'six' } }
              ]
            }
          }]
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.error(err)
    t.is(result.documents.length, 3)
  })

  m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.error(err)
    t.is(result.documents.length, 3)
  })

  setImmediate(clock.next)

  t.teardown(() => m.stop())
})

test('Flush interval - early stop', t => {
  t.plan(3)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: [{
            status: 200,
            hits: {
              hits: [
                { _source: { one: 'one' } },
                { _source: { two: 'two' } },
                { _source: { three: 'three' } }
              ]
            }
          }]
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()

  m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
    t.error(err)
    t.is(result.documents.length, 3)
  })

  setImmediate(() => {
    m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
      t.ok(err instanceof errors.ConfigurationError)
    })
  })

  m.stop()
})

test('Stop should resolve the helper', t => {
  t.plan(1)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: []
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()
  setImmediate(m.stop)

  m.then(() => t.pass('Called'))
    .catch(() => t.fail('Should not fail'))
})

test('Stop should resolve the helper (error)', t => {
  t.plan(3)

  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          responses: []
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const m = client.helpers.msearch()
  setImmediate(m.stop, new Error('kaboom'))

  m.then(() => t.fail('Should not fail'))
    .catch(err => t.is(err.message, 'kaboom'))

  m.catch(err => t.is(err.message, 'kaboom'))

  m.then(() => t.fail('Should not fail'), err => t.is(err.message, 'kaboom'))
})
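Taken together, the new file above covers buffering, flushing, stopping, and error propagation of the msearch helper. As a reference point, a minimal sketch of calling it from application code could look like the following; the index name and query are placeholders, and `client` is assumed to be an existing Client instance.

// Hypothetical usage sketch of the msearch helper (not part of the diff).
const m = client.helpers.msearch({ operations: 5 })

// Each call is buffered and sent as part of a single _msearch request once
// enough operations have accumulated or the flush interval elapses.
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
  .then(result => console.log(result.documents))
  .catch(err => console.error(err))

// Stop accepting new searches and resolve the helper once pending work completes.
setTimeout(() => m.stop(), 1000)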
@ -183,6 +183,8 @@ test('Scroll search (retry throws later)', async t => {
  var count = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      // filter_path should not be added if is not already present
      t.strictEqual(params.querystring, 'scroll=1m')
      if (count > 1) {
        count += 1
        return { body: {}, statusCode: 429 }
@ -232,6 +234,7 @@ test('Scroll search documents', async t => {
  var count = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      t.strictEqual(params.querystring, 'filter_path=hits.hits._source%2C_scroll_id&scroll=1m')
      return {
        body: {
          _scroll_id: count === 3 ? undefined : 'id',

@ -11,6 +11,7 @@ const { connection } = require('../../utils')
test('Search should have an additional documents property', async t => {
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      t.strictEqual(params.querystring, 'filter_path=hits.hits._source')
      return {
        body: {
          hits: {
@ -44,6 +45,7 @@ test('Search should have an additional documents property', async t => {
test('kGetHits fallback', async t => {
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      t.strictEqual(params.querystring, 'filter_path=hits.hits._source')
      return { body: {} }
    }
  })
@ -59,3 +61,73 @@ test('kGetHits fallback', async t => {
  })
  t.deepEqual(result, [])
})

test('Merge filter paths (snake_case)', async t => {
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      t.strictEqual(params.querystring, 'filter_path=foo%2Chits.hits._source')
      return {
        body: {
          hits: {
            hits: [
              { _source: { one: 'one' } },
              { _source: { two: 'two' } },
              { _source: { three: 'three' } }
            ]
          }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const result = await client.helpers.search({
    index: 'test',
    filter_path: 'foo',
    body: { foo: 'bar' }
  })
  t.deepEqual(result, [
    { one: 'one' },
    { two: 'two' },
    { three: 'three' }
  ])
})

test('Merge filter paths (camelCase)', async t => {
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      t.strictEqual(params.querystring, 'filter_path=foo%2Chits.hits._source')
      return {
        body: {
          hits: {
            hits: [
              { _source: { one: 'one' } },
              { _source: { two: 'two' } },
              { _source: { three: 'three' } }
            ]
          }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const result = await client.helpers.search({
    index: 'test',
    filterPath: 'foo',
    body: { foo: 'bar' }
  })
  t.deepEqual(result, [
    { one: 'one' },
    { two: 'two' },
    { three: 'three' }
  ])
})

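The two tests just added assert that a user-supplied filter_path (or camelCase filterPath) is merged with the hits.hits._source filter the search helper adds on its own. A hedged sketch of the call shape, with a placeholder index and query, assuming an existing `client` inside an async function:

// Hypothetical usage sketch, not part of the diff.
const docs = await client.helpers.search({
  index: 'my-index',
  filter_path: 'hits.hits._id', // merged with hits.hits._source by the helper
  body: { query: { match: { hello: 'world' } } }
})
// `docs` is the array of _source objects extracted from the response
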
@ -6,7 +6,7 @@

const { test } = require('tap')
const { URL } = require('url')
const lolex = require('lolex')
const FakeTimers = require('@sinonjs/fake-timers')
const { createGunzip } = require('zlib')
const os = require('os')
const intoStream = require('into-stream')
@ -1072,7 +1072,7 @@ test('sniff', t => {
  t.test('sniffInterval', t => {
    t.plan(6)

    const clock = lolex.install({ toFake: ['Date'] })
    const clock = FakeTimers.install({ toFake: ['Date'] })
    t.teardown(() => clock.uninstall())

    class MyTransport extends Transport {