Compare commits
11 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 8c0cd05ecb | |||
| b423288436 | |||
| 1e4d2b6b33 | |||
| 5b6a7f01a8 | |||
| 12dfc31c8d | |||
| 9df5e6f713 | |||
| d6d7b5f14c | |||
| 718630afc9 | |||
| 5a6f5573e8 | |||
| 187c229ba7 | |||
| 3d4323043d |
@ -1,6 +1,6 @@
|
||||
---
|
||||
STACK_VERSION:
|
||||
- 7.x-SNAPSHOT
|
||||
- 7.14.0-SNAPSHOT
|
||||
|
||||
NODE_JS_VERSION:
|
||||
- 16
|
||||
|
||||
29
.github/workflows/nodejs.yml
vendored
29
.github/workflows/nodejs.yml
vendored
@ -61,7 +61,7 @@ jobs:
|
||||
- name: Runs Elasticsearch
|
||||
uses: elastic/elastic-github-actions/elasticsearch@master
|
||||
with:
|
||||
stack-version: 7.x-SNAPSHOT
|
||||
stack-version: 7.14.0-SNAPSHOT
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v1
|
||||
@ -93,7 +93,7 @@ jobs:
|
||||
- name: Runs Elasticsearch
|
||||
uses: elastic/elastic-github-actions/elasticsearch@master
|
||||
with:
|
||||
stack-version: 8.0.0-SNAPSHOT
|
||||
stack-version: 7.14.0-SNAPSHOT
|
||||
|
||||
- name: Use Node.js 14.x
|
||||
uses: actions/setup-node@v1
|
||||
@ -119,34 +119,13 @@ jobs:
|
||||
npm start --prefix test/bundlers/rollup-test
|
||||
npm start --prefix test/bundlers/webpack-test
|
||||
|
||||
mock-support:
|
||||
name: Mock support
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Use Node.js 14.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 14.x
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
npm install
|
||||
npm install --prefix test/mock
|
||||
|
||||
- name: Run test
|
||||
run: |
|
||||
npm test --prefix test/mock
|
||||
|
||||
code-coverage:
|
||||
name: Code coverage
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [14.x]
|
||||
node-version: [12.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@ -180,7 +159,7 @@ jobs:
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [14.x]
|
||||
node-version: [12.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
11
README.md
11
README.md
@ -28,7 +28,7 @@ npm install @elastic/elasticsearch
|
||||
|
||||
### Node.js support
|
||||
|
||||
NOTE: The minimum supported version of Node.js is `v12`.
|
||||
NOTE: The minimum supported version of Node.js is `v10`.
|
||||
|
||||
The client versioning follows the Elastc Stack versioning, this means that
|
||||
major, minor, and patch releases are done following a precise schedule that
|
||||
@ -49,13 +49,14 @@ of `^7.10.0`).
|
||||
|
||||
| Node.js Version | Node.js EOL date | End of support |
|
||||
| --------------- |------------------| ---------------------- |
|
||||
| `8.x` | `December 2019` | `7.11` (early 2021) |
|
||||
| `10.x` | `April 2021` | `7.12` (mid 2021) |
|
||||
| `8.x` | `December 2019` | `7.11` (early 2021) |
|
||||
| `10.x` | `Apri 2021` | `7.12` (mid 2021) |
|
||||
|
||||
### Compatibility
|
||||
|
||||
Language clients are forward compatible; meaning that clients support communicating with greater or equal minor versions of Elasticsearch.
|
||||
Elasticsearch language clients are only backwards compatible with default distributions and without guarantees made.
|
||||
Elastic language clients are guaranteed to be able to communicate with Elasticsearch or Elastic solutions running on the same major version and greater or equal minor version.
|
||||
|
||||
Language clients are forward compatible; meaning that clients support communicating with greater minor versions of Elasticsearch. Elastic language clients are not guaranteed to be backwards compatible.
|
||||
|
||||
| Elasticsearch Version | Client Version |
|
||||
| --------------------- |----------------|
|
||||
|
||||
@ -23,8 +23,8 @@
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
|
||||
const acceptedQuerystring = ['timeout', 'master_timeout', 'ignore_unavailable', 'allow_no_indices', 'expand_wildcards', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'index', 'fielddata', 'fields', 'query', 'request', 'wait_for_active_shards', 'include_type_name', 'run_expensive_tasks', 'flush', 'local', 'flat_settings', 'include_defaults', 'force', 'wait_if_ongoing', 'max_num_segments', 'only_expunge_deletes', 'create', 'cause', 'write_index_only', 'preserve_existing', 'order', 'detailed', 'active_only', 'dry_run', 'verbose', 'status', 'copy_settings', 'completion_fields', 'fielddata_fields', 'groups', 'level', 'types', 'include_segment_file_sizes', 'include_unloaded_segments', 'forbid_closed_indices', 'wait_for_completion', 'only_ancient_segments', 'explain', 'q', 'analyzer', 'analyze_wildcard', 'default_operator', 'df', 'lenient', 'rewrite', 'all_shards']
|
||||
const snakeCase = { masterTimeout: 'master_timeout', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', errorTrace: 'error_trace', filterPath: 'filter_path', waitForActiveShards: 'wait_for_active_shards', includeTypeName: 'include_type_name', runExpensiveTasks: 'run_expensive_tasks', flatSettings: 'flat_settings', includeDefaults: 'include_defaults', waitIfOngoing: 'wait_if_ongoing', maxNumSegments: 'max_num_segments', onlyExpungeDeletes: 'only_expunge_deletes', writeIndexOnly: 'write_index_only', preserveExisting: 'preserve_existing', activeOnly: 'active_only', dryRun: 'dry_run', copySettings: 'copy_settings', completionFields: 'completion_fields', fielddataFields: 'fielddata_fields', includeSegmentFileSizes: 'include_segment_file_sizes', includeUnloadedSegments: 'include_unloaded_segments', forbidClosedIndices: 'forbid_closed_indices', waitForCompletion: 'wait_for_completion', onlyAncientSegments: 'only_ancient_segments', analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', allShards: 'all_shards' }
|
||||
const acceptedQuerystring = ['timeout', 'master_timeout', 'ignore_unavailable', 'allow_no_indices', 'expand_wildcards', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'index', 'fielddata', 'fields', 'query', 'request', 'wait_for_active_shards', 'include_type_name', 'local', 'flat_settings', 'include_defaults', 'force', 'wait_if_ongoing', 'flush', 'max_num_segments', 'only_expunge_deletes', 'create', 'cause', 'write_index_only', 'preserve_existing', 'order', 'detailed', 'active_only', 'dry_run', 'verbose', 'status', 'copy_settings', 'completion_fields', 'fielddata_fields', 'groups', 'level', 'types', 'include_segment_file_sizes', 'include_unloaded_segments', 'forbid_closed_indices', 'wait_for_completion', 'only_ancient_segments', 'explain', 'q', 'analyzer', 'analyze_wildcard', 'default_operator', 'df', 'lenient', 'rewrite', 'all_shards']
|
||||
const snakeCase = { masterTimeout: 'master_timeout', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', errorTrace: 'error_trace', filterPath: 'filter_path', waitForActiveShards: 'wait_for_active_shards', includeTypeName: 'include_type_name', flatSettings: 'flat_settings', includeDefaults: 'include_defaults', waitIfOngoing: 'wait_if_ongoing', maxNumSegments: 'max_num_segments', onlyExpungeDeletes: 'only_expunge_deletes', writeIndexOnly: 'write_index_only', preserveExisting: 'preserve_existing', activeOnly: 'active_only', dryRun: 'dry_run', copySettings: 'copy_settings', completionFields: 'completion_fields', fielddataFields: 'fielddata_fields', includeSegmentFileSizes: 'include_segment_file_sizes', includeUnloadedSegments: 'include_unloaded_segments', forbidClosedIndices: 'forbid_closed_indices', waitForCompletion: 'wait_for_completion', onlyAncientSegments: 'only_ancient_segments', analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', allShards: 'all_shards' }
|
||||
|
||||
function IndicesApi (transport, ConfigurationError) {
|
||||
this.transport = transport
|
||||
@ -414,33 +414,6 @@ IndicesApi.prototype.deleteTemplate = function indicesDeleteTemplateApi (params,
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
IndicesApi.prototype.diskUsage = function indicesDiskUsageApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
// check required parameters
|
||||
if (params.index == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: index')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
let { method, body, index, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_disk_usage'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
IndicesApi.prototype.exists = function indicesExistsApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
@ -591,33 +564,6 @@ IndicesApi.prototype.existsType = function indicesExistsTypeApi (params, options
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
IndicesApi.prototype.fieldUsageStats = function indicesFieldUsageStatsApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
// check required parameters
|
||||
if (params.index == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: index')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
let { method, body, index, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_field_usage_stats'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
IndicesApi.prototype.flush = function indicesFlushApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
@ -1719,12 +1665,10 @@ Object.defineProperties(IndicesApi.prototype, {
|
||||
delete_data_stream: { get () { return this.deleteDataStream } },
|
||||
delete_index_template: { get () { return this.deleteIndexTemplate } },
|
||||
delete_template: { get () { return this.deleteTemplate } },
|
||||
disk_usage: { get () { return this.diskUsage } },
|
||||
exists_alias: { get () { return this.existsAlias } },
|
||||
exists_index_template: { get () { return this.existsIndexTemplate } },
|
||||
exists_template: { get () { return this.existsTemplate } },
|
||||
exists_type: { get () { return this.existsType } },
|
||||
field_usage_stats: { get () { return this.fieldUsageStats } },
|
||||
flush_synced: { get () { return this.flushSynced } },
|
||||
get_alias: { get () { return this.getAlias } },
|
||||
get_data_stream: { get () { return this.getDataStream } },
|
||||
|
||||
@ -23,78 +23,14 @@
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
|
||||
const acceptedQuerystring = ['pretty', 'human', 'error_trace', 'source', 'filter_path', 'interval', 'snapshots', 'threads', 'ignore_idle_threads', 'type', 'timeout', 'flat_settings', 'completion_fields', 'fielddata_fields', 'fields', 'groups', 'level', 'types', 'include_segment_file_sizes', 'include_unloaded_segments']
|
||||
const snakeCase = { errorTrace: 'error_trace', filterPath: 'filter_path', ignoreIdleThreads: 'ignore_idle_threads', flatSettings: 'flat_settings', completionFields: 'completion_fields', fielddataFields: 'fielddata_fields', includeSegmentFileSizes: 'include_segment_file_sizes', includeUnloadedSegments: 'include_unloaded_segments' }
|
||||
const acceptedQuerystring = ['interval', 'snapshots', 'threads', 'ignore_idle_threads', 'type', 'timeout', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'flat_settings', 'completion_fields', 'fielddata_fields', 'fields', 'groups', 'level', 'types', 'include_segment_file_sizes', 'include_unloaded_segments']
|
||||
const snakeCase = { ignoreIdleThreads: 'ignore_idle_threads', errorTrace: 'error_trace', filterPath: 'filter_path', flatSettings: 'flat_settings', completionFields: 'completion_fields', fielddataFields: 'fielddata_fields', includeSegmentFileSizes: 'include_segment_file_sizes', includeUnloadedSegments: 'include_unloaded_segments' }
|
||||
|
||||
function NodesApi (transport, ConfigurationError) {
|
||||
this.transport = transport
|
||||
this[kConfigurationError] = ConfigurationError
|
||||
}
|
||||
|
||||
NodesApi.prototype.clearMeteringArchive = function nodesClearMeteringArchiveApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
// check required parameters
|
||||
if (params.node_id == null && params.nodeId == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: node_id or nodeId')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params.max_archive_version == null && params.maxArchiveVersion == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: max_archive_version or maxArchiveVersion')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// check required url components
|
||||
if ((params.max_archive_version != null || params.maxArchiveVersion != null) && ((params.node_id == null && params.nodeId == null))) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter of the url: node_id')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
let { method, body, nodeId, node_id, maxArchiveVersion, max_archive_version, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + '_repositories_metering' + '/' + encodeURIComponent(max_archive_version || maxArchiveVersion)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
NodesApi.prototype.getMeteringInfo = function nodesGetMeteringInfoApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
// check required parameters
|
||||
if (params.node_id == null && params.nodeId == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: node_id or nodeId')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
let { method, body, nodeId, node_id, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + '_repositories_metering'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
NodesApi.prototype.hotThreads = function nodesHotThreadsApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
@ -259,8 +195,6 @@ NodesApi.prototype.usage = function nodesUsageApi (params, options, callback) {
|
||||
}
|
||||
|
||||
Object.defineProperties(NodesApi.prototype, {
|
||||
clear_metering_archive: { get () { return this.clearMeteringArchive } },
|
||||
get_metering_info: { get () { return this.getMeteringInfo } },
|
||||
hot_threads: { get () { return this.hotThreads } },
|
||||
reload_secure_settings: { get () { return this.reloadSecureSettings } }
|
||||
})
|
||||
|
||||
@ -1,87 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
|
||||
const acceptedQuerystring = ['exact_bounds', 'extent', 'grid_precision', 'grid_type', 'size', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
|
||||
const snakeCase = { exactBounds: 'exact_bounds', gridPrecision: 'grid_precision', gridType: 'grid_type', errorTrace: 'error_trace', filterPath: 'filter_path' }
|
||||
|
||||
function searchMvtApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
// check required parameters
|
||||
if (params.index == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: index')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params.field == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: field')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params.zoom == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: zoom')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params.x == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: x')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params.y == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: y')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// check required url components
|
||||
if (params.y != null && (params.x == null || params.zoom == null || params.field == null || params.index == null)) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter of the url: x, zoom, field, index')
|
||||
return handleError(err, callback)
|
||||
} else if (params.x != null && (params.zoom == null || params.field == null || params.index == null)) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter of the url: zoom, field, index')
|
||||
return handleError(err, callback)
|
||||
} else if (params.zoom != null && (params.field == null || params.index == null)) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter of the url: field, index')
|
||||
return handleError(err, callback)
|
||||
} else if (params.field != null && (params.index == null)) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter of the url: index')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
let { method, body, index, field, zoom, x, y, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
if (method == null) method = body == null ? 'GET' : 'POST'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_mvt' + '/' + encodeURIComponent(field) + '/' + encodeURIComponent(zoom) + '/' + encodeURIComponent(x) + '/' + encodeURIComponent(y)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
module.exports = searchMvtApi
|
||||
@ -1032,27 +1032,6 @@ SecurityApi.prototype.putUser = function securityPutUserApi (params, options, ca
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
SecurityApi.prototype.queryApiKeys = function securityQueryApiKeysApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
let { method, body, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
if (method == null) method = body == null ? 'GET' : 'POST'
|
||||
path = '/' + '_security' + '/' + '_query' + '/' + 'api_key'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
|
||||
SecurityApi.prototype.samlAuthenticate = function securitySamlAuthenticateApi (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
@ -1249,7 +1228,6 @@ Object.defineProperties(SecurityApi.prototype, {
|
||||
put_role: { get () { return this.putRole } },
|
||||
put_role_mapping: { get () { return this.putRoleMapping } },
|
||||
put_user: { get () { return this.putUser } },
|
||||
query_api_keys: { get () { return this.queryApiKeys } },
|
||||
saml_authenticate: { get () { return this.samlAuthenticate } },
|
||||
saml_complete_logout: { get () { return this.samlCompleteLogout } },
|
||||
saml_invalidate: { get () { return this.samlInvalidate } },
|
||||
|
||||
@ -74,7 +74,6 @@ const RollupApi = require('./api/rollup')
|
||||
const scriptsPainlessExecuteApi = require('./api/scripts_painless_execute')
|
||||
const scrollApi = require('./api/scroll')
|
||||
const searchApi = require('./api/search')
|
||||
const searchMvtApi = require('./api/search_mvt')
|
||||
const searchShardsApi = require('./api/search_shards')
|
||||
const searchTemplateApi = require('./api/search_template')
|
||||
const SearchableSnapshotsApi = require('./api/searchable_snapshots')
|
||||
@ -201,7 +200,6 @@ ESAPI.prototype.renderSearchTemplate = renderSearchTemplateApi
|
||||
ESAPI.prototype.scriptsPainlessExecute = scriptsPainlessExecuteApi
|
||||
ESAPI.prototype.scroll = scrollApi
|
||||
ESAPI.prototype.search = searchApi
|
||||
ESAPI.prototype.searchMvt = searchMvtApi
|
||||
ESAPI.prototype.searchShards = searchShardsApi
|
||||
ESAPI.prototype.searchTemplate = searchTemplateApi
|
||||
ESAPI.prototype.termsEnum = termsEnumApi
|
||||
@ -399,7 +397,6 @@ Object.defineProperties(ESAPI.prototype, {
|
||||
}
|
||||
},
|
||||
scripts_painless_execute: { get () { return this.scriptsPainlessExecute } },
|
||||
search_mvt: { get () { return this.searchMvt } },
|
||||
search_shards: { get () { return this.searchShards } },
|
||||
search_template: { get () { return this.searchTemplate } },
|
||||
searchableSnapshots: {
|
||||
|
||||
4
api/kibana.d.ts
vendored
4
api/kibana.d.ts
vendored
@ -345,7 +345,7 @@ interface KibanaClient {
|
||||
info<TContext = unknown>(params?: T.MlInfoRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlInfoResponse, TContext>>
|
||||
openJob<TContext = unknown>(params: T.MlOpenJobRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlOpenJobResponse, TContext>>
|
||||
postCalendarEvents<TContext = unknown>(params?: T.MlPostCalendarEventsRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPostCalendarEventsResponse, TContext>>
|
||||
postData<TContext = unknown>(params: T.MlPostJobDataRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPostJobDataResponse, TContext>>
|
||||
postData<TContext = unknown>(params: T.MlPostDataRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPostDataResponse, TContext>>
|
||||
previewDataFrameAnalytics<TContext = unknown>(params?: T.MlPreviewDataFrameAnalyticsRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPreviewDataFrameAnalyticsResponse, TContext>>
|
||||
previewDatafeed<TDocument = unknown, TContext = unknown>(params?: T.MlPreviewDatafeedRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPreviewDatafeedResponse<TDocument>, TContext>>
|
||||
putCalendar<TContext = unknown>(params: T.MlPutCalendarRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPutCalendarResponse, TContext>>
|
||||
@ -366,7 +366,7 @@ interface KibanaClient {
|
||||
updateDataFrameAnalytics<TContext = unknown>(params: T.MlUpdateDataFrameAnalyticsRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpdateDataFrameAnalyticsResponse, TContext>>
|
||||
updateDatafeed<TContext = unknown>(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TODO, TContext>>
|
||||
updateFilter<TContext = unknown>(params: T.MlUpdateFilterRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpdateFilterResponse, TContext>>
|
||||
updateJob<TContext = unknown>(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TODO, TContext>>
|
||||
updateJob<TContext = unknown>(params: T.MlUpdateJobRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpdateJobResponse, TContext>>
|
||||
updateModelSnapshot<TContext = unknown>(params: T.MlUpdateModelSnapshotRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpdateModelSnapshotResponse, TContext>>
|
||||
upgradeJobSnapshot<TContext = unknown>(params: T.MlUpgradeJobSnapshotRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpgradeJobSnapshotResponse, TContext>>
|
||||
validate<TContext = unknown>(params?: T.MlValidateJobRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlValidateJobResponse, TContext>>
|
||||
|
||||
13
api/new.d.ts
vendored
13
api/new.d.ts
vendored
@ -950,9 +950,9 @@ declare class Client {
|
||||
postCalendarEvents<TContext = unknown>(callback: callbackFn<T.MlPostCalendarEventsResponse, TContext>): TransportRequestCallback
|
||||
postCalendarEvents<TContext = unknown>(params: T.MlPostCalendarEventsRequest, callback: callbackFn<T.MlPostCalendarEventsResponse, TContext>): TransportRequestCallback
|
||||
postCalendarEvents<TContext = unknown>(params: T.MlPostCalendarEventsRequest, options: TransportRequestOptions, callback: callbackFn<T.MlPostCalendarEventsResponse, TContext>): TransportRequestCallback
|
||||
postData<TContext = unknown>(params: T.MlPostJobDataRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPostJobDataResponse, TContext>>
|
||||
postData<TContext = unknown>(params: T.MlPostJobDataRequest, callback: callbackFn<T.MlPostJobDataResponse, TContext>): TransportRequestCallback
|
||||
postData<TContext = unknown>(params: T.MlPostJobDataRequest, options: TransportRequestOptions, callback: callbackFn<T.MlPostJobDataResponse, TContext>): TransportRequestCallback
|
||||
postData<TContext = unknown>(params: T.MlPostDataRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPostDataResponse, TContext>>
|
||||
postData<TContext = unknown>(params: T.MlPostDataRequest, callback: callbackFn<T.MlPostDataResponse, TContext>): TransportRequestCallback
|
||||
postData<TContext = unknown>(params: T.MlPostDataRequest, options: TransportRequestOptions, callback: callbackFn<T.MlPostDataResponse, TContext>): TransportRequestCallback
|
||||
previewDataFrameAnalytics<TContext = unknown>(params?: T.MlPreviewDataFrameAnalyticsRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlPreviewDataFrameAnalyticsResponse, TContext>>
|
||||
previewDataFrameAnalytics<TContext = unknown>(callback: callbackFn<T.MlPreviewDataFrameAnalyticsResponse, TContext>): TransportRequestCallback
|
||||
previewDataFrameAnalytics<TContext = unknown>(params: T.MlPreviewDataFrameAnalyticsRequest, callback: callbackFn<T.MlPreviewDataFrameAnalyticsResponse, TContext>): TransportRequestCallback
|
||||
@ -1018,10 +1018,9 @@ declare class Client {
|
||||
updateFilter<TContext = unknown>(params: T.MlUpdateFilterRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpdateFilterResponse, TContext>>
|
||||
updateFilter<TContext = unknown>(params: T.MlUpdateFilterRequest, callback: callbackFn<T.MlUpdateFilterResponse, TContext>): TransportRequestCallback
|
||||
updateFilter<TContext = unknown>(params: T.MlUpdateFilterRequest, options: TransportRequestOptions, callback: callbackFn<T.MlUpdateFilterResponse, TContext>): TransportRequestCallback
|
||||
updateJob<TContext = unknown>(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TODO, TContext>>
|
||||
updateJob<TContext = unknown>(callback: callbackFn<TODO, TContext>): TransportRequestCallback
|
||||
updateJob<TContext = unknown>(params: TODO, callback: callbackFn<TODO, TContext>): TransportRequestCallback
|
||||
updateJob<TContext = unknown>(params: TODO, options: TransportRequestOptions, callback: callbackFn<TODO, TContext>): TransportRequestCallback
|
||||
updateJob<TContext = unknown>(params: T.MlUpdateJobRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpdateJobResponse, TContext>>
|
||||
updateJob<TContext = unknown>(params: T.MlUpdateJobRequest, callback: callbackFn<T.MlUpdateJobResponse, TContext>): TransportRequestCallback
|
||||
updateJob<TContext = unknown>(params: T.MlUpdateJobRequest, options: TransportRequestOptions, callback: callbackFn<T.MlUpdateJobResponse, TContext>): TransportRequestCallback
|
||||
updateModelSnapshot<TContext = unknown>(params: T.MlUpdateModelSnapshotRequest, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<T.MlUpdateModelSnapshotResponse, TContext>>
|
||||
updateModelSnapshot<TContext = unknown>(params: T.MlUpdateModelSnapshotRequest, callback: callbackFn<T.MlUpdateModelSnapshotResponse, TContext>): TransportRequestCallback
|
||||
updateModelSnapshot<TContext = unknown>(params: T.MlUpdateModelSnapshotRequest, options: TransportRequestOptions, callback: callbackFn<T.MlUpdateModelSnapshotResponse, TContext>): TransportRequestCallback
|
||||
|
||||
48
api/requestParams.d.ts
vendored
48
api/requestParams.d.ts
vendored
@ -1039,15 +1039,6 @@ export interface IndicesDeleteTemplate extends Generic {
|
||||
master_timeout?: string;
|
||||
}
|
||||
|
||||
export interface IndicesDiskUsage extends Generic {
|
||||
index: string;
|
||||
run_expensive_tasks?: boolean;
|
||||
flush?: boolean;
|
||||
ignore_unavailable?: boolean;
|
||||
allow_no_indices?: boolean;
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
}
|
||||
|
||||
export interface IndicesExists extends Generic {
|
||||
index: string | string[];
|
||||
local?: boolean;
|
||||
@ -1090,14 +1081,6 @@ export interface IndicesExistsType extends Generic {
|
||||
local?: boolean;
|
||||
}
|
||||
|
||||
export interface IndicesFieldUsageStats extends Generic {
|
||||
index: string;
|
||||
fields?: string | string[];
|
||||
ignore_unavailable?: boolean;
|
||||
allow_no_indices?: boolean;
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
}
|
||||
|
||||
export interface IndicesFlush extends Generic {
|
||||
index?: string | string[];
|
||||
force?: boolean;
|
||||
@ -1864,10 +1847,6 @@ export interface MlPutFilter<T = RequestBody> extends Generic {
|
||||
|
||||
export interface MlPutJob<T = RequestBody> extends Generic {
|
||||
job_id: string;
|
||||
ignore_unavailable?: boolean;
|
||||
allow_no_indices?: boolean;
|
||||
ignore_throttled?: boolean;
|
||||
expand_wildcards?: 'open' | 'closed' | 'hidden' | 'none' | 'all';
|
||||
body: T;
|
||||
}
|
||||
|
||||
@ -2025,15 +2004,6 @@ export interface Mtermvectors<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface NodesClearMeteringArchive extends Generic {
|
||||
node_id: string | string[];
|
||||
max_archive_version: number;
|
||||
}
|
||||
|
||||
export interface NodesGetMeteringInfo extends Generic {
|
||||
node_id: string | string[];
|
||||
}
|
||||
|
||||
export interface NodesHotThreads extends Generic {
|
||||
node_id?: string | string[];
|
||||
interval?: string;
|
||||
@ -2236,20 +2206,6 @@ export interface Search<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface SearchMvt<T = RequestBody> extends Generic {
|
||||
index: string | string[];
|
||||
field: string;
|
||||
zoom: number;
|
||||
x: number;
|
||||
y: number;
|
||||
exact_bounds?: boolean;
|
||||
extent?: number;
|
||||
grid_precision?: number;
|
||||
grid_type?: 'grid' | 'point';
|
||||
size?: number;
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface SearchShards extends Generic {
|
||||
index?: string | string[];
|
||||
preference?: string;
|
||||
@ -2476,10 +2432,6 @@ export interface SecurityPutUser<T = RequestBody> extends Generic {
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface SecurityQueryApiKeys<T = RequestBody> extends Generic {
|
||||
body?: T;
|
||||
}
|
||||
|
||||
export interface SecuritySamlAuthenticate<T = RequestBody> extends Generic {
|
||||
body: T;
|
||||
}
|
||||
|
||||
135
api/types.d.ts
vendored
135
api/types.d.ts
vendored
@ -10135,7 +10135,8 @@ export interface MlAnalysisConfig {
|
||||
categorization_field_name?: Field
|
||||
categorization_filters?: string[]
|
||||
detectors: MlDetector[]
|
||||
influencers: Field[]
|
||||
influencers?: Field[]
|
||||
model_prune_window?: Time
|
||||
latency?: Time
|
||||
multivariate_by_fields?: boolean
|
||||
per_partition_categorization?: MlPerPartitionCategorization
|
||||
@ -10265,26 +10266,28 @@ export type MlChunkingMode = 'auto' | 'manual' | 'off'
|
||||
|
||||
export type MlConditionOperator = 'gt' | 'gte' | 'lt' | 'lte'
|
||||
|
||||
export interface MlCustomSettings {
|
||||
export interface MlCustomSettingsKeys {
|
||||
custom_urls?: XpackUsageUrlConfig[]
|
||||
created_by?: string
|
||||
job_tags?: Record<string, string>
|
||||
}
|
||||
export type MlCustomSettings = MlCustomSettingsKeys |
|
||||
{ [property: string]: any }
|
||||
|
||||
export interface MlDataCounts {
|
||||
bucket_count: long
|
||||
earliest_record_timestamp: long
|
||||
earliest_record_timestamp?: long
|
||||
empty_bucket_count: long
|
||||
input_bytes: long
|
||||
input_field_count: long
|
||||
input_record_count: long
|
||||
invalid_date_count: long
|
||||
job_id: Id
|
||||
last_data_time: long
|
||||
latest_empty_bucket_timestamp: long
|
||||
latest_record_timestamp: long
|
||||
latest_sparse_bucket_timestamp: long
|
||||
latest_bucket_timestamp: long
|
||||
last_data_time?: long
|
||||
latest_empty_bucket_timestamp?: long
|
||||
latest_record_timestamp?: long
|
||||
latest_sparse_bucket_timestamp?: long
|
||||
latest_bucket_timestamp?: long
|
||||
missing_field_count: long
|
||||
out_of_order_timestamp_count: long
|
||||
processed_field_count: long
|
||||
@ -10325,12 +10328,12 @@ export interface MlDatafeedConfig {
|
||||
datafeed_id?: Id
|
||||
delayed_data_check_config?: MlDelayedDataCheckConfig
|
||||
frequency?: Timestamp
|
||||
indexes?: string[]
|
||||
indices: string[]
|
||||
indexes?: Indices
|
||||
indices?: Indices
|
||||
indices_options?: MlDatafeedIndicesOptions
|
||||
job_id?: Id
|
||||
max_empty_searches?: integer
|
||||
query: QueryDslQueryContainer
|
||||
query?: QueryDslQueryContainer
|
||||
query_delay?: Timestamp
|
||||
runtime_mappings?: MappingRuntimeFields
|
||||
script_fields?: Record<string, ScriptField>
|
||||
@ -10344,14 +10347,20 @@ export interface MlDatafeedIndicesOptions {
|
||||
ignore_throttled?: boolean
|
||||
}
|
||||
|
||||
export interface MlDatafeedRunningState {
|
||||
real_time_configured: boolean
|
||||
real_time_running: boolean
|
||||
}
|
||||
|
||||
export type MlDatafeedState = 'started' | 'stopped' | 'starting' | 'stopping'
|
||||
|
||||
export interface MlDatafeedStats {
|
||||
assignment_explanation: string
|
||||
assignment_explanation?: string
|
||||
datafeed_id: Id
|
||||
node: MlDiscoveryNode
|
||||
node?: MlDiscoveryNode
|
||||
state: MlDatafeedState
|
||||
timing_stats: MlDatafeedTimingStats
|
||||
running_state?: MlDatafeedRunningState
|
||||
}
|
||||
|
||||
export interface MlDatafeedTimingStats {
|
||||
@ -10360,7 +10369,7 @@ export interface MlDatafeedTimingStats {
|
||||
job_id: Id
|
||||
search_count: long
|
||||
total_search_time_ms: double
|
||||
average_search_time_per_bucket_ms: number
|
||||
average_search_time_per_bucket_ms?: number
|
||||
}
|
||||
|
||||
export interface MlDataframeAnalysis {
|
||||
@ -10625,10 +10634,11 @@ export interface MlDetector {
|
||||
detector_index?: integer
|
||||
exclude_frequent?: MlExcludeFrequent
|
||||
field_name?: Field
|
||||
function: string
|
||||
function?: string
|
||||
over_field_name?: Field
|
||||
partition_field_name?: Field
|
||||
use_null?: boolean
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface MlDiscoveryNode {
|
||||
@ -10688,9 +10698,9 @@ export interface MlJob {
|
||||
allow_lazy_open: boolean
|
||||
analysis_config: MlAnalysisConfig
|
||||
analysis_limits?: MlAnalysisLimits
|
||||
background_persist_interval: Time
|
||||
background_persist_interval?: Time
|
||||
blocked?: MlJobBlocked
|
||||
create_time: integer
|
||||
create_time?: integer
|
||||
custom_settings?: MlCustomSettings
|
||||
daily_model_snapshot_retention_after_days?: long
|
||||
data_description: MlDataDescription
|
||||
@ -10700,8 +10710,8 @@ export interface MlJob {
|
||||
finished_time?: integer
|
||||
groups?: string[]
|
||||
job_id: Id
|
||||
job_type: string
|
||||
job_version: VersionString
|
||||
job_type?: string
|
||||
job_version?: VersionString
|
||||
model_plot_config?: MlModelPlotConfig
|
||||
model_snapshot_id?: Id
|
||||
model_snapshot_retention_days: long
|
||||
@ -10756,12 +10766,12 @@ export interface MlJobStatistics {
|
||||
}
|
||||
|
||||
export interface MlJobStats {
|
||||
assignment_explanation: string
|
||||
assignment_explanation?: string
|
||||
data_counts: MlDataCounts
|
||||
forecasts_stats: MlJobForecastStatistics
|
||||
job_id: string
|
||||
model_size_stats: MlModelSizeStats
|
||||
node: MlDiscoveryNode
|
||||
node?: MlDiscoveryNode
|
||||
open_time?: DateString
|
||||
state: MlJobState
|
||||
timing_stats: MlJobTimingStats
|
||||
@ -10769,14 +10779,14 @@ export interface MlJobStats {
|
||||
}
|
||||
|
||||
export interface MlJobTimingStats {
|
||||
average_bucket_processing_time_ms: double
|
||||
average_bucket_processing_time_ms?: double
|
||||
bucket_count: long
|
||||
exponential_average_bucket_processing_time_ms: double
|
||||
exponential_average_bucket_processing_time_ms?: double
|
||||
exponential_average_bucket_processing_time_per_hour_ms: double
|
||||
job_id: Id
|
||||
total_bucket_processing_time_ms: double
|
||||
maximum_bucket_processing_time_ms: double
|
||||
minimum_bucket_processing_time_ms: double
|
||||
maximum_bucket_processing_time_ms?: double
|
||||
minimum_bucket_processing_time_ms?: double
|
||||
}
|
||||
|
||||
export type MlMemoryStatus = 'ok' | 'soft_limit' | 'hard_limit'
|
||||
@ -10793,9 +10803,9 @@ export interface MlModelSizeStats {
|
||||
log_time: Time
|
||||
memory_status: MlMemoryStatus
|
||||
model_bytes: long
|
||||
model_bytes_exceeded: long
|
||||
model_bytes_memory_limit: long
|
||||
peak_model_bytes: long
|
||||
model_bytes_exceeded?: long
|
||||
model_bytes_memory_limit?: long
|
||||
peak_model_bytes?: long
|
||||
assignment_memory_basis?: string
|
||||
result_type: string
|
||||
total_by_field_count: long
|
||||
@ -10814,10 +10824,10 @@ export interface MlModelSizeStats {
|
||||
export interface MlModelSnapshot {
|
||||
description?: string
|
||||
job_id: Id
|
||||
latest_record_time_stamp: integer
|
||||
latest_result_time_stamp: integer
|
||||
latest_record_time_stamp?: integer
|
||||
latest_result_time_stamp?: integer
|
||||
min_version: VersionString
|
||||
model_size_stats: MlModelSizeStats
|
||||
model_size_stats?: MlModelSizeStats
|
||||
retain: boolean
|
||||
snapshot_doc_count: long
|
||||
snapshot_id: Id
|
||||
@ -11540,6 +11550,7 @@ export interface MlOpenJobRequest extends RequestBase {
|
||||
|
||||
export interface MlOpenJobResponse {
|
||||
opened: boolean
|
||||
node: Id
|
||||
}
|
||||
|
||||
export interface MlPostCalendarEventsRequest extends RequestBase {
|
||||
@ -11553,18 +11564,22 @@ export interface MlPostCalendarEventsResponse {
|
||||
events: MlCalendarEvent[]
|
||||
}
|
||||
|
||||
export interface MlPostJobDataRequest extends RequestBase {
|
||||
export type MlPostDataInput = any | MlPostDataMultipleInputs
|
||||
|
||||
export interface MlPostDataMultipleInputs {
|
||||
data: any[]
|
||||
}
|
||||
|
||||
export interface MlPostDataRequest extends RequestBase {
|
||||
job_id: Id
|
||||
reset_end?: DateString
|
||||
reset_start?: DateString
|
||||
body?: {
|
||||
data?: any[]
|
||||
}
|
||||
body?: MlPostDataInput
|
||||
}
|
||||
|
||||
export interface MlPostJobDataResponse {
|
||||
export interface MlPostDataResponse {
|
||||
bucket_count: long
|
||||
earliest_record_timestamp: integer
|
||||
earliest_record_timestamp?: integer
|
||||
empty_bucket_count: long
|
||||
input_bytes: long
|
||||
input_field_count: long
|
||||
@ -11572,7 +11587,7 @@ export interface MlPostJobDataResponse {
|
||||
invalid_date_count: long
|
||||
job_id: Id
|
||||
last_data_time: integer
|
||||
latest_record_timestamp: integer
|
||||
latest_record_timestamp?: integer
|
||||
missing_field_count: long
|
||||
out_of_order_timestamp_count: long
|
||||
processed_field_count: long
|
||||
@ -11607,20 +11622,19 @@ export interface MlPreviewDatafeedRequest extends RequestBase {
|
||||
}
|
||||
}
|
||||
|
||||
export interface MlPreviewDatafeedResponse<TDocument = unknown> {
|
||||
data: TDocument[]
|
||||
}
|
||||
export type MlPreviewDatafeedResponse<TDocument = unknown> = TDocument[]
|
||||
|
||||
export interface MlPutCalendarRequest extends RequestBase {
|
||||
calendar_id: Id
|
||||
body?: {
|
||||
job_ids?: Ids
|
||||
description?: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface MlPutCalendarResponse {
|
||||
calendar_id: Id
|
||||
description: string
|
||||
description?: string
|
||||
job_ids: Ids
|
||||
}
|
||||
|
||||
@ -11670,12 +11684,13 @@ export interface MlPutDatafeedRequest extends RequestBase {
|
||||
ignore_throttled?: boolean
|
||||
ignore_unavailable?: boolean
|
||||
body?: {
|
||||
aggs?: Record<string, AggregationsAggregationContainer>
|
||||
aggregations?: Record<string, AggregationsAggregationContainer>
|
||||
chunking_config?: MlChunkingConfig
|
||||
delayed_data_check_config?: MlDelayedDataCheckConfig
|
||||
frequency?: Time
|
||||
indices?: string[]
|
||||
indexes?: string[]
|
||||
indices?: Indices
|
||||
indexes?: Indices
|
||||
indices_options?: MlDatafeedIndicesOptions
|
||||
job_id?: Id
|
||||
max_empty_searches?: integer
|
||||
@ -11688,15 +11703,15 @@ export interface MlPutDatafeedRequest extends RequestBase {
|
||||
}
|
||||
|
||||
export interface MlPutDatafeedResponse {
|
||||
aggregations: Record<string, AggregationsAggregationContainer>
|
||||
aggregations?: Record<string, AggregationsAggregationContainer>
|
||||
chunking_config: MlChunkingConfig
|
||||
delayed_data_check_config?: MlDelayedDataCheckConfig
|
||||
datafeed_id: Id
|
||||
frequency: Time
|
||||
frequency?: Time
|
||||
indices: string[]
|
||||
job_id: Id
|
||||
indices_options?: MlDatafeedIndicesOptions
|
||||
max_empty_searches: integer
|
||||
max_empty_searches?: integer
|
||||
query: QueryDslQueryContainer
|
||||
query_delay: Time
|
||||
runtime_mappings?: MappingRuntimeFields
|
||||
@ -11713,7 +11728,7 @@ export interface MlPutFilterRequest extends RequestBase {
|
||||
}
|
||||
|
||||
export interface MlPutFilterResponse {
|
||||
description: string
|
||||
description?: string
|
||||
filter_id: Id
|
||||
items: string[]
|
||||
}
|
||||
@ -11793,6 +11808,7 @@ export interface MlResetJobResponse extends AcknowledgedResponseBase {
|
||||
export interface MlRevertModelSnapshotRequest extends RequestBase {
|
||||
job_id: Id
|
||||
snapshot_id: Id
|
||||
delete_intervening_results?: boolean
|
||||
body?: {
|
||||
delete_intervening_results?: boolean
|
||||
}
|
||||
@ -11919,7 +11935,27 @@ export interface MlUpdateJobRequest extends RequestBase {
|
||||
}
|
||||
|
||||
export interface MlUpdateJobResponse {
|
||||
stub: boolean
|
||||
allow_lazy_open: boolean
|
||||
analysis_config: MlAnalysisConfig
|
||||
analysis_limits: MlAnalysisLimits
|
||||
background_persist_interval?: Time
|
||||
create_time: Time
|
||||
custom_settings?: MlCustomSettings
|
||||
daily_model_snapshot_retention_after_days: long
|
||||
data_description: MlDataDescription
|
||||
datafeed_config?: MlDatafeed
|
||||
description?: string
|
||||
groups?: string[]
|
||||
job_id: Id
|
||||
job_type: string
|
||||
job_version: string
|
||||
finished_time?: Time
|
||||
model_plot_config?: MlModelPlotConfig
|
||||
model_snapshot_id?: Id
|
||||
model_snapshot_retention_days: long
|
||||
renormalization_window_days?: long
|
||||
results_index_name: string
|
||||
results_retention_days?: long
|
||||
}
|
||||
|
||||
export interface MlUpdateModelSnapshotRequest extends RequestBase {
|
||||
@ -13778,7 +13814,6 @@ export interface SnapshotCreateRequest extends RequestBase {
|
||||
ignore_unavailable?: boolean
|
||||
include_global_state?: boolean
|
||||
indices?: Indices
|
||||
feature_states?: string[]
|
||||
metadata?: Metadata
|
||||
partial?: boolean
|
||||
}
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
[[basic-config]]
|
||||
=== Basic configuration
|
||||
|
||||
This page shows you the possible basic configuration options that the clients
|
||||
This page shows you the possible basic configuration options that the clients
|
||||
offers.
|
||||
|
||||
|
||||
@ -46,9 +46,9 @@ node: {
|
||||
----
|
||||
|
||||
|`auth`
|
||||
a|Your authentication data. You can use both basic authentication and
|
||||
a|Your authentication data. You can use both basic authentication and
|
||||
{ref}/security-api-create-api-key.html[ApiKey]. +
|
||||
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
for more details. +
|
||||
_Default:_ `null`
|
||||
|
||||
@ -141,7 +141,7 @@ const client = new Client({
|
||||
----
|
||||
|
||||
|`agent`
|
||||
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options],
|
||||
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options],
|
||||
or a function that returns an actual http agent instance. If you want to disable the http agent use entirely
|
||||
(and disable the `keep-alive` feature), set the agent to `false`. +
|
||||
_Default:_ `null`
|
||||
@ -196,7 +196,7 @@ function nodeSelector (connections) {
|
||||
----
|
||||
|
||||
|`generateRequestId`
|
||||
a|`function` - function to generate the request id for every request, it takes
|
||||
a|`function` - function to generate the request id for every request, it takes
|
||||
two parameters, the request parameters and options. +
|
||||
By default it generates an incremental integer for every request. +
|
||||
_Custom function example:_
|
||||
@ -233,17 +233,17 @@ such as the client and platform version. +
|
||||
_Default:_ `true`
|
||||
|
||||
|`cloud`
|
||||
a|`object` - Custom configuration for connecting to
|
||||
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
a|`object` - Custom configuration for connecting to
|
||||
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
for more details. +
|
||||
_Default:_ `null` +
|
||||
_Cloud configuration example:_
|
||||
_Default:_ `null` +
|
||||
_Cloud configuration example:_
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA=='
|
||||
},
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
@ -255,36 +255,4 @@ const client = new Client({
|
||||
|`boolean`, `'proto'`, `'constructor'` - By the default the client will protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more. If needed you can disable prototype poisoning protection entirely or one of the two checks. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`caFingerprint`
|
||||
|`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxResponseSize`
|
||||
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENTGH +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxCompressedResponseSize`
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENTGH +
|
||||
_Default:_ `null`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
==== Performances considerations
|
||||
|
||||
By default, the client will protection you against prototype poisoning attacks.
|
||||
Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more.
|
||||
If needed you can disable prototype poisoning protection entirely or one of the two checks.
|
||||
Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more.
|
||||
|
||||
While it's good to be safe, you should know that security always comes with a cost.
|
||||
With big enough payloads, this security check could causea drop in the overall performances,
|
||||
which might be a problem for your application.
|
||||
If you know you can trust the data stored in Elasticsearch, you can safely disable this check.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
disablePrototypePoisoningProtection: true
|
||||
})
|
||||
----
|
||||
|
||||
@ -1,113 +1,6 @@
|
||||
[[changelog-client]]
|
||||
== Release notes
|
||||
|
||||
[discrete]
|
||||
=== 7.15.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v7.15`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.15/release-notes-7.15.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Support mapbox content type https://github.com/elastic/elasticsearch-js/pull/1500[#1500]
|
||||
|
||||
If you call an API that returns a mapbox conten type, the response body will be a buffer.
|
||||
|
||||
[discrete]
|
||||
===== Support CA fingerprint validation https://github.com/elastic/elasticsearch-js/pull/1499[#1499]
|
||||
|
||||
You can configure the client to only trust certificates that are signed by a specific CA certificate ( CA certificate pinning ) by providing a `caFingerprint` option. This will verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied value.
|
||||
a `caFingerprint` option, which will verify the supplied certificate authority fingerprint.
|
||||
You must configure a SHA256 digest.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://example.com'
|
||||
auth: { ... },
|
||||
// the fingerprint (SHA256) of the CA certificate that is used to sign the certificate that the Elasticsearch node presents for TLS.
|
||||
caFingerprint: '20:0D:CA:FA:76:...',
|
||||
ssl: {
|
||||
// might be required if it's a self-signed certificate
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Show the body as string if the response error can't be read as ES error https://github.com/elastic/elasticsearch-js/pull/1509[#1509]
|
||||
|
||||
Useful if the errored response does not come from Elasticsearch, but a proxy in the middle for example.
|
||||
|
||||
[discrete]
|
||||
===== Always display request params and options in request event https://github.com/elastic/elasticsearch-js/pull/1531[#1531]
|
||||
|
||||
In some edge cases the params and options weren't available in observabilty events, now they are always defined.
|
||||
|
||||
[discrete]
|
||||
===== Always emit request aborted event https://github.com/elastic/elasticsearch-js/pull/1534[#1534]
|
||||
|
||||
If the client is busy running an async operation, the `.abort()` call might be executed before sending the actual request. In such case, the error was swallowed, now it will always be emitted, either in the `request` or `response` event.
|
||||
|
||||
[discrete]
|
||||
=== 7.14.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v7.14`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.14/release-notes-7.14.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Verify connection to Elasticsearch https://github.com/elastic/elasticsearch-js/pull/1487[#1487]
|
||||
|
||||
The client will verify if it's working with a supported release of Elasticsearch.
|
||||
Elastic language clients are guaranteed to be able to communicate with Elasticsearch or Elastic solutions running on the same major version and greater or equal minor version.
|
||||
|
||||
Language clients are forward compatible; meaning that clients support communicating with greater minor versions of Elasticsearch. Elastic language clients are not guaranteed to be backwards compatible.
|
||||
|
||||
[discrete]
|
||||
===== Add api compatibility header support https://github.com/elastic/elasticsearch-js/pull/1478[#1478]
|
||||
|
||||
If you configure the `ELASTIC_CLIENT_APIVERSIONING` to `true` the client will send a compatibility header
|
||||
to allow you to use a 7.x client against a 8.x cluster. In this way it will be easier to migrate your code to a newer release of Elasticsearch.
|
||||
|
||||
[discrete]
|
||||
===== Add support for bearer auth https://github.com/elastic/elasticsearch-js/pull/1488[#1488]
|
||||
|
||||
Bearer authentication, useful for https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html[service account tokens].
|
||||
Be aware that it does not handle automatic token refresh:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
bearer: 'token'
|
||||
}
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Bulk update improvements https://github.com/elastic/elasticsearch-js/pull/1428[#1428]
|
||||
|
||||
The final stats object will let you know how many `noop` operations happened.
|
||||
Also, a new `.stats` getter has been added to allow you to read the stats before
|
||||
the operation finishes.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({ ... })
|
||||
...
|
||||
console.log(b.stats)
|
||||
----
|
||||
|
||||
[discrete]
|
||||
=== 7.13.0
|
||||
|
||||
@ -117,7 +10,7 @@ console.log(b.stats)
|
||||
[discrete]
|
||||
===== Remove Node.js v10 support https://github.com/elastic/elasticsearch-js/pull/1471[#1471]
|
||||
|
||||
According to our
|
||||
According to our
|
||||
https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/installation.html#nodejs-support[support matrix].
|
||||
|
||||
[discrete]
|
||||
@ -126,7 +19,7 @@ https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/inst
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v7.13`
|
||||
|
||||
You can find all the API changes
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.13/release-notes-7.13.0.html[here].
|
||||
|
||||
[discrete]
|
||||
@ -163,7 +56,7 @@ This is now fixed and in case of error you will get the full body response.
|
||||
[discrete]
|
||||
===== Remove Node.js v8 support https://github.com/elastic/elasticsearch-js/pull/1402[#1402]
|
||||
|
||||
According to our
|
||||
According to our
|
||||
https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/installation.html#nodejs-support[support matrix].
|
||||
|
||||
[discrete]
|
||||
@ -172,7 +65,7 @@ https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/inst
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v7.12`
|
||||
|
||||
You can find all the API changes
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.12/release-notes-7.12.0.html[here].
|
||||
|
||||
[discrete]
|
||||
@ -198,15 +91,15 @@ Now the thenable offers a `.finally` method as well.
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v7.11`
|
||||
|
||||
You can find all the API changes
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Added new observability events https://github.com/elastic/elasticsearch-js/pull/1365[#1365]
|
||||
|
||||
Two new observability events has been introduced: `serialization` and
|
||||
`deserialization`. The event order is described in the following graph, in some
|
||||
edge cases, the order is not guaranteed. You can find in
|
||||
Two new observability events has been introduced: `serialization` and
|
||||
`deserialization`. The event order is described in the following graph, in some
|
||||
edge cases, the order is not guaranteed. You can find in
|
||||
https://github.com/elastic/elasticsearch-js/blob/master/test/acceptance/events-order.test.js[`test/acceptance/events-order.test.js`]
|
||||
how the order changes based on the situation.
|
||||
|
||||
@ -229,7 +122,7 @@ serialization
|
||||
[discrete]
|
||||
===== Added x-elastic-client-meta header https://github.com/elastic/elasticsearch-js/pull/1373[#1373]
|
||||
|
||||
Adds the `x-elastic-client-meta` HTTP header which is used by Elastic Cloud and
|
||||
Adds the `x-elastic-client-meta` HTTP header which is used by Elastic Cloud and
|
||||
can be disabled with the `enableMetaHeader` parameter set to `false`.
|
||||
|
||||
[discrete]
|
||||
@ -238,9 +131,9 @@ can be disabled with the `enableMetaHeader` parameter set to `false`.
|
||||
[discrete]
|
||||
===== Fixes req.abort() with a body that is a stream calls callback(err) twice https://github.com/elastic/elasticsearch-js/pull/1376[#1376]
|
||||
|
||||
When using a body that is a stream to client.search(), and calling req.abort(),
|
||||
the callback is called twice. Once for the RequestAbortedError, as expected, and
|
||||
once for a "premature close" error from end-of-stream, used by pump, used by the
|
||||
When using a body that is a stream to client.search(), and calling req.abort(),
|
||||
the callback is called twice. Once for the RequestAbortedError, as expected, and
|
||||
once for a "premature close" error from end-of-stream, used by pump, used by the
|
||||
client. This issue has now been fixed.
|
||||
|
||||
[discrete]
|
||||
@ -252,14 +145,14 @@ client. This issue has now been fixed.
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v7.10`.
|
||||
|
||||
You can find all the API changes
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.10/release-notes-7.10.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Added proxy support https://github.com/elastic/elasticsearch-js/pull/1260[#1260]
|
||||
|
||||
If you need to pass through an http(s) proxy for connecting to {es}, the client
|
||||
offers out of the box a handy configuration for helping you with it. Under the
|
||||
If you need to pass through an http(s) proxy for connecting to {es}, the client
|
||||
offers out of the box a handy configuration for helping you with it. Under the
|
||||
hood it uses the https://github.com/delvedor/hpagent[`hpagent`] module.
|
||||
|
||||
[source,js]
|
||||
@ -286,15 +179,15 @@ const client = new Client({
|
||||
[discrete]
|
||||
===== Scroll search should clear the scroll at the end https://github.com/elastic/elasticsearch-js/pull/1331[#1331]
|
||||
|
||||
From now on the scroll search helper will automatically close the scroll on
|
||||
From now on the scroll search helper will automatically close the scroll on
|
||||
{es}, by doing so, {es} will free resources faster.
|
||||
|
||||
[discrete]
|
||||
===== Handle connectivity issues while reading the body https://github.com/elastic/elasticsearch-js/pull/1343[#1343]
|
||||
|
||||
It might happen that the underlying socket stops working due to an external
|
||||
cause while reading the body. This could lead to an unwanted
|
||||
`DeserialzationError`. From now, this will be handled as a generic
|
||||
It might happen that the underlying socket stops working due to an external
|
||||
cause while reading the body. This could lead to an unwanted
|
||||
`DeserialzationError`. From now, this will be handled as a generic
|
||||
`ConnectionError`.
|
||||
|
||||
[discrete]
|
||||
@ -303,13 +196,13 @@ cause while reading the body. This could lead to an unwanted
|
||||
[discrete]
|
||||
===== Add warning log about nodejs version support https://github.com/elastic/elasticsearch-js/pull/1349[#1349]
|
||||
|
||||
`7.11` will be the last version of the client that will support Node.js v8,
|
||||
while `7.12` will be the last one that supports Node.js v10. If you are using
|
||||
this versions you will see a `DeprecationWaring` in your logs. We strongly
|
||||
recommend to upgrade to newer versions of Node.js as usng an EOL version will
|
||||
`7.11` will be the last version of the client that will support Node.js v8,
|
||||
while `7.12` will be the last one that supports Node.js v10. If you are eusing
|
||||
this versions you will see a `DeprecationWaring` in your logs. We strongly
|
||||
recommend to upgrade to newer versions of Node.js as usng an EOL version will
|
||||
expose you to securty risks.
|
||||
|
||||
Please refer to https://ela.st/nodejs-support[ela.st/nodejs-support] for
|
||||
Please refer to https://ela.st/nodejs-support[ela.st/nodejs-support] for
|
||||
additional information.
|
||||
|
||||
[discrete]
|
||||
@ -321,16 +214,16 @@ additional information.
|
||||
[discrete]
|
||||
===== Improve child performances https://github.com/elastic/elasticsearch-js/pull/1314[#1314]
|
||||
|
||||
The client code has been refactored to speed up the performances of the child
|
||||
method. Before this pr, creating many children per second would have caused a
|
||||
high memory consumption and a spike in CPU usage. This pr changes the way the
|
||||
client is created by refactoring the code generation, now the clients methods
|
||||
are no longer added to the instance with a for loop but via prototypal
|
||||
inheritance. Thus, the overall performances are way better, now creating a child
|
||||
The client code has been refactored to speed up the performances of the child
|
||||
method. Before this pr, creating many children per second would have caused a
|
||||
high memory consumption and a spike in CPU usage. This pr changes the way the
|
||||
client is created by refactoring the code generation, now the clients methods
|
||||
are no longer added to the instance with a for loop but via prototypal
|
||||
inheritance. Thus, the overall performances are way better, now creating a child
|
||||
is ~5 times faster, and it consumes ~70% less memory.
|
||||
|
||||
This change should not cause any breaking change unless you were mocking the
|
||||
client methods. In such case you should refactor it, or use
|
||||
This change should not cause any breaking change unless you were mocking the
|
||||
client methods. In such case you should refactor it, or use
|
||||
https://github.com/elastic/elasticsearch-js-mock[elasticsearch-js-mock].
|
||||
|
||||
Finally, this change should also fix once and of all the bundlers support.
|
||||
@ -338,15 +231,15 @@ Finally, this change should also fix once and of all the bundlers support.
|
||||
[discrete]
|
||||
===== Throw all errors asynchronously https://github.com/elastic/elasticsearch-js/pull/1295[#1295]
|
||||
|
||||
Some validation errors were thrown synchronously, causing the callback to be
|
||||
called in the same tick. This issue is known as _"The release of Zalgo"_ (see
|
||||
Some validation errors were thrown synchronously, causing the callback to be
|
||||
called in the same tick. This issue is known as _"The release of Zalgo"_ (see
|
||||
https://blog.izs.me/2013/08/designing-apis-for-asynchrony[here]).
|
||||
|
||||
[discrete]
|
||||
===== Fix `maxRetries` request option handling https://github.com/elastic/elasticsearch-js/pull/1296[#1296]
|
||||
|
||||
The `maxRetries` parameter can be configured on a per-request basis; if set to
|
||||
zero it was defaulting to the client default. Now the client is honoring the
|
||||
The `maxRetries` parameter can be configured on a per-request basis; if set to
|
||||
zero it was defaulting to the client default. Now the client is honoring the
|
||||
request specific configuration.
|
||||
|
||||
[discrete]
|
||||
@ -357,7 +250,7 @@ The Connection requets option types were not accepting `null` as valid value.
|
||||
[discrete]
|
||||
===== Fixed `size` and `maxRetries` parameters in helpers https://github.com/elastic/elasticsearch-js/pull/1284[#1284]
|
||||
|
||||
The `size` parameter was being passed to the scroll request, which was causing
|
||||
The `size` parameter was being passed to the scroll request, which was causing
|
||||
an error. Value of `maxRetries` set to 0 was resulting in no request at all.
|
||||
|
||||
[discrete]
|
||||
@ -383,8 +276,8 @@ const client = new Client({
|
||||
[discrete]
|
||||
===== Add support for a global context option https://github.com/elastic/elasticsearch-js/pull/1256[#1256]
|
||||
|
||||
Before this, you could set a `context` option in each request, but there was no
|
||||
way of setting it globally. Now you can by configuring the `context` object in
|
||||
Before this, you could set a `context` option in each request, but there was no
|
||||
way of setting it globally. Now you can by configuring the `context` object in
|
||||
the global configuration, that will be merged with the local one.
|
||||
|
||||
[source,js]
|
||||
@ -412,7 +305,7 @@ import { Client } from '@elastic/elasticsearch'
|
||||
[discrete]
|
||||
===== Allow the client name to be a symbol https://github.com/elastic/elasticsearch-js/pull/1254[#1254]
|
||||
|
||||
It was possible in plain JavaScript, but not in TypeScript, now you can do it in
|
||||
It was possible in plain JavaScript, but not in TypeScript, now you can do it in
|
||||
TypeScript as well.
|
||||
|
||||
[source,js]
|
||||
@ -432,17 +325,17 @@ Only `Record<string, any>` was allowed. Now `string` is allowed as well.
|
||||
[discrete]
|
||||
===== Fixed type definitions https://github.com/elastic/elasticsearch-js/pull/1263[#1263]
|
||||
|
||||
* The `transport.request` definition was incorrect, it was returning a
|
||||
* The `transport.request` definition was incorrect, it was returning a
|
||||
`Promise<T>` instead of `TransportRequestPromise<T>`.
|
||||
* The `refresh` parameter of most APIs was declared as
|
||||
`'true' | 'false' | 'wait_for'`, which was clunky. Now is
|
||||
* The `refresh` parameter of most APIs was declared as
|
||||
`'true' | 'false' | 'wait_for'`, which was clunky. Now is
|
||||
`'wait_for' | boolean`.
|
||||
|
||||
[discrete]
|
||||
===== Generate response type as boolean if the request is HEAD only https://github.com/elastic/elasticsearch-js/pull/1275[#1275]
|
||||
|
||||
All HEAD request will have the body casted to a boolean value, `true` in case of
|
||||
a 200 response, `false` in case of a 404 response. The type definitions were not
|
||||
All HEAD request will have the body casted to a boolean value, `true` in case of
|
||||
a 200 response, `false` in case of a 404 response. The type definitions were not
|
||||
reflecting this behavior.
|
||||
|
||||
[source,ts]
|
||||
@ -462,10 +355,10 @@ console.log(body) // either `true` or `false`
|
||||
[discrete]
|
||||
===== Updated default http agent configuration https://github.com/elastic/elasticsearch-js/pull/1242[#1242]
|
||||
|
||||
Added the scheduling: 'lifo' option to the default HTTP agent configuration to
|
||||
avoid maximizing the open sockets against {es} and lowering the risk of
|
||||
encountering socket timeouts. This feature is only available from Node v14.5+,
|
||||
but it should be backported to v10 and v12
|
||||
Added the scheduling: 'lifo' option to the default HTTP agent configuration to
|
||||
avoid maximizing the open sockets against {es} and lowering the risk of
|
||||
encountering socket timeouts. This feature is only available from Node v14.5+,
|
||||
but it should be backported to v10 and v12
|
||||
(https://github.com/nodejs/node/pull/33278[nodejs/node#33278]).
|
||||
|
||||
[discrete]
|
||||
@ -473,12 +366,12 @@ but it should be backported to v10 and v12
|
||||
|
||||
This pr introduce two changes which should not impact the surface API:
|
||||
|
||||
* Refactored the `client.child` API to allocate fewer objects, this change
|
||||
improves memory consumption over time and improves the child creation
|
||||
* Refactored the `client.child` API to allocate fewer objects, this change
|
||||
improves memory consumption over time and improves the child creation
|
||||
performances by ~12%.
|
||||
* The client no longer inherits from the EventEmitter class, but instead has an
|
||||
internal event emitter and exposes only the API useful for the users, namely
|
||||
`emit`, `on`, `once`, and `off`. The type definitions have been updated
|
||||
* The client no longer inherits from the EventEmitter class, but instead has an
|
||||
internal event emitter and exposes only the API useful for the users, namely
|
||||
`emit`, `on`, `once`, and `off`. The type definitions have been updated
|
||||
accordingly.
|
||||
|
||||
[discrete]
|
||||
@ -495,10 +388,10 @@ You can find all the API changes https://www.elastic.co/guide/en/elasticsearch/r
|
||||
[discrete]
|
||||
===== Added multi search helper https://github.com/elastic/elasticsearch-js/pull/1186[#1186]
|
||||
|
||||
If you are sending search request at a high rate, this helper might be useful
|
||||
for you. It will use the multi search API under the hood to batch the requests
|
||||
and improve the overall performances of your application. The `result` exposes a
|
||||
`documents` property as well, which allows you to access directly the hits
|
||||
If you are sending search request at a high rate, this helper might be useful
|
||||
for you. It will use the multi search API under the hood to batch the requests
|
||||
and improve the overall performances of your application. The `result` exposes a
|
||||
`documents` property as well, which allows you to access directly the hits
|
||||
sources.
|
||||
|
||||
[source,js]
|
||||
@ -530,10 +423,10 @@ m.search(
|
||||
[discrete]
|
||||
===== Added timeout support in bulk and msearch helpers https://github.com/elastic/elasticsearch-js/pull/1206[#1206]
|
||||
|
||||
If there is a slow producer, the bulk helper might send data with a very large
|
||||
period of time, and if the process crashes for any reason, the data would be
|
||||
lost. This pr introduces a `flushInterval` option in the bulk helper to avoid
|
||||
this issue. By default, the bulk helper will flush the data automatically every
|
||||
If there is a slow producer, the bulk helper might send data with a very large
|
||||
period of time, and if the process crashes for any reason, the data would be
|
||||
lost. This pr introduces a `flushInterval` option in the bulk helper to avoid
|
||||
this issue. By default, the bulk helper will flush the data automatically every
|
||||
30 seconds, unless the threshold has been reached before.
|
||||
|
||||
[source,js]
|
||||
@ -543,8 +436,8 @@ const b = client.helpers.bulk({
|
||||
})
|
||||
----
|
||||
|
||||
The same problem might happen with the multi search helper, where the user is
|
||||
not sending search requests fast enough. A `flushInterval` option has been
|
||||
The same problem might happen with the multi search helper, where the user is
|
||||
not sending search requests fast enough. A `flushInterval` option has been
|
||||
added as well, with a default value of 500 milliseconds.
|
||||
|
||||
[source,js]
|
||||
@ -560,16 +453,16 @@ const m = client.helpers.msearch({
|
||||
[discrete]
|
||||
===== Use filter_path for improving the search helpers performances https://github.com/elastic/elasticsearch-js/pull/1199[#1199]
|
||||
|
||||
From now on, all the search helpers will use the `filter_path` option
|
||||
automatically when needed to retrieve only the hits source. This change will
|
||||
From now on, all the search helpers will use the `filter_path` option
|
||||
automatically when needed to retrieve only the hits source. This change will
|
||||
result in less network traffic and improved deserialization performances.
|
||||
|
||||
[discrete]
|
||||
===== Search helpers documents getter https://github.com/elastic/elasticsearch-js/pull/1186[#1186]
|
||||
|
||||
Before this, the `documents` key that you can access in any search helper was
|
||||
computed as soon as we got the search result from Elasticsearch. With this
|
||||
change the `documents` key is now a getter, which makes this process lazy,
|
||||
Before this, the `documents` key that you can access in any search helper was
|
||||
computed as soon as we got the search result from Elasticsearch. With this
|
||||
change the `documents` key is now a getter, which makes this process lazy,
|
||||
resulting in better performances and lower memory impact.
|
||||
|
||||
[discrete]
|
||||
@ -581,8 +474,8 @@ resulting in better performances and lower memory impact.
|
||||
[discrete]
|
||||
===== Disable client Helpers in Node.js < 10 - https://github.com/elastic/elasticsearch-js/pull/1194[#1194]
|
||||
|
||||
The client helpers can't be used in Node.js < 10 because it needs a custom flag
|
||||
to be able to use them. Given that not every provider allows the user to specify
|
||||
The client helpers can't be used in Node.js < 10 because it needs a custom flag
|
||||
to be able to use them. Given that not every provider allows the user to specify
|
||||
custom Node.js flags, the Helpers has been disabled completely in Node.js < 10.
|
||||
|
||||
[discrete]
|
||||
@ -600,16 +493,16 @@ will be no conflicts in case of the same header with different casing.
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v7.7`.
|
||||
|
||||
You can find all the API changes
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.7/release-notes-7.7.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Introduced client helpers - https://github.com/elastic/elasticsearch-js/pull/1107[#1107]
|
||||
|
||||
From now on, the client comes with a handy collection of helpers to give you a
|
||||
From now on, the client comes with a handy collection of helpers to give you a
|
||||
more comfortable experience with some APIs.
|
||||
|
||||
CAUTION: The client helpers are experimental, and the API may change in the next
|
||||
CAUTION: The client helpers are experimental, and the API may change in the next
|
||||
minor releases.
|
||||
|
||||
The following helpers have been introduced:
|
||||
@ -622,16 +515,16 @@ The following helpers has been introduced:
|
||||
[discrete]
|
||||
===== The `ConnectionPool.getConnection` now always returns a `Connection` - https://github.com/elastic/elasticsearch-js/pull/1127[#1127]
|
||||
|
||||
What does this mean? It means that you will see less `NoLivingConnectionError`,
|
||||
which now can only be caused if you set a selector/filter too strict. For
|
||||
improving the debugging experience, the `NoLivingConnectionsError` error message
|
||||
What does this mean? It means that you will see less `NoLivingConnectionError`,
|
||||
which now can only be caused if you set a selector/filter too strict. For
|
||||
improving the debugging experience, the `NoLivingConnectionsError` error message
|
||||
has been updated.
|
||||
|
||||
[discrete]
|
||||
===== Abortable promises - https://github.com/elastic/elasticsearch-js/pull/1141[#1141]
|
||||
|
||||
From now on, it will be possible to abort a request generated with the
|
||||
promise-style API. If you abort a request generated from a promise, the promise
|
||||
From now on, it will be possible to abort a request generated with the
|
||||
promise-style API. If you abort a request generated from a promise, the promise
|
||||
will be rejected with a `RequestAbortedError`.
|
||||
|
||||
|
||||
@ -653,8 +546,8 @@ promise.abort()
|
||||
[discrete]
|
||||
===== Major refactor of the Type Definitions - https://github.com/elastic/elasticsearch-js/pull/1119[#1119] https://github.com/elastic/elasticsearch-js/issues/1130[#1130] https://github.com/elastic/elasticsearch-js/pull/1132[#1132]
|
||||
|
||||
Now every API makes better use of the generics and overloading, so you can (or
|
||||
not, by default request/response bodies are `Record<string, any>`) define the
|
||||
Now every API makes better use of the generics and overloading, so you can (or
|
||||
not, by default request/response bodies are `Record<string, any>`) define the
|
||||
request/response bodies in the generics.
|
||||
|
||||
[source,ts]
|
||||
@ -667,10 +560,10 @@ client.search<SearchResponse>(...)
|
||||
client.search<SearchResponse, SearchBody>(...)
|
||||
----
|
||||
|
||||
This *should* not be a breaking change, as every generics defaults to `any`. It
|
||||
might happen to some users that the code breaks, but our test didn't detect any
|
||||
of it, probably because they were not robust enough. However, given the gigantic
|
||||
improvement in the developer experience, we have decided to release this change
|
||||
This *should* not be a breaking change, as every generics defaults to `any`. It
|
||||
might happen to some users that the code breaks, but our test didn't detect any
|
||||
of it, probably because they were not robust enough. However, given the gigantic
|
||||
improvement in the developer experience, we have decided to release this change
|
||||
in the 7.x line.
|
||||
|
||||
[discrete]
|
||||
@ -679,35 +572,35 @@ in the 7.x line.
|
||||
[discrete]
|
||||
===== The `ConnectionPool.update` method now cleans the `dead` list - https://github.com/elastic/elasticsearch-js/issues/1122[#1122] https://github.com/elastic/elasticsearch-js/pull/1127[#1127]
|
||||
|
||||
It can happen in a situation where we are updating the connections list and
|
||||
running sniff, leaving the `dead` list in a dirty state. Now the
|
||||
`ConnectionPool.update` cleans up the `dead` list every time, which makes way
|
||||
It can happen in a situation where we are updating the connections list and
|
||||
running sniff, leaving the `dead` list in a dirty state. Now the
|
||||
`ConnectionPool.update` cleans up the `dead` list every time, which makes way
|
||||
more sense given that all the new connections are alive.
|
||||
|
||||
[discrete]
|
||||
===== `ConnectionPool.markDead` should ignore connections that no longer exist - https://github.com/elastic/elasticsearch-js/pull/1159[#1159]
|
||||
|
||||
It might happen that markDead is called just after a pool update, and in such
|
||||
case, the client was adding to the dead list a node that no longer exists, causing
|
||||
It might happen that markDead is called just after a pool update, and in such
|
||||
case, the client was adding to the dead list a node that no longer exists, causing
|
||||
unhandled exceptions later.
|
||||
|
||||
[discrete]
|
||||
===== Do not retry a request if the body is a stream - https://github.com/elastic/elasticsearch-js/pull/1143[#1143]
|
||||
|
||||
The client should not retry if it's sending a stream body, because it should
|
||||
store in memory a copy of the stream to be able to send it again, but since it
|
||||
doesn't know in advance the size of the stream, it risks to take too much
|
||||
memory. Furthermore, copying the stream every time is a very expensive
|
||||
The client should not retry if it's sending a stream body, because it should
|
||||
store in memory a copy of the stream to be able to send it again, but since it
|
||||
doesn't know in advance the size of the stream, it risks to take too much
|
||||
memory. Furthermore, copying the stream every time is a very expensive
|
||||
operation.
|
||||
|
||||
[discrete]
|
||||
===== Return an error if the request has been aborted - https://github.com/elastic/elasticsearch-js/pull/1141[#1141]
|
||||
|
||||
Until now, aborting a request was blocking the HTTP request, but never calling
|
||||
the callback or resolving the promise to notify the user. This is a bug because
|
||||
it could lead to dangerous memory leaks. From now on if the user calls the
|
||||
`request.abort()` method, the callback style API will be called with a
|
||||
`RequestAbortedError`, the promise will be rejected with `RequestAbortedError`
|
||||
Until now, aborting a request was blocking the HTTP request, but never calling
|
||||
the callback or resolving the promise to notify the user. This is a bug because
|
||||
it could lead to dangerous memory leaks. From now on if the user calls the
|
||||
`request.abort()` method, the callback style API will be called with a
|
||||
`RequestAbortedError`, the promise will be rejected with `RequestAbortedError`
|
||||
as well.
|
||||
|
||||
[discrete]
|
||||
@ -715,14 +608,14 @@ as well.
|
||||
|
||||
**Fixes:**
|
||||
|
||||
- Secure json parsing -
|
||||
- Secure json parsing -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1110[#1110]
|
||||
- ApiKey should take precedence over basic auth -
|
||||
- ApiKey should take precedence over basic auth -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1115[#1115]
|
||||
|
||||
**Documentation:**
|
||||
|
||||
- Fix typo in api reference -
|
||||
- Fix typo in api reference -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1109[#1109]
|
||||
|
||||
[discrete]
|
||||
@ -735,22 +628,22 @@ Support for Elasticsearch `v7.6`.
|
||||
|
||||
**Fixes:**
|
||||
|
||||
- Skip compression in case of empty string body -
|
||||
- Skip compression in case of empty string body -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1080[#1080]
|
||||
- Fix typo in NoLivingConnectionsError -
|
||||
- Fix typo in NoLivingConnectionsError -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1045[#1045]
|
||||
- Change TransportRequestOptions.ignore to number[] -
|
||||
- Change TransportRequestOptions.ignore to number[] -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1053[#1053]
|
||||
- ClientOptions["cloud"] should have optional auth fields -
|
||||
- ClientOptions["cloud"] should have optional auth fields -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1032[#1032]
|
||||
|
||||
**Documentation:**
|
||||
|
||||
- Docs: Return super in example Transport subclass -
|
||||
- Docs: Return super in example Transport subclass -
|
||||
https://github.com/elastic/elasticsearch-js/pull/980[#980]
|
||||
- Add examples to reference -
|
||||
- Add examples to reference -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1076[#1076]
|
||||
- Added new examples -
|
||||
- Added new examples -
|
||||
https://github.com/elastic/elasticsearch-js/pull/1031[#1031]
|
||||
|
||||
[discrete]
|
||||
@ -769,21 +662,21 @@ Support for Elasticsearch `v7.4`.
|
||||
|
||||
**Fixes:**
|
||||
|
||||
- Fix issue; node roles are defaulting to true when undefined is breaking usage
|
||||
of nodeFilter option -
|
||||
- Fix issue; node roles are defaulting to true when undefined is breaking usage
|
||||
of nodeFilter option -
|
||||
https://github.com/elastic/elasticsearch-js/pull/967[#967]
|
||||
|
||||
**Documentation:**
|
||||
|
||||
- Updated API reference doc -
|
||||
https://github.com/elastic/elasticsearch-js/pull/945[#945],
|
||||
- Updated API reference doc -
|
||||
https://github.com/elastic/elasticsearch-js/pull/945[#945],
|
||||
https://github.com/elastic/elasticsearch-js/pull/969[#969]
|
||||
- Fix inaccurate description sniffEndpoint -
|
||||
- Fix inaccurate description sniffEndpoint -
|
||||
https://github.com/elastic/elasticsearch-js/pull/959[#959]
|
||||
|
||||
**Internals:**
|
||||
|
||||
- Update code generation
|
||||
- Update code generation
|
||||
https://github.com/elastic/elasticsearch-js/pull/969[#969]
|
||||
|
||||
[discrete]
|
||||
@ -793,26 +686,26 @@ Support for Elasticsearch `v7.3`.
|
||||
|
||||
**Features:**
|
||||
|
||||
- Added `auth` option -
|
||||
- Added `auth` option -
|
||||
https://github.com/elastic/elasticsearch-js/pull/908[#908]
|
||||
- Added support for `ApiKey` authentication -
|
||||
- Added support for `ApiKey` authentication -
|
||||
https://github.com/elastic/elasticsearch-js/pull/908[#908]
|
||||
|
||||
**Fixes:**
|
||||
|
||||
- fix(Typings): sniffInterval can also be boolean -
|
||||
- fix(Typings): sniffInterval can also be boolean -
|
||||
https://github.com/elastic/elasticsearch-js/pull/914[#914]
|
||||
|
||||
**Internals:**
|
||||
|
||||
- Refactored connection pool -
|
||||
- Refactored connection pool -
|
||||
https://github.com/elastic/elasticsearch-js/pull/913[#913]
|
||||
|
||||
**Documentation:**
|
||||
|
||||
- Better reference code examples -
|
||||
- Better reference code examples -
|
||||
https://github.com/elastic/elasticsearch-js/pull/920[#920]
|
||||
- Improve README -
|
||||
- Improve README -
|
||||
https://github.com/elastic/elasticsearch-js/pull/909[#909]
|
||||
|
||||
[discrete]
|
||||
@ -822,7 +715,7 @@ Support for Elasticsearch `v7.2`
|
||||
|
||||
**Fixes:**
|
||||
|
||||
- Remove auth data from inspect and toJSON in connection class -
|
||||
- Remove auth data from inspect and toJSON in connection class -
|
||||
https://github.com/elastic/elasticsearch-js/pull/887[#887]
|
||||
|
||||
[discrete]
|
||||
@ -832,9 +725,9 @@ Support for Elasticsearch `v7.1`
|
||||
|
||||
**Fixes:**
|
||||
|
||||
- Support for non-friendly chars in url username and password -
|
||||
- Support for non-friendly chars in url username and password -
|
||||
https://github.com/elastic/elasticsearch-js/pull/858[#858]
|
||||
- Patch deprecated parameters -
|
||||
- Patch deprecated parameters -
|
||||
https://github.com/elastic/elasticsearch-js/pull/851[#851]
|
||||
|
||||
[discrete]
|
||||
@ -842,17 +735,17 @@ Support for Elasticsearch `v7.1`
|
||||
|
||||
**Fixes:**
|
||||
|
||||
- Fix TypeScript export *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/841[#841])* -
|
||||
- Fix TypeScript export *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/841[#841])* -
|
||||
https://github.com/elastic/elasticsearch-js/pull/842[#842]
|
||||
- Fix http and https port handling *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/843[#843])* -
|
||||
- Fix http and https port handling *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/843[#843])* -
|
||||
https://github.com/elastic/elasticsearch-js/pull/845[#845]
|
||||
- Fix TypeScript definition *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/803[#803])* -
|
||||
- Fix TypeScript definition *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/803[#803])* -
|
||||
https://github.com/elastic/elasticsearch-js/pull/846[#846]
|
||||
- Added toJSON method to Connection class *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/848[#848])* -
|
||||
- Added toJSON method to Connection class *(issue
|
||||
https://github.com/elastic/elasticsearch-js/pull/848[#848])* -
|
||||
https://github.com/elastic/elasticsearch-js/pull/849[#849]
|
||||
|
||||
[discrete]
|
||||
|
||||
@ -8,7 +8,6 @@ This page contains the information you need to connect and use the Client with
|
||||
|
||||
* <<auth-reference, Authentication options>>
|
||||
* <<client-usage, Using the client>>
|
||||
* <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
|
||||
* <<client-connect-proxy, Connecting through a proxy>>
|
||||
* <<client-error-handling, Handling errors>>
|
||||
* <<product-check, Automatic product check>>
|
||||
@ -61,12 +60,11 @@ const client = new Client({
|
||||
==== ApiKey authentication
|
||||
|
||||
You can use the
|
||||
{ref-7x}/security-api-create-api-key.html[ApiKey]
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey]
|
||||
authentication by passing the `apiKey` parameter via the `auth` option. The
|
||||
`apiKey` parameter can be either a base64 encoded string or an object with the
|
||||
values that you can obtain from the
|
||||
{ref-7x}/security-api-create-api-key.html[create api key endpoint].
|
||||
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[create api key endpoint].
|
||||
|
||||
NOTE: If you provide both basic authentication credentials and the ApiKey
|
||||
configuration, the ApiKey takes precedence.
|
||||
@ -179,29 +177,6 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-ca-fingerprint]]
|
||||
==== CA fingerprint
|
||||
|
||||
You can configure the client to only trust certificates that are signed by a specific CA certificate ( CA certificate pinning ) by providing a `caFingerprint` option. This will verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied value.
|
||||
a `caFingerprint` option, which will verify the supplied certificate authority fingerprint.
|
||||
You must configure a SHA256 digest.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://example.com'
|
||||
auth: { ... },
|
||||
// the fingerprint (SHA256) of the CA certificate that is used to sign the certificate that the Elasticsearch node presents for TLS.
|
||||
caFingerprint: '20:0D:CA:FA:76:...',
|
||||
ssl: {
|
||||
// might be required if it's a self-signed certificate
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[client-usage]]
|
||||
=== Usage
|
||||
@ -419,87 +394,8 @@ _Default:_ `null`
|
||||
|`context`
|
||||
|`any` - Custom object per request. _(you can use it to pass data to the clients events)_ +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxResponseSize`
|
||||
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxCompressedResponseSize`
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number, if it's higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH +
|
||||
_Default:_ `null`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
[[client-faas-env]]
|
||||
=== Using the Client in a Function-as-a-Service Environment
|
||||
|
||||
This section illustrates the best practices for leveraging the {es} client in a Function-as-a-Service (FaaS) environment.
|
||||
The most influential optimization is to initialize the client outside of the function, the global scope.
|
||||
This practice does not only improve performance but also enables background functionality as – for example – https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[sniffing].
|
||||
The following examples provide a skeleton for the best practices.
|
||||
|
||||
[discrete]
|
||||
==== GCP Cloud Functions
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
exports.testFunction = async function (req, res) {
|
||||
// use the client
|
||||
}
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== AWS Lambda
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
exports.handler = async function (event, context) {
|
||||
// use the client
|
||||
}
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Azure Functions
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
module.exports = async function (context, req) {
|
||||
// use the client
|
||||
}
|
||||
----
|
||||
|
||||
Resources used to assess these recommendations:
|
||||
|
||||
- https://cloud.google.com/functions/docs/bestpractices/tips#use_global_variables_to_reuse_objects_in_future_invocations[GCP Cloud Functions: Tips & Tricks]
|
||||
- https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html[Best practices for working with AWS Lambda functions]
|
||||
- https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python?tabs=azurecli-linux%2Capplication-level#global-variables[Azure Functions Python developer guide]
|
||||
- https://docs.aws.amazon.com/lambda/latest/operatorguide/global-scope.html[AWS Lambda: Comparing the effect of global scope]
|
||||
|
||||
|
||||
[discrete]
|
||||
[[client-connect-proxy]]
|
||||
|
||||
@ -1,13 +1,8 @@
|
||||
= Elasticsearch JavaScript Client
|
||||
= Elasticsearch Node.js client
|
||||
|
||||
:branch: 7.16
|
||||
:branch: 7.x
|
||||
include::{asciidoc-dir}/../../shared/attributes.asciidoc[]
|
||||
|
||||
// 7.x exists in this repo but not in stack repos
|
||||
// This line overwrites the jsclient attribute so it can point to 7.x, but stack links can point to 7.16
|
||||
// Remove this line when a 7.16 branch exists in this repo
|
||||
:jsclient: https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/7.x
|
||||
|
||||
include::introduction.asciidoc[]
|
||||
include::installation.asciidoc[]
|
||||
include::connecting.asciidoc[]
|
||||
|
||||
@ -24,7 +24,7 @@ To learn more about the supported major versions, please refer to the
|
||||
[[nodejs-support]]
|
||||
=== Node.js support
|
||||
|
||||
NOTE: The minimum supported version of Node.js is `v12`.
|
||||
NOTE: The minimum supported version of Node.js is `v10`.
|
||||
|
||||
The client versioning follows the {stack} versioning, this means that
|
||||
major, minor, and patch releases are done following a precise schedule that
|
||||
@ -62,8 +62,12 @@ of `^7.10.0`).
|
||||
[[js-compatibility-matrix]]
|
||||
=== Compatibility matrix
|
||||
|
||||
Language clients are forward compatible; meaning that clients support communicating with greater or equal minor versions of Elasticsearch.
|
||||
Elasticsearch language clients are only backwards compatible with default distributions and without guarantees made.
|
||||
Elastic language clients are guaranteed to be able to communicate with Elasticsearch
|
||||
or Elastic solutions running on the same major version and greater or equal minor version.
|
||||
|
||||
Language clients are forward compatible; meaning that clients support communicating
|
||||
with greater minor versions of Elasticsearch. Elastic language clients are not
|
||||
guaranteed to be backwards compatible.
|
||||
|
||||
[%header,cols=2*]
|
||||
|===
|
||||
|
||||
@ -132,9 +132,6 @@ async function run () {
|
||||
run().catch(console.log)
|
||||
----
|
||||
|
||||
TIP: For an elaborate example of how to ingest data into Elastic Cloud,
|
||||
refer to {cloud}/ec-getting-started-node-js.html[this page].
|
||||
|
||||
[discrete]
|
||||
==== Install multiple versions
|
||||
|
||||
|
||||
@ -1969,7 +1969,7 @@ link:{ref}/cluster-allocation-explain.html[Documentation] +
|
||||
|`boolean` - Return information about disk usage and shard sizes (default: false)
|
||||
|
||||
|`body`
|
||||
|`object` - The index, shard, and primary flag to explain. Empty means 'explain a randomly-chosen unassigned shard'
|
||||
|`object` - The index, shard, and primary flag to explain. Empty means 'explain the first unassigned shard'
|
||||
|
||||
|===
|
||||
|
||||
@ -4238,44 +4238,6 @@ link:{ref}/indices-templates.html[Documentation] +
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== indices.diskUsage
|
||||
*Stability:* experimental
|
||||
[source,ts]
|
||||
----
|
||||
client.indices.diskUsage({
|
||||
index: string,
|
||||
run_expensive_tasks: boolean,
|
||||
flush: boolean,
|
||||
ignore_unavailable: boolean,
|
||||
allow_no_indices: boolean,
|
||||
expand_wildcards: 'open' | 'closed' | 'hidden' | 'none' | 'all'
|
||||
})
|
||||
----
|
||||
link:{ref}/indices-disk-usage.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`index`
|
||||
|`string` - Comma-separated list of indices or data streams to analyze the disk usage
|
||||
|
||||
|`run_expensive_tasks` or `runExpensiveTasks`
|
||||
|`boolean` - Must be set to [true] in order for the task to be performed. Defaults to false.
|
||||
|
||||
|`flush`
|
||||
|`boolean` - Whether flush or not before analyzing the index disk usage. Defaults to true
|
||||
|
||||
|`ignore_unavailable` or `ignoreUnavailable`
|
||||
|`boolean` - Whether specified concrete indices should be ignored when unavailable (missing or closed)
|
||||
|
||||
|`allow_no_indices` or `allowNoIndices`
|
||||
|`boolean` - Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
|
||||
|
||||
|`expand_wildcards` or `expandWildcards`
|
||||
|`'open' \| 'closed' \| 'hidden' \| 'none' \| 'all'` - Whether to expand wildcard expression to concrete indices that are open, closed or both. +
|
||||
_Default:_ `open`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== indices.exists
|
||||
|
||||
@ -4452,40 +4414,6 @@ _Default:_ `open`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== indices.fieldUsageStats
|
||||
*Stability:* experimental
|
||||
[source,ts]
|
||||
----
|
||||
client.indices.fieldUsageStats({
|
||||
index: string,
|
||||
fields: string | string[],
|
||||
ignore_unavailable: boolean,
|
||||
allow_no_indices: boolean,
|
||||
expand_wildcards: 'open' | 'closed' | 'hidden' | 'none' | 'all'
|
||||
})
|
||||
----
|
||||
link:{ref}/field-usage-stats.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`index`
|
||||
|`string` - A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices
|
||||
|
||||
|`fields`
|
||||
|`string \| string[]` - A comma-separated list of fields to include in the stats if only a subset of fields should be returned (supports wildcards)
|
||||
|
||||
|`ignore_unavailable` or `ignoreUnavailable`
|
||||
|`boolean` - Whether specified concrete indices should be ignored when unavailable (missing or closed)
|
||||
|
||||
|`allow_no_indices` or `allowNoIndices`
|
||||
|`boolean` - Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
|
||||
|
||||
|`expand_wildcards` or `expandWildcards`
|
||||
|`'open' \| 'closed' \| 'hidden' \| 'none' \| 'all'` - Whether to expand wildcard expression to concrete indices that are open, closed or both. +
|
||||
_Default:_ `open`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== indices.flush
|
||||
|
||||
@ -7688,10 +7616,6 @@ link:{ref}/ml-put-filter.html[Documentation] +
|
||||
----
|
||||
client.ml.putJob({
|
||||
job_id: string,
|
||||
ignore_unavailable: boolean,
|
||||
allow_no_indices: boolean,
|
||||
ignore_throttled: boolean,
|
||||
expand_wildcards: 'open' | 'closed' | 'hidden' | 'none' | 'all',
|
||||
body: object
|
||||
})
|
||||
----
|
||||
@ -7701,18 +7625,6 @@ link:{ref}/ml-put-job.html[Documentation] +
|
||||
|`job_id` or `jobId`
|
||||
|`string` - The ID of the job to create
|
||||
|
||||
|`ignore_unavailable` or `ignoreUnavailable`
|
||||
|`boolean` - Ignore unavailable indexes (default: false). Only set if datafeed_config is provided.
|
||||
|
||||
|`allow_no_indices` or `allowNoIndices`
|
||||
|`boolean` - Ignore if the source indices expressions resolves to no concrete indices (default: true). Only set if datafeed_config is provided.
|
||||
|
||||
|`ignore_throttled` or `ignoreThrottled`
|
||||
|`boolean` - Ignore indices that are marked as throttled (default: true). Only set if datafeed_config is provided.
|
||||
|
||||
|`expand_wildcards` or `expandWildcards`
|
||||
|`'open' \| 'closed' \| 'hidden' \| 'none' \| 'all'` - Whether source index expressions should get expanded to open or closed indices (default: open). Only set if datafeed_config is provided.
|
||||
|
||||
|`body`
|
||||
|`object` - The job
|
||||
|
||||
@ -8368,44 +8280,6 @@ _Default:_ `true`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== nodes.clearMeteringArchive
|
||||
*Stability:* experimental
|
||||
[source,ts]
|
||||
----
|
||||
client.nodes.clearMeteringArchive({
|
||||
node_id: string | string[],
|
||||
max_archive_version: number
|
||||
})
|
||||
----
|
||||
link:{ref}/clear-repositories-metering-archive-api.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`node_id` or `nodeId`
|
||||
|`string \| string[]` - Comma-separated list of node IDs or names used to limit returned information.
|
||||
|
||||
|`max_archive_version` or `maxArchiveVersion`
|
||||
|`number` - Specifies the maximum archive_version to be cleared from the archive.
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== nodes.getMeteringInfo
|
||||
*Stability:* experimental
|
||||
[source,ts]
|
||||
----
|
||||
client.nodes.getMeteringInfo({
|
||||
node_id: string | string[]
|
||||
})
|
||||
----
|
||||
link:{ref}/get-repositories-metering-api.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`node_id` or `nodeId`
|
||||
|`string \| string[]` - A comma-separated list of node IDs or names to limit the returned information.
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== nodes.hotThreads
|
||||
|
||||
@ -9248,67 +9122,6 @@ _Default:_ `5`
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== searchMvt
|
||||
*Stability:* experimental
|
||||
[source,ts]
|
||||
----
|
||||
client.searchMvt({
|
||||
index: string | string[],
|
||||
field: string,
|
||||
zoom: number,
|
||||
x: number,
|
||||
y: number,
|
||||
exact_bounds: boolean,
|
||||
extent: number,
|
||||
grid_precision: number,
|
||||
grid_type: 'grid' | 'point',
|
||||
size: number,
|
||||
body: object
|
||||
})
|
||||
----
|
||||
link:{ref}/search-vector-tile-api.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`index`
|
||||
|`string \| string[]` - Comma-separated list of data streams, indices, or aliases to search
|
||||
|
||||
|`field`
|
||||
|`string` - Field containing geospatial data to return
|
||||
|
||||
|`zoom`
|
||||
|`number` - Zoom level for the vector tile to search
|
||||
|
||||
|`x`
|
||||
|`number` - X coordinate for the vector tile to search
|
||||
|
||||
|`y`
|
||||
|`number` - Y coordinate for the vector tile to search
|
||||
|
||||
|`exact_bounds` or `exactBounds`
|
||||
|`boolean` - If false, the meta layer's feature is the bounding box of the tile. If true, the meta layer's feature is a bounding box resulting from a `geo_bounds` aggregation.
|
||||
|
||||
|`extent`
|
||||
|`number` - Size, in pixels, of a side of the vector tile. +
|
||||
_Default:_ `4096`
|
||||
|
||||
|`grid_precision` or `gridPrecision`
|
||||
|`number` - Additional zoom levels available through the aggs layer. Accepts 0-8. +
|
||||
_Default:_ `8`
|
||||
|
||||
|`grid_type` or `gridType`
|
||||
|`'grid' \| 'point'` - Determines the geometry type for features in the aggs layer. +
|
||||
_Default:_ `grid`
|
||||
|
||||
|`size`
|
||||
|`number` - Maximum number of features to return in the hits layer. Accepts 0-10000. +
|
||||
_Default:_ `10000`
|
||||
|
||||
|`body`
|
||||
|`object` - Search request body.
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== searchShards
|
||||
|
||||
@ -10250,23 +10063,6 @@ link:{ref}/security-api-put-user.html[Documentation] +
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== security.queryApiKeys
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
client.security.queryApiKeys({
|
||||
body: object
|
||||
})
|
||||
----
|
||||
link:{ref}/security-api-query-api-key.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`body`
|
||||
|`object` - From, size, query, sort and search_after
|
||||
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
=== security.samlAuthenticate
|
||||
|
||||
@ -10943,7 +10739,7 @@ client.sql.clearCursor({
|
||||
body: object
|
||||
})
|
||||
----
|
||||
link:{ref}/clear-sql-cursor-api.html[Documentation] +
|
||||
link:{ref}/sql-pagination.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`body`
|
||||
@ -11030,7 +10826,7 @@ client.sql.query({
|
||||
body: object
|
||||
})
|
||||
----
|
||||
link:{ref}/sql-search-api.html[Documentation] +
|
||||
link:{ref}/sql-rest-overview.html[Documentation] +
|
||||
{jsclient}/sql_query_examples.html[Code Example] +
|
||||
[cols=2*]
|
||||
|===
|
||||
@ -11051,7 +10847,7 @@ client.sql.translate({
|
||||
body: object
|
||||
})
|
||||
----
|
||||
link:{ref}/sql-translate-api.html[Documentation] +
|
||||
link:{ref}/sql-translate.html[Documentation] +
|
||||
[cols=2*]
|
||||
|===
|
||||
|`body`
|
||||
|
||||
@ -32,10 +32,3 @@ class MyTransport extends Transport {
|
||||
}
|
||||
----
|
||||
|
||||
==== Supported content types
|
||||
|
||||
- `application/json`, in this case the transport will return a plain JavaScript object
|
||||
- `text/plain`, in this case the transport will return a plain string
|
||||
- `application/vnd.mapbox-vector-tile`, in this case the transport will return a Buffer
|
||||
- `application/vnd.elasticsearch+json`, in this case the transport will return a plain JavaScript object
|
||||
|
||||
|
||||
51
index.d.ts
vendored
51
index.d.ts
vendored
@ -118,9 +118,6 @@ interface ClientOptions {
|
||||
password?: string;
|
||||
};
|
||||
disablePrototypePoisoningProtection?: boolean | 'proto' | 'constructor';
|
||||
caFingerprint?: string;
|
||||
maxResponseSize?: number;
|
||||
maxCompressedResponseSize?: number;
|
||||
}
|
||||
|
||||
declare class Client {
|
||||
@ -970,14 +967,6 @@ declare class Client {
|
||||
deleteTemplate<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
deleteTemplate<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesDeleteTemplate, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
deleteTemplate<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesDeleteTemplate, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
disk_usage<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDiskUsage, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
disk_usage<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
disk_usage<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesDiskUsage, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
disk_usage<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesDiskUsage, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
diskUsage<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesDiskUsage, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
diskUsage<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
diskUsage<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesDiskUsage, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
diskUsage<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesDiskUsage, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
exists<TResponse = boolean, TContext = Context>(params?: RequestParams.IndicesExists, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
exists<TResponse = boolean, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
exists<TResponse = boolean, TContext = Context>(params: RequestParams.IndicesExists, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
@ -1014,14 +1003,6 @@ declare class Client {
|
||||
existsType<TResponse = boolean, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
existsType<TResponse = boolean, TContext = Context>(params: RequestParams.IndicesExistsType, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
existsType<TResponse = boolean, TContext = Context>(params: RequestParams.IndicesExistsType, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
field_usage_stats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesFieldUsageStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
field_usage_stats<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
field_usage_stats<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesFieldUsageStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
field_usage_stats<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesFieldUsageStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
fieldUsageStats<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesFieldUsageStats, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
fieldUsageStats<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
fieldUsageStats<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesFieldUsageStats, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
fieldUsageStats<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesFieldUsageStats, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
flush<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.IndicesFlush, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
flush<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
flush<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.IndicesFlush, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
@ -1928,22 +1909,6 @@ declare class Client {
|
||||
mtermvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Mtermvectors<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
mtermvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Mtermvectors<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
nodes: {
|
||||
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesClearMeteringArchive, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesClearMeteringArchive, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
get_metering_info<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesGetMeteringInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
get_metering_info<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
get_metering_info<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
get_metering_info<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesGetMeteringInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
hot_threads<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesHotThreads, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
hot_threads<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
hot_threads<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesHotThreads, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
@ -2107,14 +2072,6 @@ declare class Client {
|
||||
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Search<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
search<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Search<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
search_mvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SearchMvt<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
search_mvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
search_mvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SearchMvt<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
search_mvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SearchMvt<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
searchMvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SearchMvt<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
searchMvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
searchMvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SearchMvt<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
searchMvt<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SearchMvt<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
search_shards<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SearchShards, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
search_shards<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
search_shards<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.SearchShards, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
@ -2468,14 +2425,6 @@ declare class Client {
|
||||
putUser<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
putUser<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityPutUser<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
putUser<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityPutUser<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
query_api_keys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityQueryApiKeys<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
query_api_keys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
query_api_keys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityQueryApiKeys<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
query_api_keys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityQueryApiKeys<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
queryApiKeys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityQueryApiKeys<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
queryApiKeys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
queryApiKeys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityQueryApiKeys<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
queryApiKeys<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityQueryApiKeys<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
saml_authenticate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecuritySamlAuthenticate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
saml_authenticate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
saml_authenticate<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecuritySamlAuthenticate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
|
||||
31
index.js
31
index.js
@ -21,7 +21,6 @@
|
||||
|
||||
const { EventEmitter } = require('events')
|
||||
const { URL } = require('url')
|
||||
const buffer = require('buffer')
|
||||
const debug = require('debug')('elasticsearch')
|
||||
const Transport = require('./lib/Transport')
|
||||
const Connection = require('./lib/Connection')
|
||||
@ -103,7 +102,6 @@ class Client extends ESAPI {
|
||||
suggestCompression: false,
|
||||
compression: false,
|
||||
ssl: null,
|
||||
caFingerprint: null,
|
||||
agent: null,
|
||||
headers: {},
|
||||
nodeFilter: null,
|
||||
@ -115,23 +113,9 @@ class Client extends ESAPI {
|
||||
context: null,
|
||||
proxy: null,
|
||||
enableMetaHeader: true,
|
||||
disablePrototypePoisoningProtection: false,
|
||||
maxResponseSize: null,
|
||||
maxCompressedResponseSize: null
|
||||
disablePrototypePoisoningProtection: false
|
||||
}, opts)
|
||||
|
||||
if (options.maxResponseSize !== null && options.maxResponseSize > buffer.constants.MAX_STRING_LENGTH) {
|
||||
throw new ConfigurationError(`The maxResponseSize cannot be bigger than ${buffer.constants.MAX_STRING_LENGTH}`)
|
||||
}
|
||||
|
||||
if (options.maxCompressedResponseSize !== null && options.maxCompressedResponseSize > buffer.constants.MAX_LENGTH) {
|
||||
throw new ConfigurationError(`The maxCompressedResponseSize cannot be bigger than ${buffer.constants.MAX_LENGTH}`)
|
||||
}
|
||||
|
||||
if (options.caFingerprint !== null && isHttpConnection(opts.node || opts.nodes)) {
|
||||
throw new ConfigurationError('You can\'t configure the caFingerprint with a http connection')
|
||||
}
|
||||
|
||||
if (process.env.ELASTIC_CLIENT_APIVERSIONING === 'true') {
|
||||
options.headers = Object.assign({ accept: 'application/vnd.elasticsearch+json; compatible-with=7' }, options.headers)
|
||||
}
|
||||
@ -162,7 +146,6 @@ class Client extends ESAPI {
|
||||
Connection: options.Connection,
|
||||
auth: options.auth,
|
||||
emit: this[kEventEmitter].emit.bind(this[kEventEmitter]),
|
||||
caFingerprint: options.caFingerprint,
|
||||
sniffEnabled: options.sniffInterval !== false ||
|
||||
options.sniffOnStart !== false ||
|
||||
options.sniffOnConnectionFault !== false
|
||||
@ -189,9 +172,7 @@ class Client extends ESAPI {
|
||||
generateRequestId: options.generateRequestId,
|
||||
name: options.name,
|
||||
opaqueIdPrefix: options.opaqueIdPrefix,
|
||||
context: options.context,
|
||||
maxResponseSize: options.maxResponseSize,
|
||||
maxCompressedResponseSize: options.maxCompressedResponseSize
|
||||
context: options.context
|
||||
})
|
||||
|
||||
this.helpers = new Helpers({
|
||||
@ -334,14 +315,6 @@ function getAuth (node) {
|
||||
}
|
||||
}
|
||||
|
||||
function isHttpConnection (node) {
|
||||
if (Array.isArray(node)) {
|
||||
return node.some((n) => (typeof n === 'string' ? new URL(n).protocol : n.url.protocol) === 'http:')
|
||||
} else {
|
||||
return (typeof node === 'string' ? new URL(node).protocol : node.url.protocol) === 'http:'
|
||||
}
|
||||
}
|
||||
|
||||
const events = {
|
||||
RESPONSE: 'response',
|
||||
REQUEST: 'request',
|
||||
|
||||
1
lib/Connection.d.ts
vendored
1
lib/Connection.d.ts
vendored
@ -40,7 +40,6 @@ export interface ConnectionOptions {
|
||||
roles?: ConnectionRoles;
|
||||
auth?: BasicAuth | ApiKeyAuth;
|
||||
proxy?: string | URL;
|
||||
caFingerprint?: string;
|
||||
}
|
||||
|
||||
interface ConnectionRoles {
|
||||
|
||||
@ -42,7 +42,6 @@ class Connection {
|
||||
this.headers = prepareHeaders(opts.headers, opts.auth)
|
||||
this.deadCount = 0
|
||||
this.resurrectTimeout = 0
|
||||
this.caFingerprint = opts.caFingerprint
|
||||
|
||||
this._openRequests = 0
|
||||
this._status = opts.status || Connection.statuses.ALIVE
|
||||
@ -113,14 +112,7 @@ class Connection {
|
||||
const onError = err => {
|
||||
cleanListeners()
|
||||
this._openRequests--
|
||||
let message = err.message
|
||||
if (err.code === 'ECONNRESET') {
|
||||
/* istanbul ignore next */
|
||||
const socket = request.socket || {}
|
||||
/* istanbul ignore next */
|
||||
message += ` - Local: ${socket.localAddress || 'unknown'}:${socket.localPort || 'unknown'}, Remote: ${socket.remoteAddress || 'unknown'}:${socket.remotePort || 'unknown'}`
|
||||
}
|
||||
callback(new ConnectionError(message), null)
|
||||
callback(new ConnectionError(err.message), null)
|
||||
}
|
||||
|
||||
const onAbort = () => {
|
||||
@ -131,36 +123,10 @@ class Connection {
|
||||
callback(new RequestAbortedError(), null)
|
||||
}
|
||||
|
||||
const onSocket = socket => {
|
||||
/* istanbul ignore else */
|
||||
if (!socket.isSessionReused()) {
|
||||
socket.once('secureConnect', () => {
|
||||
const issuerCertificate = getIssuerCertificate(socket)
|
||||
/* istanbul ignore next */
|
||||
if (issuerCertificate == null) {
|
||||
onError(new Error('Invalid or malformed certificate'))
|
||||
request.once('error', () => {}) // we need to catch the request aborted error
|
||||
return request.abort()
|
||||
}
|
||||
|
||||
// Check if fingerprint matches
|
||||
/* istanbul ignore else */
|
||||
if (this.caFingerprint !== issuerCertificate.fingerprint256) {
|
||||
onError(new Error('Server certificate CA fingerprint does not match the value configured in caFingerprint'))
|
||||
request.once('error', () => {}) // we need to catch the request aborted error
|
||||
return request.abort()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
request.on('response', onResponse)
|
||||
request.on('timeout', onTimeout)
|
||||
request.on('error', onError)
|
||||
request.on('abort', onAbort)
|
||||
if (this.caFingerprint != null) {
|
||||
request.on('socket', onSocket)
|
||||
}
|
||||
|
||||
// Disables the Nagle algorithm
|
||||
request.setNoDelay(true)
|
||||
@ -186,7 +152,6 @@ class Connection {
|
||||
request.removeListener('timeout', onTimeout)
|
||||
request.removeListener('error', onError)
|
||||
request.removeListener('abort', onAbort)
|
||||
request.removeListener('socket', onSocket)
|
||||
cleanedListeners = true
|
||||
}
|
||||
}
|
||||
@ -375,25 +340,5 @@ function prepareHeaders (headers = {}, auth) {
|
||||
return headers
|
||||
}
|
||||
|
||||
function getIssuerCertificate (socket) {
|
||||
let certificate = socket.getPeerCertificate(true)
|
||||
while (certificate && Object.keys(certificate).length > 0) {
|
||||
// invalid certificate
|
||||
if (certificate.issuerCertificate == null) {
|
||||
return null
|
||||
}
|
||||
|
||||
// We have reached the root certificate.
|
||||
// In case of self-signed certificates, `issuerCertificate` may be a circular reference.
|
||||
if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) {
|
||||
break
|
||||
}
|
||||
|
||||
// continue the loop
|
||||
certificate = certificate.issuerCertificate
|
||||
}
|
||||
return certificate
|
||||
}
|
||||
|
||||
module.exports = Connection
|
||||
module.exports.internals = { prepareHeaders, getIssuerCertificate }
|
||||
module.exports.internals = { prepareHeaders }
|
||||
|
||||
8
lib/Transport.d.ts
vendored
8
lib/Transport.d.ts
vendored
@ -61,8 +61,6 @@ interface TransportOptions {
|
||||
generateRequestId?: generateRequestIdFn;
|
||||
name?: string;
|
||||
opaqueIdPrefix?: string;
|
||||
maxResponseSize?: number;
|
||||
maxCompressedResponseSize?: number;
|
||||
}
|
||||
|
||||
export interface RequestEvent<TResponse = Record<string, any>, TContext = Context> {
|
||||
@ -115,8 +113,6 @@ export interface TransportRequestOptions {
|
||||
context?: Context;
|
||||
warnings?: string[];
|
||||
opaqueId?: string;
|
||||
maxResponseSize?: number;
|
||||
maxCompressedResponseSize?: number;
|
||||
}
|
||||
|
||||
export interface TransportRequestCallback {
|
||||
@ -159,8 +155,8 @@ export default class Transport {
|
||||
_nextSniff: number;
|
||||
_isSniffing: boolean;
|
||||
constructor(opts: TransportOptions);
|
||||
request<TResponse = Record<string, any>, TContext = Context>(params: TransportRequestParams, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
|
||||
request<TResponse = Record<string, any>, TContext = Context>(params: TransportRequestParams, options?: TransportRequestOptions, callback?: (err: ApiError, result: ApiResponse<TResponse, TContext>) => void): TransportRequestCallback;
|
||||
request(params: TransportRequestParams, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse>;
|
||||
request(params: TransportRequestParams, options?: TransportRequestOptions, callback?: (err: ApiError, result: ApiResponse) => void): TransportRequestCallback;
|
||||
getConnection(opts: TransportGetConnectionOptions): Connection | null;
|
||||
sniff(opts?: TransportSniffOptions, callback?: (...args: any[]) => void): void;
|
||||
}
|
||||
|
||||
@ -36,15 +36,13 @@ const {
|
||||
|
||||
const noop = () => {}
|
||||
|
||||
const productCheckEmitter = new EventEmitter()
|
||||
const clientVersion = require('../package.json').version
|
||||
const userAgent = `elasticsearch-js/${clientVersion} (${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})`
|
||||
const MAX_BUFFER_LENGTH = buffer.constants.MAX_LENGTH
|
||||
const MAX_STRING_LENGTH = buffer.constants.MAX_STRING_LENGTH
|
||||
const kProductCheck = Symbol('product check')
|
||||
const kApiVersioning = Symbol('api versioning')
|
||||
const kEventEmitter = Symbol('event emitter')
|
||||
const kMaxResponseSize = Symbol('max response size')
|
||||
const kMaxCompressedResponseSize = Symbol('max compressed response size')
|
||||
|
||||
class Transport {
|
||||
constructor (opts) {
|
||||
@ -73,9 +71,6 @@ class Transport {
|
||||
this.opaqueIdPrefix = opts.opaqueIdPrefix
|
||||
this[kProductCheck] = 0 // 0 = to be checked, 1 = checking, 2 = checked-ok, 3 checked-notok, 4 checked-nodefault
|
||||
this[kApiVersioning] = process.env.ELASTIC_CLIENT_APIVERSIONING === 'true'
|
||||
this[kEventEmitter] = new EventEmitter()
|
||||
this[kMaxResponseSize] = opts.maxResponseSize || MAX_STRING_LENGTH
|
||||
this[kMaxCompressedResponseSize] = opts.maxCompressedResponseSize || MAX_BUFFER_LENGTH
|
||||
|
||||
this.nodeFilter = opts.nodeFilter || defaultNodeFilter
|
||||
if (typeof opts.nodeSelector === 'function') {
|
||||
@ -166,8 +161,6 @@ class Transport {
|
||||
? 0
|
||||
: (typeof options.maxRetries === 'number' ? options.maxRetries : this.maxRetries)
|
||||
const compression = options.compression !== undefined ? options.compression : this.compression
|
||||
const maxResponseSize = options.maxResponseSize || this[kMaxResponseSize]
|
||||
const maxCompressedResponseSize = options.maxCompressedResponseSize || this[kMaxCompressedResponseSize]
|
||||
let request = { abort: noop }
|
||||
const transportReturn = {
|
||||
then (onFulfilled, onRejected) {
|
||||
@ -189,7 +182,6 @@ class Transport {
|
||||
|
||||
const makeRequest = () => {
|
||||
if (meta.aborted === true) {
|
||||
this.emit('request', new RequestAbortedError(), result)
|
||||
return process.nextTick(callback, new RequestAbortedError(), result)
|
||||
}
|
||||
meta.connection = this.getConnection({ requestId: meta.request.id })
|
||||
@ -245,28 +237,26 @@ class Transport {
|
||||
|
||||
const contentEncoding = (result.headers['content-encoding'] || '').toLowerCase()
|
||||
const isCompressed = contentEncoding.indexOf('gzip') > -1 || contentEncoding.indexOf('deflate') > -1
|
||||
const isVectorTile = (result.headers['content-type'] || '').indexOf('application/vnd.mapbox-vector-tile') > -1
|
||||
|
||||
/* istanbul ignore else */
|
||||
if (result.headers['content-length'] !== undefined) {
|
||||
const contentLength = Number(result.headers['content-length'])
|
||||
if (isCompressed && contentLength > maxCompressedResponseSize) {
|
||||
if (isCompressed && contentLength > MAX_BUFFER_LENGTH) {
|
||||
response.destroy()
|
||||
return onConnectionError(
|
||||
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed buffer (${maxCompressedResponseSize})`, result)
|
||||
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed buffer (${MAX_BUFFER_LENGTH})`, result)
|
||||
)
|
||||
} else if (contentLength > maxResponseSize) {
|
||||
} else if (contentLength > MAX_STRING_LENGTH) {
|
||||
response.destroy()
|
||||
return onConnectionError(
|
||||
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed string (${maxResponseSize})`, result)
|
||||
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed string (${MAX_STRING_LENGTH})`, result)
|
||||
)
|
||||
}
|
||||
}
|
||||
// if the response is compressed, we must handle it
|
||||
// as buffer for allowing decompression later
|
||||
// while if it's a vector tile, we should return it as buffer
|
||||
let payload = isCompressed || isVectorTile ? [] : ''
|
||||
const onData = isCompressed || isVectorTile
|
||||
let payload = isCompressed ? [] : ''
|
||||
const onData = isCompressed
|
||||
? chunk => { payload.push(chunk) }
|
||||
: chunk => { payload += chunk }
|
||||
const onEnd = err => {
|
||||
@ -282,7 +272,7 @@ class Transport {
|
||||
if (isCompressed) {
|
||||
unzip(Buffer.concat(payload), onBody)
|
||||
} else {
|
||||
onBody(null, isVectorTile ? Buffer.concat(payload) : payload)
|
||||
onBody(null, payload)
|
||||
}
|
||||
}
|
||||
|
||||
@ -291,7 +281,7 @@ class Transport {
|
||||
onEnd(new Error('Response aborted while reading the body'))
|
||||
}
|
||||
|
||||
if (!isCompressed && !isVectorTile) {
|
||||
if (!isCompressed) {
|
||||
response.setEncoding('utf8')
|
||||
}
|
||||
|
||||
@ -307,9 +297,7 @@ class Transport {
|
||||
this.emit('response', err, result)
|
||||
return callback(err, result)
|
||||
}
|
||||
|
||||
const isVectorTile = (result.headers['content-type'] || '').indexOf('application/vnd.mapbox-vector-tile') > -1
|
||||
if (Buffer.isBuffer(payload) && !isVectorTile) {
|
||||
if (Buffer.isBuffer(payload)) {
|
||||
payload = payload.toString()
|
||||
}
|
||||
const isHead = params.method === 'HEAD'
|
||||
@ -428,6 +416,8 @@ class Transport {
|
||||
// handles request timeout
|
||||
params.timeout = toMs(options.requestTimeout || this.requestTimeout)
|
||||
if (options.asStream === true) params.asStream = true
|
||||
meta.request.params = params
|
||||
meta.request.options = options
|
||||
|
||||
// handle compression
|
||||
if (params.body !== '' && params.body != null) {
|
||||
@ -458,8 +448,6 @@ class Transport {
|
||||
}
|
||||
}
|
||||
|
||||
meta.request.params = params
|
||||
meta.request.options = options
|
||||
// still need to check the product or waiting for the check to finish
|
||||
if (this[kProductCheck] === 0 || this[kProductCheck] === 1) {
|
||||
// let pass info requests
|
||||
@ -467,7 +455,7 @@ class Transport {
|
||||
prepareRequest()
|
||||
} else {
|
||||
// wait for product check to finish
|
||||
this[kEventEmitter].once('product-check', (error, status) => {
|
||||
productCheckEmitter.once('product-check', (error, status) => {
|
||||
if (status === false) {
|
||||
const err = error || new ProductNotSupportedError(result)
|
||||
if (this[kProductCheck] === 4) {
|
||||
@ -567,52 +555,49 @@ class Transport {
|
||||
debug('Product check failed', err)
|
||||
if (err.statusCode === 401 || err.statusCode === 403) {
|
||||
this[kProductCheck] = 2
|
||||
process.emitWarning(
|
||||
'The client is unable to verify that the server is Elasticsearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.',
|
||||
'ProductNotSupportedSecurityError'
|
||||
)
|
||||
this[kEventEmitter].emit('product-check', null, true)
|
||||
process.emitWarning('The client is unable to verify that the server is Elasticsearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.')
|
||||
productCheckEmitter.emit('product-check', null, true)
|
||||
} else {
|
||||
this[kProductCheck] = 0
|
||||
this[kEventEmitter].emit('product-check', err, false)
|
||||
productCheckEmitter.emit('product-check', err, false)
|
||||
}
|
||||
} else {
|
||||
debug('Checking elasticsearch version', result.body, result.headers)
|
||||
if (result.body.version == null || typeof result.body.version.number !== 'string') {
|
||||
debug('Can\'t access Elasticsearch version')
|
||||
return this[kEventEmitter].emit('product-check', null, false)
|
||||
return productCheckEmitter.emit('product-check', null, false)
|
||||
}
|
||||
const tagline = result.body.tagline
|
||||
const version = result.body.version.number.split('.')
|
||||
const major = Number(version[0])
|
||||
const minor = Number(version[1])
|
||||
if (major < 6) {
|
||||
return this[kEventEmitter].emit('product-check', null, false)
|
||||
return productCheckEmitter.emit('product-check', null, false)
|
||||
} else if (major >= 6 && major < 7) {
|
||||
if (tagline !== 'You Know, for Search') {
|
||||
debug('Bad tagline')
|
||||
return this[kEventEmitter].emit('product-check', null, false)
|
||||
return productCheckEmitter.emit('product-check', null, false)
|
||||
}
|
||||
} else if (major === 7 && minor < 14) {
|
||||
if (tagline !== 'You Know, for Search') {
|
||||
debug('Bad tagline')
|
||||
return this[kEventEmitter].emit('product-check', null, false)
|
||||
return productCheckEmitter.emit('product-check', null, false)
|
||||
}
|
||||
|
||||
if (result.body.version.build_flavor !== 'default') {
|
||||
debug('Bad build_flavor')
|
||||
this[kProductCheck] = 4
|
||||
return this[kEventEmitter].emit('product-check', null, false)
|
||||
return productCheckEmitter.emit('product-check', null, false)
|
||||
}
|
||||
} else {
|
||||
if (result.headers['x-elastic-product'] !== 'Elasticsearch') {
|
||||
debug('x-elastic-product not recognized')
|
||||
return this[kEventEmitter].emit('product-check', null, false)
|
||||
return productCheckEmitter.emit('product-check', null, false)
|
||||
}
|
||||
}
|
||||
debug('Valid Elasticsearch distribution')
|
||||
this[kProductCheck] = 2
|
||||
this[kEventEmitter].emit('product-check', null, true)
|
||||
productCheckEmitter.emit('product-check', null, true)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@ -97,10 +97,8 @@ class ResponseError extends ElasticsearchClientError {
|
||||
} else {
|
||||
this.message = meta.body.error.type
|
||||
}
|
||||
} else if (typeof meta.body === 'object' && meta.body != null) {
|
||||
this.message = JSON.stringify(meta.body)
|
||||
} else {
|
||||
this.message = meta.body || 'Response Error'
|
||||
this.message = 'Response Error'
|
||||
}
|
||||
this.meta = meta
|
||||
}
|
||||
|
||||
@ -36,7 +36,6 @@ class BaseConnectionPool {
|
||||
this._ssl = opts.ssl
|
||||
this._agent = opts.agent
|
||||
this._proxy = opts.proxy || null
|
||||
this._caFingerprint = opts.caFingerprint || null
|
||||
}
|
||||
|
||||
getConnection () {
|
||||
@ -73,8 +72,6 @@ class BaseConnectionPool {
|
||||
if (opts.agent == null) opts.agent = this._agent
|
||||
/* istanbul ignore else */
|
||||
if (opts.proxy == null) opts.proxy = this._proxy
|
||||
/* istanbul ignore else */
|
||||
if (opts.caFingerprint == null) opts.caFingerprint = this._caFingerprint
|
||||
|
||||
const connection = new this.Connection(opts)
|
||||
|
||||
|
||||
1
lib/pool/index.d.ts
vendored
1
lib/pool/index.d.ts
vendored
@ -31,7 +31,6 @@ interface BaseConnectionPoolOptions {
|
||||
auth?: BasicAuth | ApiKeyAuth;
|
||||
emit: (event: string | symbol, ...args: any[]) => boolean;
|
||||
Connection: typeof Connection;
|
||||
caFingerprint?: string;
|
||||
}
|
||||
|
||||
interface ConnectionPoolOptions extends BaseConnectionPoolOptions {
|
||||
|
||||
10
package.json
10
package.json
@ -6,14 +6,13 @@
|
||||
"exports": {
|
||||
".": {
|
||||
"require": "./index.js",
|
||||
"import": "./index.mjs",
|
||||
"types": "./index.d.ts"
|
||||
"import": "./index.mjs"
|
||||
},
|
||||
"./": "./"
|
||||
},
|
||||
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
|
||||
"version": "7.16.0",
|
||||
"versionCanary": "7.16.0-canary.4",
|
||||
"version": "7.14.0",
|
||||
"versionCanary": "7.14.0-canary.7",
|
||||
"keywords": [
|
||||
"elasticsearch",
|
||||
"elastic",
|
||||
@ -101,7 +100,6 @@
|
||||
"jsx": false,
|
||||
"flow": false,
|
||||
"coverage": false,
|
||||
"jobs-auto": true,
|
||||
"check-coverage": false
|
||||
"jobs-auto": true
|
||||
}
|
||||
}
|
||||
|
||||
@ -20,7 +20,7 @@
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const { Client, errors } = require('../../')
|
||||
const { Client } = require('../../')
|
||||
const {
|
||||
connection: {
|
||||
MockConnectionTimeout,
|
||||
@ -470,7 +470,7 @@ test('Errors v6', t => {
|
||||
})
|
||||
|
||||
test('Auth error - 401', t => {
|
||||
t.plan(9)
|
||||
t.plan(8)
|
||||
const MockConnection = buildMockConnection({
|
||||
onRequest (params) {
|
||||
return {
|
||||
@ -487,7 +487,6 @@ test('Auth error - 401', t => {
|
||||
|
||||
process.on('warning', onWarning)
|
||||
function onWarning (warning) {
|
||||
t.equal(warning.name, 'ProductNotSupportedSecurityError')
|
||||
t.equal(warning.message, 'The client is unable to verify that the server is Elasticsearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.')
|
||||
}
|
||||
|
||||
@ -525,7 +524,7 @@ test('Auth error - 401', t => {
|
||||
})
|
||||
|
||||
test('Auth error - 403', t => {
|
||||
t.plan(9)
|
||||
t.plan(8)
|
||||
const MockConnection = buildMockConnection({
|
||||
onRequest (params) {
|
||||
return {
|
||||
@ -542,7 +541,6 @@ test('Auth error - 403', t => {
|
||||
|
||||
process.on('warning', onWarning)
|
||||
function onWarning (warning) {
|
||||
t.equal(warning.name, 'ProductNotSupportedSecurityError')
|
||||
t.equal(warning.message, 'The client is unable to verify that the server is Elasticsearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.')
|
||||
}
|
||||
|
||||
@ -651,7 +649,7 @@ test('500 error', t => {
|
||||
}
|
||||
}
|
||||
}, (err, result) => {
|
||||
t.equal(err.message, '{"error":"kaboom"}')
|
||||
t.equal(err.message, 'Response Error')
|
||||
|
||||
client.search({
|
||||
index: 'foo',
|
||||
@ -1247,102 +1245,3 @@ test('No multiple checks with child clients', t => {
|
||||
})
|
||||
}, 100)
|
||||
})
|
||||
|
||||
test('Observability events should have all the expected properties', t => {
|
||||
t.plan(5)
|
||||
const MockConnection = buildMockConnection({
|
||||
onRequest (params) {
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: {
|
||||
name: '1ef419078577',
|
||||
cluster_name: 'docker-cluster',
|
||||
cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA',
|
||||
tagline: 'You Know, for Search'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on('request', (e, event) => {
|
||||
t.ok(event.meta.request.params)
|
||||
t.ok(event.meta.request.options)
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'foo',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
}, (err, result) => {
|
||||
t.equal(err.message, 'The client noticed that the server is not Elasticsearch and we do not support this unknown product.')
|
||||
})
|
||||
})
|
||||
|
||||
test('Abort a request while running the product check', t => {
|
||||
t.plan(4)
|
||||
const MockConnection = buildMockConnection({
|
||||
onRequest (params) {
|
||||
return {
|
||||
statusCode: 200,
|
||||
headers: {
|
||||
'x-elastic-product': 'Elasticsearch'
|
||||
},
|
||||
body: {
|
||||
name: '1ef419078577',
|
||||
cluster_name: 'docker-cluster',
|
||||
cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA',
|
||||
version: {
|
||||
number: '8.0.0-SNAPSHOT',
|
||||
build_flavor: 'default',
|
||||
build_type: 'docker',
|
||||
build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3',
|
||||
build_date: '2021-07-10T01:45:02.136546168Z',
|
||||
build_snapshot: true,
|
||||
lucene_version: '8.9.0',
|
||||
minimum_wire_compatibility_version: '7.15.0',
|
||||
minimum_index_compatibility_version: '7.0.0'
|
||||
},
|
||||
tagline: 'You Know, for Search'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on('request', (err, event) => {
|
||||
if (event.meta.request.params.path.includes('search')) {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
}
|
||||
})
|
||||
|
||||
// the response event won't be executed for the search
|
||||
client.on('response', (err, event) => {
|
||||
t.error(err)
|
||||
t.equal(event.meta.request.params.path, '/')
|
||||
})
|
||||
|
||||
const req = client.search({
|
||||
index: 'foo',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
}, (err, result) => {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
})
|
||||
|
||||
setImmediate(() => req.abort())
|
||||
})
|
||||
|
||||
@ -77,7 +77,6 @@ test('Should update the connection pool', t => {
|
||||
t.same(hosts[i], {
|
||||
url: new URL(nodes[id].url),
|
||||
id: id,
|
||||
caFingerprint: null,
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
"name": "parcel-test",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"start": "node index.js",
|
||||
"build": "parcel build index.js --no-source-maps"
|
||||
|
||||
@ -1,71 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const { Client, errors } = require('../../')
|
||||
const Mock = require('@elastic/elasticsearch-mock')
|
||||
|
||||
test('Mock should work', async t => {
|
||||
t.plan(1)
|
||||
|
||||
const mock = new Mock()
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: mock.getConnection()
|
||||
})
|
||||
|
||||
mock.add({
|
||||
method: 'GET',
|
||||
path: '/_cat/indices'
|
||||
}, () => {
|
||||
return { status: 'ok' }
|
||||
})
|
||||
|
||||
const response = await client.cat.indices()
|
||||
t.same(response.body, { status: 'ok' })
|
||||
})
|
||||
|
||||
test('Return an error', async t => {
|
||||
t.plan(1)
|
||||
|
||||
const mock = new Mock()
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: mock.getConnection()
|
||||
})
|
||||
|
||||
mock.add({
|
||||
method: 'GET',
|
||||
path: '/_cat/indices'
|
||||
}, () => {
|
||||
return new errors.ResponseError({
|
||||
body: { errors: {}, status: 500 },
|
||||
statusCode: 500
|
||||
})
|
||||
})
|
||||
|
||||
try {
|
||||
await client.cat.indices()
|
||||
t.fail('Should throw')
|
||||
} catch (err) {
|
||||
t.ok(err instanceof errors.ResponseError)
|
||||
}
|
||||
})
|
||||
@ -1,18 +0,0 @@
|
||||
{
|
||||
"name": "mock",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "standard && tap index.js"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@elastic/elasticsearch": "file:../..",
|
||||
"@elastic/elasticsearch-mock": "^0.3.1",
|
||||
"standard": "^16.0.3",
|
||||
"tap": "^15.0.9"
|
||||
}
|
||||
}
|
||||
@ -25,9 +25,7 @@ const buffer = require('buffer')
|
||||
const intoStream = require('into-stream')
|
||||
const { ConnectionPool, Transport, Connection, errors } = require('../../index')
|
||||
const { CloudConnectionPool } = require('../../lib/pool')
|
||||
const { Client, buildServer, connection } = require('../utils')
|
||||
const { buildMockConnection } = connection
|
||||
|
||||
const { Client, buildServer } = require('../utils')
|
||||
let clientVersion = require('../../package.json').version
|
||||
if (clientVersion.includes('-')) {
|
||||
clientVersion = clientVersion.slice(0, clientVersion.indexOf('-')) + 'p'
|
||||
@ -1308,223 +1306,6 @@ test('Content length too big (string)', t => {
|
||||
})
|
||||
})
|
||||
|
||||
test('Content length too big custom (buffer)', t => {
|
||||
t.plan(4)
|
||||
|
||||
class MockConnection extends Connection {
|
||||
request (params, callback) {
|
||||
const stream = intoStream(JSON.stringify({ hello: 'world' }))
|
||||
stream.statusCode = 200
|
||||
stream.headers = {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-encoding': 'gzip',
|
||||
'content-length': 1100,
|
||||
connection: 'keep-alive',
|
||||
date: new Date().toISOString()
|
||||
}
|
||||
stream.on('close', () => t.pass('Stream destroyed'))
|
||||
process.nextTick(callback, null, stream)
|
||||
return { abort () {} }
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
maxCompressedResponseSize: 1000
|
||||
})
|
||||
client.info((err, result) => {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed buffer (1000)')
|
||||
t.equal(result.meta.attempts, 0)
|
||||
})
|
||||
})
|
||||
|
||||
test('Content length too big custom (string)', t => {
|
||||
t.plan(4)
|
||||
|
||||
class MockConnection extends Connection {
|
||||
request (params, callback) {
|
||||
const stream = intoStream(JSON.stringify({ hello: 'world' }))
|
||||
stream.statusCode = 200
|
||||
stream.headers = {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-length': 1100,
|
||||
connection: 'keep-alive',
|
||||
date: new Date().toISOString()
|
||||
}
|
||||
stream.on('close', () => t.pass('Stream destroyed'))
|
||||
process.nextTick(callback, null, stream)
|
||||
return { abort () {} }
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
maxResponseSize: 1000
|
||||
})
|
||||
client.info((err, result) => {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed string (1000)')
|
||||
t.equal(result.meta.attempts, 0)
|
||||
})
|
||||
})
|
||||
|
||||
test('Content length too big custom option (buffer)', t => {
|
||||
t.plan(4)
|
||||
|
||||
class MockConnection extends Connection {
|
||||
request (params, callback) {
|
||||
const stream = intoStream(JSON.stringify({ hello: 'world' }))
|
||||
stream.statusCode = 200
|
||||
stream.headers = {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-encoding': 'gzip',
|
||||
'content-length': 1100,
|
||||
connection: 'keep-alive',
|
||||
date: new Date().toISOString()
|
||||
}
|
||||
stream.on('close', () => t.pass('Stream destroyed'))
|
||||
process.nextTick(callback, null, stream)
|
||||
return { abort () {} }
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
client.info({}, { maxCompressedResponseSize: 1000 }, (err, result) => {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed buffer (1000)')
|
||||
t.equal(result.meta.attempts, 0)
|
||||
})
|
||||
})
|
||||
|
||||
test('Content length too big custom option (string)', t => {
|
||||
t.plan(4)
|
||||
|
||||
class MockConnection extends Connection {
|
||||
request (params, callback) {
|
||||
const stream = intoStream(JSON.stringify({ hello: 'world' }))
|
||||
stream.statusCode = 200
|
||||
stream.headers = {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-length': 1100,
|
||||
connection: 'keep-alive',
|
||||
date: new Date().toISOString()
|
||||
}
|
||||
stream.on('close', () => t.pass('Stream destroyed'))
|
||||
process.nextTick(callback, null, stream)
|
||||
return { abort () {} }
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
client.info({}, { maxResponseSize: 1000 }, (err, result) => {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed string (1000)')
|
||||
t.equal(result.meta.attempts, 0)
|
||||
})
|
||||
})
|
||||
|
||||
test('Content length too big custom option override (buffer)', t => {
|
||||
t.plan(4)
|
||||
|
||||
class MockConnection extends Connection {
|
||||
request (params, callback) {
|
||||
const stream = intoStream(JSON.stringify({ hello: 'world' }))
|
||||
stream.statusCode = 200
|
||||
stream.headers = {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-encoding': 'gzip',
|
||||
'content-length': 1100,
|
||||
connection: 'keep-alive',
|
||||
date: new Date().toISOString()
|
||||
}
|
||||
stream.on('close', () => t.pass('Stream destroyed'))
|
||||
process.nextTick(callback, null, stream)
|
||||
return { abort () {} }
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
maxCompressedResponseSize: 2000
|
||||
})
|
||||
client.info({}, { maxCompressedResponseSize: 1000 }, (err, result) => {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed buffer (1000)')
|
||||
t.equal(result.meta.attempts, 0)
|
||||
})
|
||||
})
|
||||
|
||||
test('Content length too big custom option override (string)', t => {
|
||||
t.plan(4)
|
||||
|
||||
class MockConnection extends Connection {
|
||||
request (params, callback) {
|
||||
const stream = intoStream(JSON.stringify({ hello: 'world' }))
|
||||
stream.statusCode = 200
|
||||
stream.headers = {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-length': 1100,
|
||||
connection: 'keep-alive',
|
||||
date: new Date().toISOString()
|
||||
}
|
||||
stream.on('close', () => t.pass('Stream destroyed'))
|
||||
process.nextTick(callback, null, stream)
|
||||
return { abort () {} }
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
maxResponseSize: 2000
|
||||
})
|
||||
client.info({}, { maxResponseSize: 1000 }, (err, result) => {
|
||||
t.ok(err instanceof errors.RequestAbortedError)
|
||||
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed string (1000)')
|
||||
t.equal(result.meta.attempts, 0)
|
||||
})
|
||||
})
|
||||
|
||||
test('maxResponseSize cannot be bigger than buffer.constants.MAX_STRING_LENGTH', t => {
|
||||
t.plan(2)
|
||||
|
||||
try {
|
||||
new Client({ // eslint-disable-line
|
||||
node: 'http://localhost:9200',
|
||||
maxResponseSize: buffer.constants.MAX_STRING_LENGTH + 10
|
||||
})
|
||||
t.fail('should throw')
|
||||
} catch (err) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, `The maxResponseSize cannot be bigger than ${buffer.constants.MAX_STRING_LENGTH}`)
|
||||
}
|
||||
})
|
||||
|
||||
test('maxCompressedResponseSize cannot be bigger than buffer.constants.MAX_STRING_LENGTH', t => {
|
||||
t.plan(2)
|
||||
|
||||
try {
|
||||
new Client({ // eslint-disable-line
|
||||
node: 'http://localhost:9200',
|
||||
maxCompressedResponseSize: buffer.constants.MAX_LENGTH + 10
|
||||
})
|
||||
t.fail('should throw')
|
||||
} catch (err) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, `The maxCompressedResponseSize cannot be bigger than ${buffer.constants.MAX_LENGTH}`)
|
||||
}
|
||||
})
|
||||
|
||||
test('Meta header enabled', t => {
|
||||
t.plan(2)
|
||||
|
||||
@ -1717,146 +1498,3 @@ test('Bearer auth', t => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Check server fingerprint (success)', t => {
|
||||
t.plan(1)
|
||||
|
||||
function handler (req, res) {
|
||||
res.end('ok')
|
||||
}
|
||||
|
||||
buildServer(handler, { secure: true }, ({ port, caFingerprint }, server) => {
|
||||
const client = new Client({
|
||||
node: `https://localhost:${port}`,
|
||||
caFingerprint
|
||||
})
|
||||
|
||||
client.info((err, res) => {
|
||||
t.error(err)
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Check server fingerprint (failure)', t => {
|
||||
t.plan(2)
|
||||
|
||||
function handler (req, res) {
|
||||
res.end('ok')
|
||||
}
|
||||
|
||||
buildServer(handler, { secure: true }, ({ port }, server) => {
|
||||
const client = new Client({
|
||||
node: `https://localhost:${port}`,
|
||||
caFingerprint: 'FO:OB:AR'
|
||||
})
|
||||
|
||||
client.info((err, res) => {
|
||||
t.ok(err instanceof errors.ConnectionError)
|
||||
t.equal(err.message, 'Server certificate CA fingerprint does not match the value configured in caFingerprint')
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('caFingerprint can\'t be configured over http / 1', t => {
|
||||
t.plan(2)
|
||||
|
||||
try {
|
||||
new Client({ // eslint-disable-line
|
||||
node: 'http://localhost:9200',
|
||||
caFingerprint: 'FO:OB:AR'
|
||||
})
|
||||
t.fail('shuld throw')
|
||||
} catch (err) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, 'You can\'t configure the caFingerprint with a http connection')
|
||||
}
|
||||
})
|
||||
|
||||
test('caFingerprint can\'t be configured over http / 2', t => {
|
||||
t.plan(2)
|
||||
|
||||
try {
|
||||
new Client({ // eslint-disable-line
|
||||
nodes: ['http://localhost:9200'],
|
||||
caFingerprint: 'FO:OB:AR'
|
||||
})
|
||||
t.fail('should throw')
|
||||
} catch (err) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, 'You can\'t configure the caFingerprint with a http connection')
|
||||
}
|
||||
})
|
||||
|
||||
test('caFingerprint can\'t be configured over http / 3', t => {
|
||||
t.plan(1)
|
||||
|
||||
try {
|
||||
new Client({ // eslint-disable-line
|
||||
nodes: ['https://localhost:9200'],
|
||||
caFingerprint: 'FO:OB:AR'
|
||||
})
|
||||
t.pass('should not throw')
|
||||
} catch (err) {
|
||||
t.fail('shuld not throw')
|
||||
}
|
||||
})
|
||||
|
||||
test('caFingerprint can\'t be configured over http / 4', t => {
|
||||
t.plan(2)
|
||||
|
||||
try {
|
||||
new Client({ // eslint-disable-line
|
||||
node: { url: new URL('http://localhost:9200') },
|
||||
caFingerprint: 'FO:OB:AR'
|
||||
})
|
||||
t.fail('shuld throw')
|
||||
} catch (err) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, 'You can\'t configure the caFingerprint with a http connection')
|
||||
}
|
||||
})
|
||||
|
||||
test('caFingerprint can\'t be configured over http / 5', t => {
|
||||
t.plan(2)
|
||||
|
||||
try {
|
||||
new Client({ // eslint-disable-line
|
||||
nodes: [{ url: new URL('http://localhost:9200') }],
|
||||
caFingerprint: 'FO:OB:AR'
|
||||
})
|
||||
t.fail('should throw')
|
||||
} catch (err) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, 'You can\'t configure the caFingerprint with a http connection')
|
||||
}
|
||||
})
|
||||
|
||||
test('Error body that is not a json', t => {
|
||||
t.plan(5)
|
||||
|
||||
const MockConnection = buildMockConnection({
|
||||
onRequest (params) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: '<html><body>error!</body></html>',
|
||||
headers: { 'content-type': 'text/html' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection,
|
||||
maxRetries: 1
|
||||
})
|
||||
|
||||
client.info((err, result) => {
|
||||
t.ok(err instanceof errors.ResponseError)
|
||||
t.equal(err.name, 'ResponseError')
|
||||
t.equal(err.body, '<html><body>error!</body></html>')
|
||||
t.equal(err.message, '<html><body>error!</body></html>')
|
||||
t.equal(err.statusCode, 400)
|
||||
})
|
||||
})
|
||||
|
||||
@ -28,8 +28,7 @@ const hpagent = require('hpagent')
|
||||
const intoStream = require('into-stream')
|
||||
const { buildServer } = require('../utils')
|
||||
const Connection = require('../../lib/Connection')
|
||||
const { TimeoutError, ConfigurationError, RequestAbortedError, ConnectionError } = require('../../lib/errors')
|
||||
const { getIssuerCertificate } = Connection.internals
|
||||
const { TimeoutError, ConfigurationError, RequestAbortedError } = require('../../lib/errors')
|
||||
|
||||
test('Basic (http)', t => {
|
||||
t.plan(4)
|
||||
@ -948,161 +947,3 @@ test('Abort with a slow body', t => {
|
||||
|
||||
setImmediate(() => request.abort())
|
||||
})
|
||||
|
||||
test('Check server fingerprint (success)', t => {
|
||||
t.plan(2)
|
||||
|
||||
function handler (req, res) {
|
||||
res.end('ok')
|
||||
}
|
||||
|
||||
buildServer(handler, { secure: true }, ({ port, caFingerprint }, server) => {
|
||||
const connection = new Connection({
|
||||
url: new URL(`https://localhost:${port}`),
|
||||
caFingerprint
|
||||
})
|
||||
|
||||
connection.request({
|
||||
path: '/hello',
|
||||
method: 'GET'
|
||||
}, (err, res) => {
|
||||
t.error(err)
|
||||
|
||||
let payload = ''
|
||||
res.setEncoding('utf8')
|
||||
res.on('data', chunk => { payload += chunk })
|
||||
res.on('error', err => t.fail(err))
|
||||
res.on('end', () => {
|
||||
t.equal(payload, 'ok')
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Check server fingerprint (failure)', t => {
|
||||
t.plan(2)
|
||||
|
||||
function handler (req, res) {
|
||||
res.end('ok')
|
||||
}
|
||||
|
||||
buildServer(handler, { secure: true }, ({ port }, server) => {
|
||||
const connection = new Connection({
|
||||
url: new URL(`https://localhost:${port}`),
|
||||
caFingerprint: 'FO:OB:AR'
|
||||
})
|
||||
|
||||
connection.request({
|
||||
path: '/hello',
|
||||
method: 'GET'
|
||||
}, (err, res) => {
|
||||
t.ok(err instanceof ConnectionError)
|
||||
t.equal(err.message, 'Server certificate CA fingerprint does not match the value configured in caFingerprint')
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('getIssuerCertificate returns the root CA', t => {
|
||||
t.plan(2)
|
||||
const issuerCertificate = {
|
||||
fingerprint256: 'BA:ZF:AZ',
|
||||
subject: {
|
||||
C: '1',
|
||||
ST: '1',
|
||||
L: '1',
|
||||
O: '1',
|
||||
OU: '1',
|
||||
CN: '1'
|
||||
},
|
||||
issuer: {
|
||||
C: '1',
|
||||
ST: '1',
|
||||
L: '1',
|
||||
O: '1',
|
||||
OU: '1',
|
||||
CN: '1'
|
||||
}
|
||||
}
|
||||
issuerCertificate.issuerCertificate = issuerCertificate
|
||||
|
||||
const socket = {
|
||||
getPeerCertificate (bool) {
|
||||
t.ok(bool)
|
||||
return {
|
||||
fingerprint256: 'FO:OB:AR',
|
||||
subject: {
|
||||
C: '1',
|
||||
ST: '1',
|
||||
L: '1',
|
||||
O: '1',
|
||||
OU: '1',
|
||||
CN: '1'
|
||||
},
|
||||
issuer: {
|
||||
C: '2',
|
||||
ST: '2',
|
||||
L: '2',
|
||||
O: '2',
|
||||
OU: '2',
|
||||
CN: '2'
|
||||
},
|
||||
issuerCertificate
|
||||
}
|
||||
}
|
||||
}
|
||||
t.same(getIssuerCertificate(socket), issuerCertificate)
|
||||
})
|
||||
|
||||
test('getIssuerCertificate detects invalid/malformed certificates', t => {
|
||||
t.plan(2)
|
||||
const socket = {
|
||||
getPeerCertificate (bool) {
|
||||
t.ok(bool)
|
||||
return {
|
||||
fingerprint256: 'FO:OB:AR',
|
||||
subject: {
|
||||
C: '1',
|
||||
ST: '1',
|
||||
L: '1',
|
||||
O: '1',
|
||||
OU: '1',
|
||||
CN: '1'
|
||||
},
|
||||
issuer: {
|
||||
C: '2',
|
||||
ST: '2',
|
||||
L: '2',
|
||||
O: '2',
|
||||
OU: '2',
|
||||
CN: '2'
|
||||
}
|
||||
// missing issuerCertificate
|
||||
}
|
||||
}
|
||||
}
|
||||
t.equal(getIssuerCertificate(socket), null)
|
||||
})
|
||||
|
||||
test('Should show local/remote socket address in case of ECONNRESET', t => {
|
||||
t.plan(2)
|
||||
|
||||
function handler (req, res) {
|
||||
res.destroy()
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const connection = new Connection({
|
||||
url: new URL(`http://localhost:${port}`)
|
||||
})
|
||||
connection.request({
|
||||
path: '/hello',
|
||||
method: 'GET'
|
||||
}, (err, res) => {
|
||||
t.ok(err instanceof ConnectionError)
|
||||
t.match(err.message, /socket\shang\sup\s-\sLocal:\s127.0.0.1:\d+,\sRemote:\s127.0.0.1:\d+/)
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -197,29 +197,3 @@ test('ResponseError with meaningful message / 3', t => {
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError with meaningful message when body is not json', t => {
|
||||
const meta = {
|
||||
statusCode: 400,
|
||||
body: '<html><body>error!</body></html>',
|
||||
headers: { 'content-type': 'text/html' }
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.equal(err.name, 'ResponseError')
|
||||
t.equal(err.message, '<html><body>error!</body></html>')
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError with meaningful message when body is falsy', t => {
|
||||
const meta = {
|
||||
statusCode: 400,
|
||||
body: '',
|
||||
headers: { 'content-type': 'text/plain' }
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.equal(err.name, 'ResponseError')
|
||||
t.equal(err.message, 'Response Error')
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
|
||||
@ -2689,76 +2689,3 @@ test('The callback with a sync error should be called in the next tick - ndjson'
|
||||
t.type(transportReturn.catch, 'function')
|
||||
t.type(transportReturn.abort, 'function')
|
||||
})
|
||||
|
||||
test('Support mapbox vector tile', t => {
|
||||
t.plan(2)
|
||||
const mvtContent = 'GoMCCgRtZXRhEikSFAAAAQACAQMBBAAFAgYDBwAIBAkAGAMiDwkAgEAagEAAAP8//z8ADxoOX3NoYXJkcy5mYWlsZWQaD19zaGFyZHMuc2tpcHBlZBoSX3NoYXJkcy5zdWNjZXNzZnVsGg1fc2hhcmRzLnRvdGFsGhlhZ2dyZWdhdGlvbnMuX2NvdW50LmNvdW50GhdhZ2dyZWdhdGlvbnMuX2NvdW50LnN1bRoTaGl0cy50b3RhbC5yZWxhdGlvbhoQaGl0cy50b3RhbC52YWx1ZRoJdGltZWRfb3V0GgR0b29rIgIwACICMAIiCRkAAAAAAAAAACIECgJlcSICOAAogCB4Ag=='
|
||||
|
||||
function handler (req, res) {
|
||||
res.setHeader('Content-Type', 'application/vnd.mapbox-vector-tile')
|
||||
res.end(Buffer.from(mvtContent, 'base64'))
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection(`http://localhost:${port}`)
|
||||
|
||||
const transport = new Transport({
|
||||
emit: () => {},
|
||||
connectionPool: pool,
|
||||
serializer: new Serializer(),
|
||||
maxRetries: 3,
|
||||
requestTimeout: 30000,
|
||||
sniffInterval: false,
|
||||
sniffOnStart: false
|
||||
})
|
||||
skipProductCheck(transport)
|
||||
|
||||
transport.request({
|
||||
method: 'GET',
|
||||
path: '/hello'
|
||||
}, (err, { body }) => {
|
||||
t.error(err)
|
||||
t.same(body.toString('base64'), Buffer.from(mvtContent, 'base64').toString('base64'))
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Compressed mapbox vector tile', t => {
|
||||
t.plan(2)
|
||||
const mvtContent = 'GoMCCgRtZXRhEikSFAAAAQACAQMBBAAFAgYDBwAIBAkAGAMiDwkAgEAagEAAAP8//z8ADxoOX3NoYXJkcy5mYWlsZWQaD19zaGFyZHMuc2tpcHBlZBoSX3NoYXJkcy5zdWNjZXNzZnVsGg1fc2hhcmRzLnRvdGFsGhlhZ2dyZWdhdGlvbnMuX2NvdW50LmNvdW50GhdhZ2dyZWdhdGlvbnMuX2NvdW50LnN1bRoTaGl0cy50b3RhbC5yZWxhdGlvbhoQaGl0cy50b3RhbC52YWx1ZRoJdGltZWRfb3V0GgR0b29rIgIwACICMAIiCRkAAAAAAAAAACIECgJlcSICOAAogCB4Ag=='
|
||||
|
||||
function handler (req, res) {
|
||||
const body = gzipSync(Buffer.from(mvtContent, 'base64'))
|
||||
res.setHeader('Content-Type', 'application/vnd.mapbox-vector-tile')
|
||||
res.setHeader('Content-Encoding', 'gzip')
|
||||
res.setHeader('Content-Length', Buffer.byteLength(body))
|
||||
res.end(body)
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection(`http://localhost:${port}`)
|
||||
|
||||
const transport = new Transport({
|
||||
emit: () => {},
|
||||
connectionPool: pool,
|
||||
serializer: new Serializer(),
|
||||
maxRetries: 3,
|
||||
requestTimeout: 30000,
|
||||
sniffInterval: false,
|
||||
sniffOnStart: false
|
||||
})
|
||||
skipProductCheck(transport)
|
||||
|
||||
transport.request({
|
||||
method: 'GET',
|
||||
path: '/hello'
|
||||
}, (err, { body }) => {
|
||||
t.error(err)
|
||||
t.same(body.toString('base64'), Buffer.from(mvtContent, 'base64').toString('base64'))
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -19,7 +19,6 @@
|
||||
|
||||
'use strict'
|
||||
|
||||
const crypto = require('crypto')
|
||||
const debug = require('debug')('elasticsearch-test')
|
||||
const stoppable = require('stoppable')
|
||||
|
||||
@ -36,13 +35,6 @@ const secureOpts = {
|
||||
cert: readFileSync(join(__dirname, '..', 'fixtures', 'https.cert'), 'utf8')
|
||||
}
|
||||
|
||||
const caFingerprint = getFingerprint(secureOpts.cert
|
||||
.split('\n')
|
||||
.slice(1, -1)
|
||||
.map(line => line.trim())
|
||||
.join('')
|
||||
)
|
||||
|
||||
let id = 0
|
||||
function buildServer (handler, opts, cb) {
|
||||
const serverId = id++
|
||||
@ -66,7 +58,7 @@ function buildServer (handler, opts, cb) {
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port
|
||||
debug(`Server '${serverId}' booted on port ${port}`)
|
||||
resolve([Object.assign({}, secureOpts, { port, caFingerprint }), server])
|
||||
resolve([Object.assign({}, secureOpts, { port }), server])
|
||||
})
|
||||
})
|
||||
} else {
|
||||
@ -78,11 +70,4 @@ function buildServer (handler, opts, cb) {
|
||||
}
|
||||
}
|
||||
|
||||
function getFingerprint (content, inputEncoding = 'base64', outputEncoding = 'hex') {
|
||||
const shasum = crypto.createHash('sha256')
|
||||
shasum.update(content, inputEncoding)
|
||||
const res = shasum.digest(outputEncoding)
|
||||
return res.toUpperCase().match(/.{1,2}/g).join(':')
|
||||
}
|
||||
|
||||
module.exports = buildServer
|
||||
|
||||
Reference in New Issue
Block a user