Compare commits
10 Commits
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 4def742e47 |  |
|  | 1b7402d06c |  |
|  | 3d728bcad7 |  |
|  | 2d51ef429f |  |
|  | 39789031c3 |  |
|  | dfb09b4827 |  |
|  | 11c11e4568 |  |
|  | 27a4e908c9 |  |
|  | c20109ec78 |  |
|  | d6270b17c4 |  |
@@ -1,6 +1,6 @@
---
STACK_VERSION:
- 7.x-SNAPSHOT
- 7.10.0-SNAPSHOT

NODE_JS_VERSION:
- 14
.github/workflows/nodejs.yml (vendored, 4 changes)

@@ -9,7 +9,7 @@ jobs:
strategy:
matrix:
node-version: [10.x, 12.x, 14.x]
node-version: [10.x, 12.x, 14.x, 15.x]
os: [ubuntu-latest, windows-latest, macOS-latest]

steps:
@@ -86,7 +86,7 @@ jobs:
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.x-SNAPSHOT
stack-version: 7.10.0-SNAPSHOT

- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
README.md (28 changes)

@@ -26,9 +26,33 @@ The official Node.js client for Elasticsearch.
npm install @elastic/elasticsearch
```

### Compatibility
### Node.js support

The minimum supported version of Node.js is `v8`.
NOTE: The minimum supported version of Node.js is `v8`.

The client versioning follows the Elastic Stack versioning, this means that
major, minor, and patch releases are done following a precise schedule that
often does not coincide with the [Node.js release](https://nodejs.org/en/about/releases/) times.

To avoid supporting insecure and unsupported versions of Node.js, the
client **will drop the support of EOL versions of Node.js between minor releases**.
Typically, as soon as a Node.js version goes into EOL, the client will continue
to support that version for at least another minor release. If you are using the client
with a version of Node.js that will be unsupported soon, you will see a warning
in your logs (the client will start logging the warning with two minors in advance).

Unless you are **always** using a supported version of Node.js,
we recommend defining the client dependency in your
`package.json` with the `~` instead of `^`. In this way, you will lock the
dependency on the minor release and not the major (for example, `~7.10.0` instead
of `^7.10.0`).

| Node.js Version | Node.js EOL date | End of support      |
| --------------- | ---------------- | ------------------- |
| `8.x`           | `December 2019`  | `7.11` (early 2021) |
| `10.x`          | `April 2021`     | `7.12` (mid 2021)   |

### Compatibility

The library is compatible with all Elasticsearch versions since 5.x, and you should use the same major version of the client as the Elasticsearch instance that you are using.
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['pretty', 'human', 'error_trace', 'source', 'filter_path', 'wait_for_completion_timeout', 'keep_alive', 'typed_keys', 'keep_on_completion', 'batched_reduce_size', 'request_cache', 'analyzer', 'analyze_wildcard', 'default_operator', 'df', 'explain', 'stored_fields', 'docvalue_fields', 'from', 'ignore_unavailable', 'ignore_throttled', 'allow_no_indices', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'search_type', 'size', 'sort', '_source', '_source_excludes', '_source_includes', 'terminate_after', 'stats', 'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text', 'timeout', 'track_scores', 'track_total_hits', 'allow_partial_search_results', 'version', 'seq_no_primary_term', 'max_concurrent_shard_requests']
const snakeCase = { errorTrace: 'error_trace', filterPath: 'filter_path', waitForCompletionTimeout: 'wait_for_completion_timeout', keepAlive: 'keep_alive', typedKeys: 'typed_keys', keepOnCompletion: 'keep_on_completion', batchedReduceSize: 'batched_reduce_size', requestCache: 'request_cache', analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', storedFields: 'stored_fields', docvalueFields: 'docvalue_fields', ignoreUnavailable: 'ignore_unavailable', ignoreThrottled: 'ignore_throttled', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', terminateAfter: 'terminate_after', suggestField: 'suggest_field', suggestMode: 'suggest_mode', suggestSize: 'suggest_size', suggestText: 'suggest_text', trackScores: 'track_scores', trackTotalHits: 'track_total_hits', allowPartialSearchResults: 'allow_partial_search_results', seqNoPrimaryTerm: 'seq_no_primary_term', maxConcurrentShardRequests: 'max_concurrent_shard_requests' }
const acceptedQuerystring = ['pretty', 'human', 'error_trace', 'source', 'filter_path', 'wait_for_completion_timeout', 'keep_alive', 'typed_keys', 'keep_on_completion', 'batched_reduce_size', 'request_cache', 'analyzer', 'analyze_wildcard', 'default_operator', 'df', 'explain', 'stored_fields', 'docvalue_fields', 'from', 'ignore_unavailable', 'ignore_throttled', 'allow_no_indices', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'search_type', 'size', 'sort', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'terminate_after', 'stats', 'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text', 'timeout', 'track_scores', 'track_total_hits', 'allow_partial_search_results', 'version', 'seq_no_primary_term', 'max_concurrent_shard_requests']
const snakeCase = { errorTrace: 'error_trace', filterPath: 'filter_path', waitForCompletionTimeout: 'wait_for_completion_timeout', keepAlive: 'keep_alive', typedKeys: 'typed_keys', keepOnCompletion: 'keep_on_completion', batchedReduceSize: 'batched_reduce_size', requestCache: 'request_cache', analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', storedFields: 'stored_fields', docvalueFields: 'docvalue_fields', ignoreUnavailable: 'ignore_unavailable', ignoreThrottled: 'ignore_throttled', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', terminateAfter: 'terminate_after', suggestField: 'suggest_field', suggestMode: 'suggest_mode', suggestSize: 'suggest_size', suggestText: 'suggest_text', trackScores: 'track_scores', trackTotalHits: 'track_total_hits', allowPartialSearchResults: 'allow_partial_search_results', seqNoPrimaryTerm: 'seq_no_primary_term', maxConcurrentShardRequests: 'max_concurrent_shard_requests' }

function AsyncSearchApi (transport, ConfigurationError) {
  this.transport = transport
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['wait_for_active_shards', 'refresh', 'routing', 'timeout', 'type', '_source', '_source_excludes', '_source_includes', 'pipeline', 'require_alias', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { waitForActiveShards: 'wait_for_active_shards', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', requireAlias: 'require_alias', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['wait_for_active_shards', 'refresh', 'routing', 'timeout', 'type', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'pipeline', 'require_alias', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { waitForActiveShards: 'wait_for_active_shards', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', requireAlias: 'require_alias', errorTrace: 'error_trace', filterPath: 'filter_path' }

function bulkApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'default_operator', 'df', 'from', 'ignore_unavailable', 'allow_no_indices', 'conflicts', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'scroll', 'search_type', 'search_timeout', 'size', 'max_docs', 'sort', '_source', '_source_excludes', '_source_includes', 'terminate_after', 'stats', 'version', 'request_cache', 'refresh', 'timeout', 'wait_for_active_shards', 'scroll_size', 'wait_for_completion', 'requests_per_second', 'slices', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', searchTimeout: 'search_timeout', maxDocs: 'max_docs', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', terminateAfter: 'terminate_after', requestCache: 'request_cache', waitForActiveShards: 'wait_for_active_shards', scrollSize: 'scroll_size', waitForCompletion: 'wait_for_completion', requestsPerSecond: 'requests_per_second', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'default_operator', 'df', 'from', 'ignore_unavailable', 'allow_no_indices', 'conflicts', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'scroll', 'search_type', 'search_timeout', 'size', 'max_docs', 'sort', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'terminate_after', 'stats', 'version', 'request_cache', 'refresh', 'timeout', 'wait_for_active_shards', 'scroll_size', 'wait_for_completion', 'requests_per_second', 'slices', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', searchTimeout: 'search_timeout', maxDocs: 'max_docs', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', terminateAfter: 'terminate_after', requestCache: 'request_cache', waitForActiveShards: 'wait_for_active_shards', scrollSize: 'scroll_size', waitForCompletion: 'wait_for_completion', requestsPerSecond: 'requests_per_second', errorTrace: 'error_trace', filterPath: 'filter_path' }

function deleteByQueryApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['stored_fields', 'preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['stored_fields', 'preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }

function existsApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }

function existsSourceApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['analyze_wildcard', 'analyzer', 'default_operator', 'df', 'stored_fields', 'lenient', 'preference', 'q', 'routing', '_source', '_source_excludes', '_source_includes', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['analyze_wildcard', 'analyzer', 'default_operator', 'df', 'stored_fields', 'lenient', 'preference', 'q', 'routing', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', errorTrace: 'error_trace', filterPath: 'filter_path' }

function explainApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['stored_fields', 'preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['stored_fields', 'preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }

function getApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'version', 'version_type', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', versionType: 'version_type', errorTrace: 'error_trace', filterPath: 'filter_path' }

function getSourceApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['stored_fields', 'preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['stored_fields', 'preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { storedFields: 'stored_fields', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', errorTrace: 'error_trace', filterPath: 'filter_path' }

function mgetApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'ccs_minimize_roundtrips', 'default_operator', 'df', 'explain', 'stored_fields', 'docvalue_fields', 'from', 'ignore_unavailable', 'ignore_throttled', 'allow_no_indices', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'scroll', 'search_type', 'size', 'sort', '_source', '_source_excludes', '_source_includes', 'terminate_after', 'stats', 'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text', 'timeout', 'track_scores', 'track_total_hits', 'allow_partial_search_results', 'typed_keys', 'version', 'seq_no_primary_term', 'request_cache', 'batched_reduce_size', 'max_concurrent_shard_requests', 'pre_filter_shard_size', 'rest_total_hits_as_int', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', ccsMinimizeRoundtrips: 'ccs_minimize_roundtrips', defaultOperator: 'default_operator', storedFields: 'stored_fields', docvalueFields: 'docvalue_fields', ignoreUnavailable: 'ignore_unavailable', ignoreThrottled: 'ignore_throttled', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', terminateAfter: 'terminate_after', suggestField: 'suggest_field', suggestMode: 'suggest_mode', suggestSize: 'suggest_size', suggestText: 'suggest_text', trackScores: 'track_scores', trackTotalHits: 'track_total_hits', allowPartialSearchResults: 'allow_partial_search_results', typedKeys: 'typed_keys', seqNoPrimaryTerm: 'seq_no_primary_term', requestCache: 'request_cache', batchedReduceSize: 'batched_reduce_size', maxConcurrentShardRequests: 'max_concurrent_shard_requests', preFilterShardSize: 'pre_filter_shard_size', restTotalHitsAsInt: 'rest_total_hits_as_int', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'ccs_minimize_roundtrips', 'default_operator', 'df', 'explain', 'stored_fields', 'docvalue_fields', 'from', 'ignore_unavailable', 'ignore_throttled', 'allow_no_indices', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'scroll', 'search_type', 'size', 'sort', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'terminate_after', 'stats', 'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text', 'timeout', 'track_scores', 'track_total_hits', 'allow_partial_search_results', 'typed_keys', 'version', 'seq_no_primary_term', 'request_cache', 'batched_reduce_size', 'max_concurrent_shard_requests', 'pre_filter_shard_size', 'rest_total_hits_as_int', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', ccsMinimizeRoundtrips: 'ccs_minimize_roundtrips', defaultOperator: 'default_operator', storedFields: 'stored_fields', docvalueFields: 'docvalue_fields', ignoreUnavailable: 'ignore_unavailable', ignoreThrottled: 'ignore_throttled', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', terminateAfter: 'terminate_after', suggestField: 'suggest_field', suggestMode: 'suggest_mode', suggestSize: 'suggest_size', suggestText: 'suggest_text', trackScores: 'track_scores', trackTotalHits: 'track_total_hits', allowPartialSearchResults: 'allow_partial_search_results', typedKeys: 'typed_keys', seqNoPrimaryTerm: 'seq_no_primary_term', requestCache: 'request_cache', batchedReduceSize: 'batched_reduce_size', maxConcurrentShardRequests: 'max_concurrent_shard_requests', preFilterShardSize: 'pre_filter_shard_size', restTotalHitsAsInt: 'rest_total_hits_as_int', errorTrace: 'error_trace', filterPath: 'filter_path' }

function searchApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
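The regenerated whitelists above restore the legacy singular `_source_exclude` and `_source_include` keys next to the plural forms. A minimal sketch of a call that relies on them (index and field names are only examples, and the client is assumed to point at a reachable cluster):

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' })

// Both spellings are serialized into the querystring; the camelCase
// variants are translated through the snakeCase map shown above.
client.search({
  index: 'my-index',                // example index name
  _source_include: 'title,author',  // legacy singular key
  _source_excludes: 'body'          // current plural key
}, (err, result) => {
  if (err) console.error(err)
  else console.log(result.body.hits.hits)
})
```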
@@ -594,6 +594,33 @@ SecurityApi.prototype.getUserPrivileges = function securityGetUserPrivilegesApi
  return this.transport.request(request, options, callback)
}

SecurityApi.prototype.grantApiKey = function securityGrantApiKeyApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)

  // check required parameters
  if (params['body'] == null) {
    const err = new this[kConfigurationError]('Missing required parameter: body')
    return handleError(err, callback)
  }

  var { method, body, ...querystring } = params
  querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)

  var path = ''
  if (method == null) method = 'POST'
  path = '/' + '_security' + '/' + 'api_key' + '/' + 'grant'

  // build request object
  const request = {
    method,
    path,
    body: body || '',
    querystring
  }

  return this.transport.request(request, options, callback)
}

SecurityApi.prototype.hasPrivileges = function securityHasPrivilegesApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -821,6 +848,7 @@ Object.defineProperties(SecurityApi.prototype, {
  get_token: { get () { return this.getToken } },
  get_user: { get () { return this.getUser } },
  get_user_privileges: { get () { return this.getUserPrivileges } },
  grant_api_key: { get () { return this.grantApiKey } },
  has_privileges: { get () { return this.hasPrivileges } },
  invalidate_api_key: { get () { return this.invalidateApiKey } },
  invalidate_token: { get () { return this.invalidateToken } },
@@ -58,6 +58,54 @@ SnapshotApi.prototype.cleanupRepository = function snapshotCleanupRepositoryApi
  return this.transport.request(request, options, callback)
}

SnapshotApi.prototype.clone = function snapshotCloneApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)

  // check required parameters
  if (params['repository'] == null) {
    const err = new this[kConfigurationError]('Missing required parameter: repository')
    return handleError(err, callback)
  }
  if (params['snapshot'] == null) {
    const err = new this[kConfigurationError]('Missing required parameter: snapshot')
    return handleError(err, callback)
  }
  if (params['target_snapshot'] == null && params['targetSnapshot'] == null) {
    const err = new this[kConfigurationError]('Missing required parameter: target_snapshot or targetSnapshot')
    return handleError(err, callback)
  }
  if (params['body'] == null) {
    const err = new this[kConfigurationError]('Missing required parameter: body')
    return handleError(err, callback)
  }

  // check required url components
  if ((params['target_snapshot'] != null || params['targetSnapshot'] != null) && (params['snapshot'] == null || params['repository'] == null)) {
    const err = new this[kConfigurationError]('Missing required parameter of the url: snapshot, repository')
    return handleError(err, callback)
  } else if (params['snapshot'] != null && (params['repository'] == null)) {
    const err = new this[kConfigurationError]('Missing required parameter of the url: repository')
    return handleError(err, callback)
  }

  var { method, body, repository, snapshot, targetSnapshot, target_snapshot, ...querystring } = params
  querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)

  var path = ''
  if (method == null) method = 'PUT'
  path = '/' + '_snapshot' + '/' + encodeURIComponent(repository) + '/' + encodeURIComponent(snapshot) + '/' + '_clone' + '/' + encodeURIComponent(target_snapshot || targetSnapshot)

  // build request object
  const request = {
    method,
    path,
    body: body || '',
    querystring
  }

  return this.transport.request(request, options, callback)
}
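A usage sketch for the clone API above (repository, snapshot, and index names are placeholders):

```js
// Assumes a repository 'my_repository' that already contains 'snapshot_1'.
client.snapshot.clone({
  repository: 'my_repository',
  snapshot: 'snapshot_1',
  target_snapshot: 'snapshot_1_clone',
  body: { indices: 'my-index' }
}, (err, result) => {
  if (err) console.error(err)
  else console.log(result.body) // { acknowledged: true } on success
})
```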

SnapshotApi.prototype.create = function snapshotCreateApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['wait_for_active_shards', '_source', '_source_excludes', '_source_includes', 'lang', 'refresh', 'retry_on_conflict', 'routing', 'timeout', 'if_seq_no', 'if_primary_term', 'require_alias', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { waitForActiveShards: 'wait_for_active_shards', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', retryOnConflict: 'retry_on_conflict', ifSeqNo: 'if_seq_no', ifPrimaryTerm: 'if_primary_term', requireAlias: 'require_alias', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['wait_for_active_shards', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'lang', 'refresh', 'retry_on_conflict', 'routing', 'timeout', 'if_seq_no', 'if_primary_term', 'require_alias', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { waitForActiveShards: 'wait_for_active_shards', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', retryOnConflict: 'retry_on_conflict', ifSeqNo: 'if_seq_no', ifPrimaryTerm: 'if_primary_term', requireAlias: 'require_alias', errorTrace: 'error_trace', filterPath: 'filter_path' }

function updateApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
@@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'default_operator', 'df', 'from', 'ignore_unavailable', 'allow_no_indices', 'conflicts', 'expand_wildcards', 'lenient', 'pipeline', 'preference', 'q', 'routing', 'scroll', 'search_type', 'search_timeout', 'size', 'max_docs', 'sort', '_source', '_source_excludes', '_source_includes', 'terminate_after', 'stats', 'version', 'version_type', 'request_cache', 'refresh', 'timeout', 'wait_for_active_shards', 'scroll_size', 'wait_for_completion', 'requests_per_second', 'slices', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', searchTimeout: 'search_timeout', maxDocs: 'max_docs', _sourceExcludes: '_source_excludes', _sourceIncludes: '_source_includes', terminateAfter: 'terminate_after', versionType: 'version_type', requestCache: 'request_cache', waitForActiveShards: 'wait_for_active_shards', scrollSize: 'scroll_size', waitForCompletion: 'wait_for_completion', requestsPerSecond: 'requests_per_second', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'default_operator', 'df', 'from', 'ignore_unavailable', 'allow_no_indices', 'conflicts', 'expand_wildcards', 'lenient', 'pipeline', 'preference', 'q', 'routing', 'scroll', 'search_type', 'search_timeout', 'size', 'max_docs', 'sort', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'terminate_after', 'stats', 'version', 'version_type', 'request_cache', 'refresh', 'timeout', 'wait_for_active_shards', 'scroll_size', 'wait_for_completion', 'requests_per_second', 'slices', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', defaultOperator: 'default_operator', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', searchTimeout: 'search_timeout', maxDocs: 'max_docs', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', terminateAfter: 'terminate_after', versionType: 'version_type', requestCache: 'request_cache', waitForActiveShards: 'wait_for_active_shards', scrollSize: 'scroll_size', waitForCompletion: 'wait_for_completion', requestsPerSecond: 'requests_per_second', errorTrace: 'error_trace', filterPath: 'filter_path' }

function updateByQueryApi (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)
api/kibana.d.ts (vendored, 2 changes)

@@ -397,6 +397,7 @@ interface KibanaClient {
getToken<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetToken<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getUser<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetUser, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getUserPrivileges<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGetUserPrivileges, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
grantApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGrantApiKey<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
hasPrivileges<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityHasPrivileges<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
invalidateApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityInvalidateApiKey<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
invalidateToken<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityInvalidateToken<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
@@ -418,6 +419,7 @@ interface KibanaClient {
}
snapshot: {
cleanupRepository<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotCleanupRepository, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clone<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotClone<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
create<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotCreate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
createRepository<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotCreateRepository<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotDelete, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
api/requestParams.d.ts (vendored, 41 changes)

@@ -41,6 +41,8 @@ export interface AsyncSearchGet extends Generic {

export interface AsyncSearchSubmit<T = RequestBody> extends Generic {
  index?: string | string[];
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  wait_for_completion_timeout?: string;
  keep_on_completion?: boolean;
  keep_alive?: string;
@@ -104,6 +106,8 @@ export interface AutoscalingPutAutoscalingPolicy<T = RequestBody> extends Generi
export interface Bulk<T = RequestNDBody> extends Generic {
  index?: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  wait_for_active_shards?: string;
  refresh?: 'wait_for' | boolean;
  routing?: string;
@@ -643,6 +647,8 @@ export interface Delete extends Generic {
export interface DeleteByQuery<T = RequestBody> extends Generic {
  index: string | string[];
  type?: string | string[];
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  analyzer?: string;
  analyze_wildcard?: boolean;
  default_operator?: 'AND' | 'OR';
@@ -733,6 +739,8 @@ export interface Exists extends Generic {
  id: string;
  index: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  stored_fields?: string | string[];
  preference?: string;
  realtime?: boolean;
@@ -749,6 +757,8 @@ export interface ExistsSource extends Generic {
  id: string;
  index: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  preference?: string;
  realtime?: boolean;
  refresh?: boolean;
@@ -764,6 +774,8 @@ export interface Explain<T = RequestBody> extends Generic {
  id: string;
  index: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  analyze_wildcard?: boolean;
  analyzer?: string;
  default_operator?: 'AND' | 'OR';
@@ -793,6 +805,8 @@ export interface Get extends Generic {
  id: string;
  index: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  stored_fields?: string | string[];
  preference?: string;
  realtime?: boolean;
@@ -820,6 +834,8 @@ export interface GetSource extends Generic {
  id: string;
  index: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  preference?: string;
  realtime?: boolean;
  refresh?: boolean;
@@ -1419,6 +1435,8 @@ export interface LicensePostStartTrial extends Generic {
export interface Mget<T = RequestBody> extends Generic {
  index?: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  stored_fields?: string | string[];
  preference?: string;
  realtime?: boolean;
@@ -2044,6 +2062,8 @@ export interface Scroll<T = RequestBody> extends Generic {
export interface Search<T = RequestBody> extends Generic {
  index?: string | string[];
  type?: string | string[];
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  analyzer?: string;
  analyze_wildcard?: boolean;
  ccs_minimize_roundtrips?: boolean;
@@ -2220,11 +2240,11 @@ export interface SecurityGetPrivileges extends Generic {
}

export interface SecurityGetRole extends Generic {
  name?: string;
  name?: string | string[];
}

export interface SecurityGetRoleMapping extends Generic {
  name?: string;
  name?: string | string[];
}

export interface SecurityGetToken<T = RequestBody> extends Generic {
@@ -2238,6 +2258,11 @@ export interface SecurityGetUser extends Generic {
export interface SecurityGetUserPrivileges extends Generic {
}

export interface SecurityGrantApiKey<T = RequestBody> extends Generic {
  refresh?: 'wait_for' | boolean;
  body: T;
}

export interface SecurityHasPrivileges<T = RequestBody> extends Generic {
  user?: string;
  body: T;
@@ -2312,6 +2337,14 @@ export interface SnapshotCleanupRepository extends Generic {
  timeout?: string;
}

export interface SnapshotClone<T = RequestBody> extends Generic {
  repository: string;
  snapshot: string;
  target_snapshot: string;
  master_timeout?: string;
  body: T;
}

export interface SnapshotCreate<T = RequestBody> extends Generic {
  repository: string;
  snapshot: string;
@@ -2486,6 +2519,8 @@ export interface Update<T = RequestBody> extends Generic {
  id: string;
  index: string;
  type?: string;
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  wait_for_active_shards?: string;
  _source?: string | string[];
  _source_excludes?: string | string[];
@@ -2504,6 +2539,8 @@ export interface Update<T = RequestBody> extends Generic {
export interface UpdateByQuery<T = RequestBody> extends Generic {
  index: string | string[];
  type?: string | string[];
  _source_exclude?: string | string[];
  _source_include?: string | string[];
  analyzer?: string;
  analyze_wildcard?: boolean;
  default_operator?: 'AND' | 'OR';
@@ -1,133 +0,0 @@
[[auth-reference]]
== Authentication

This document contains code snippets to show you how to connect to various {es}
providers.


[discrete]
=== Elastic Cloud

If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers
an easy way to connect to it via the `cloud` option. You must pass the Cloud ID
that you can find in the cloud console, then your username and password inside
the `auth` option.

NOTE: When connecting to Elastic Cloud, the client will automatically enable
both request and response compression by default, since it yields significant
throughput improvements. Moreover, the client will also set the ssl option
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
override this option by configuring them.

IMPORTANT: Do not enable sniffing when using Elastic Cloud, since the nodes are
behind a load balancer, Elastic Cloud will take care of everything for you.
Take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here]
to know more.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: {
    id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==',
  },
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----


[discrete]
=== Basic authentication

You can provide your credentials by passing the `username` and `password`
parameters via the `auth` option.

NOTE: If you provide both basic authentication credentials and the Api Key configuration, the Api Key will take precedence.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----


Otherwise, you can provide your credentials in the node(s) URL.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://username:password@localhost:9200'
})
----


[discrete]
=== ApiKey authentication

You can use the
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey]
authentication by passing the `apiKey` parameter via the `auth` option. The
`apiKey` parameter can be either a base64 encoded string or an object with the
values that you can obtain from the
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[create api key endpoint].

NOTE: If you provide both basic authentication credentials and the Api Key configuration, the Api Key will take precedence.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    apiKey: 'base64EncodedKey'
  }
})
----

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    apiKey: {
      id: 'foo',
      api_key: 'bar'
    }
  }
})
----


[discrete]
=== SSL configuration

Without any additional configuration you can specify `https://` node urls, and
the certificates used to sign these requests will be verified. To turn off certificate verification, you must specify an `ssl` object in the top level config and set `rejectUnauthorized: false`. The default `ssl` values are the same that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
uses.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'changeme'
  },
  ssl: {
    ca: fs.readFileSync('./cacert.pem'),
    rejectUnauthorized: false
  }
})
----
@@ -1,5 +1,164 @@
[[client-connecting]]
== Connecting

This page contains the information you need to connect and use the Client with
{es}.

**On this page**

* <<auth-reference, Authentication options>>
* <<client-usage, Using the client>>
* <<client-connect-proxy, Connecting through a proxy>>
* <<client-error-handling, Handling errors>>

[discrete]
[[authentication]]
=== Authentication

This document contains code snippets to show you how to connect to various {es}
providers.


[discrete]
[[auth-ec]]
==== Elastic Cloud

If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers
an easy way to connect to it via the `cloud` option. You must pass the Cloud ID
that you can find in the cloud console, then your username and password inside
the `auth` option.

NOTE: When connecting to Elastic Cloud, the client will automatically enable
both request and response compression by default, since it yields significant
throughput improvements. Moreover, the client will also set the ssl option
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
override this option by configuring them.

IMPORTANT: Do not enable sniffing when using Elastic Cloud, since the nodes are
behind a load balancer, Elastic Cloud will take care of everything for you.
Take a look https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[here]
to know more.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: {
    id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==',
  },
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----


[discrete]
[[auth-apikey]]
==== ApiKey authentication

You can use the
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey]
authentication by passing the `apiKey` parameter via the `auth` option. The
`apiKey` parameter can be either a base64 encoded string or an object with the
values that you can obtain from the
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[create api key endpoint].

NOTE: If you provide both basic authentication credentials and the ApiKey
configuration, the ApiKey takes precedence.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    apiKey: 'base64EncodedKey'
  }
})
----

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    apiKey: {
      id: 'foo',
      api_key: 'bar'
    }
  }
})
----


[discrete]
[[auth-basic]]
==== Basic authentication

You can provide your credentials by passing the `username` and `password`
parameters via the `auth` option.

NOTE: If you provide both basic authentication credentials and the Api Key
configuration, the Api Key will take precedence.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'changeme'
  }
})
----


Otherwise, you can provide your credentials in the node(s) URL.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://username:password@localhost:9200'
})
----


[discrete]
[[auth-ssl]]
==== SSL configuration

Without any additional configuration you can specify `https://` node urls, and
the certificates used to sign these requests will be verified. To turn off
certificate verification, you must specify an `ssl` object in the top level
config and set `rejectUnauthorized: false`. The default `ssl` values are the
same that Node.js's
https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
uses.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  node: 'https://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'changeme'
  },
  ssl: {
    ca: fs.readFileSync('./cacert.pem'),
    rejectUnauthorized: false
  }
})
----

[discrete]
[[client-usage]]
== Usage
=== Usage

Using the client is straightforward, it supports all the public APIs of {es},
and every method exposes the same signature.
@@ -33,7 +192,7 @@ client.search({
})
----

The returned value of every API call is formed as follows:
The returned value of every API call is designed as follows:

[source,ts]
----
@@ -82,11 +241,13 @@ client.search({


[discrete]
=== Aborting a request
==== Aborting a request

If needed, you can abort a running request by calling the `request.abort()` method returned by the API.
If needed, you can abort a running request by calling the `request.abort()`
method returned by the API.

CAUTION: If you abort a request, the request will fail with a `RequestAbortedError`.
CAUTION: If you abort a request, the request will fail with a
`RequestAbortedError`.


[source,js]
@@ -113,6 +274,7 @@ request.abort()
----

The same behavior is valid for the promise style API as well.

[source,js]
----
const request = client.search({
@@ -136,7 +298,8 @@ request.abort()


[discrete]
=== Request specific options
==== Request specific options

If needed you can pass request specific options in a second object:

[source,js]
@@ -214,13 +377,14 @@ _Default:_ `null`


[discrete]
[[client-connect-proxy]]
=== Connecting through a proxy

~Added~ ~in~ ~`v7.10.0`~

If you need to pass through an http(s) proxy for connecting to Elasticsearch, the client offers
out of the box a handy configuration for helping you with it. Under the hood it
uses the https://github.com/delvedor/hpagent[`hpagent`] module.
If you need to pass through an http(s) proxy for connecting to {es}, the client
offers out of the box a handy configuration for helping you with it. Under the
hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.

[source,js]
----
@@ -256,6 +420,7 @@ const client = new Client({


[discrete]
[[client-error-handling]]
=== Error handling

The client exposes a variety of error objects that you can use to enhance your
@@ -5,12 +5,11 @@ include::{asciidoc-dir}/../../shared/attributes.asciidoc[]
include::introduction.asciidoc[]
include::installation.asciidoc[]
include::connecting.asciidoc[]
include::changelog.asciidoc[]
include::usage.asciidoc[]
include::configuration.asciidoc[]
include::reference.asciidoc[]
include::breaking-changes.asciidoc[]
include::authentication.asciidoc[]
include::observability.asciidoc[]
include::child.asciidoc[]
include::extend.asciidoc[]
@@ -18,3 +17,4 @@ include::helpers.asciidoc[]
include::typescript.asciidoc[]
include::testing.asciidoc[]
include::examples/index.asciidoc[]
include::redirects.asciidoc[]
@@ -18,4 +18,76 @@ npm install @elastic/elasticsearch@<major>
----

To learn more about the supported major versions, please refer to the
<<js-compatibility-matrix>>.
<<js-compatibility-matrix>>.

[discrete]
[[nodejs-support]]
=== Node.js support

NOTE: The minimum supported version of Node.js is `v8`.

The client versioning follows the {stack} versioning, this means that
major, minor, and patch releases are done following a precise schedule that
often does not coincide with the https://nodejs.org/en/about/releases/[Node.js release] times.

To avoid supporting insecure and unsupported versions of Node.js, the
client *will drop the support of EOL versions of Node.js between minor releases*.
Typically, as soon as a Node.js version goes into EOL, the client will continue
to support that version for at least another minor release. If you are using the client
with a version of Node.js that will be unsupported soon, you will see a warning
in your logs (the client will start logging the warning with two minors in advance).

Unless you are *always* using a supported version of Node.js,
we recommend defining the client dependency in your
`package.json` with the `~` instead of `^`. In this way, you will lock the
dependency on the minor release and not the major (for example, `~7.10.0` instead
of `^7.10.0`).
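As an illustration, a minimal `package.json` entry pinned to the minor release line (the version shown is only an example):

[source,json]
----
{
  "dependencies": {
    "@elastic/elasticsearch": "~7.10.0"
  }
}
----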

[%header,cols=3*]
|===
|Node.js Version
|Node.js EOL date
|End of support

|`8.x`
|December 2019
|`7.11` (early 2021)

|`10.x`
|April 2021
|`7.12` (mid 2021)
|===

[discrete]
[[js-compatibility-matrix]]
=== Compatibility matrix

The library is compatible with all {es} versions since 5.x. We recommend you to
use the same major version of the client as the {es} instance that you are
using.

[%header,cols=2*]
|===
|{es} Version
|Client Version

|`master`
|`master`

|`7.x`
|`7.x`

|`6.x`
|`6.x`

|`5.x`
|`5.x`
|===


[discrete]
==== Browser

WARNING: There is no official support for the browser environment. It exposes
your {es} instance to everyone, which could lead to security issues. We
recommend you to write a lightweight proxy that uses this client instead.
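For illustration only, a minimal sketch of such a proxy, assuming a single
unauthenticated search endpoint and an index named `my-index` (neither of
which is part of the official documentation):

[source,js]
----
const http = require('http')
const { URL } = require('url')
const { Client } = require('@elastic/elasticsearch')

const client = new Client({ node: 'http://localhost:9200' })

// Forwards the `q` query string parameter to Elasticsearch and returns only
// the hits, so the cluster itself is never exposed to the browser.
http.createServer(async (req, res) => {
  try {
    const url = new URL(req.url, 'http://localhost')
    const { body } = await client.search({
      index: 'my-index',
      q: url.searchParams.get('q')
    })
    res.setHeader('content-type', 'application/json')
    res.end(JSON.stringify(body.hits.hits))
  } catch (err) {
    res.statusCode = 500
    res.end(err.message)
  }
}).listen(3000)
----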

@ -17,44 +17,6 @@ about the features of the client.
* TypeScript support out of the box.


[discrete]
[[js-compatibility-matrix]]
=== Compatibility matrix

The minimum supported version of Node.js is `v8`.

The library is compatible with all {es} versions since 5.x. We recommend you to
use the same major version of the client as the {es} instance that you are
using.


[%header,cols=2*]
|===
|{es} Version
|Client Version

|`master`
|`master`

|`7.x`
|`7.x`

|`6.x`
|`6.x`

|`5.x`
|`5.x`
|===


[discrete]
==== Browser

WARNING: There is no official support for the browser environment. It exposes
your {es} instance to everyone, which could lead to security issues. We
recommend you to write a lightweight proxy that uses this client instead.


[discrete]
=== Quick start


9
docs/redirects.asciidoc
Normal file
@ -0,0 +1,9 @@
["appendix",role="exclude",id="redirects"]
= Deleted pages

The following pages have moved or been deleted.

[role="exclude",id="auth-reference"]
== Authentication

This page has moved. See <<authentication>>.
@ -9259,14 +9259,14 @@ link:{ref}/security-api-get-privileges.html[Documentation] +
[source,ts]
----
client.security.getRole({
name: string
name: string | string[]
})
----
link:{ref}/security-api-get-role.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - Role name
|`string \| string[]` - A comma-separated list of role names

|===
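A quick usage sketch of the widened `name` parameter (the role names are
placeholders and the call assumes an `async` context):

[source,js]
----
// a single role name still works
const single = await client.security.getRole({ name: 'my_admin_role' })
// ...and so does a list of roles, sent as a comma-separated list
const multiple = await client.security.getRole({ name: ['my_admin_role', 'my_read_role'] })
----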

@ -9276,14 +9276,14 @@ link:{ref}/security-api-get-role.html[Documentation] +
[source,ts]
----
client.security.getRoleMapping({
name: string
name: string | string[]
})
----
link:{ref}/security-api-get-role-mapping.html[Documentation] +
[cols=2*]
|===
|`name`
|`string` - Role-Mapping name
|`string \| string[]` - A comma-separated list of role-mapping names

|===

@ -9331,6 +9331,27 @@ client.security.getUserPrivileges()
link:{ref}/security-api-get-privileges.html[Documentation] +


[discrete]
=== security.grantApiKey

[source,ts]
----
client.security.grantApiKey({
refresh: 'true' | 'false' | 'wait_for',
body: object
})
----
link:{ref}/security-api-grant-api-key.html[Documentation] +
[cols=2*]
|===
|`refresh`
|`'true' \| 'false' \| 'wait_for'` - If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes.

|`body`
|`object` - The api key request to create an API key

|===
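For illustration, a usage sketch of the new API (credentials and key name are
placeholders, and the call assumes an `async` context):

[source,js]
----
const { body } = await client.security.grantApiKey({
  body: {
    grant_type: 'password',
    username: 'elastic',
    password: 'changeme',
    api_key: { name: 'my-api-key' }
  }
})
console.log(body)
----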

[discrete]
=== security.hasPrivileges

@ -9629,6 +9650,39 @@ link:{ref}/clean-up-snapshot-repo-api.html[Documentation] +

|===

[discrete]
=== snapshot.clone

[source,ts]
----
client.snapshot.clone({
repository: string,
snapshot: string,
target_snapshot: string,
master_timeout: string,
body: object
})
----
link:{ref}/modules-snapshots.html[Documentation] +
[cols=2*]
|===
|`repository`
|`string` - A repository name

|`snapshot`
|`string` - The name of the snapshot to clone from

|`target_snapshot` or `targetSnapshot`
|`string` - The name of the cloned snapshot to create

|`master_timeout` or `masterTimeout`
|`string` - Explicit operation timeout for connection to master node

|`body`
|`object` - The snapshot clone definition

|===
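For illustration, a usage sketch of the new API (repository and snapshot names
are placeholders, and the call assumes an `async` context):

[source,js]
----
await client.snapshot.clone({
  repository: 'my_repository',
  snapshot: 'snapshot_1',
  target_snapshot: 'snapshot_1_clone',
  body: { indices: 'my-index-*' }
})
----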

[discrete]
=== snapshot.create


12
index.d.ts
vendored
@ -2156,6 +2156,14 @@ declare class Client {
|
||||
getUserPrivileges<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
getUserPrivileges<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.SecurityGetUserPrivileges, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
getUserPrivileges<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.SecurityGetUserPrivileges, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
grant_api_key<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGrantApiKey<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
grant_api_key<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
grant_api_key<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityGrantApiKey<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
grant_api_key<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityGrantApiKey<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
grantApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityGrantApiKey<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
grantApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
grantApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityGrantApiKey<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
grantApiKey<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityGrantApiKey<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
has_privileges<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SecurityHasPrivileges<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
has_privileges<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
has_privileges<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SecurityHasPrivileges<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
@ -2288,6 +2296,10 @@ declare class Client {
|
||||
cleanupRepository<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
cleanupRepository<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.SnapshotCleanupRepository, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
cleanupRepository<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.SnapshotCleanupRepository, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clone<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotClone<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
clone<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clone<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SnapshotClone<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
clone<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SnapshotClone<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
create<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.SnapshotCreate<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
|
||||
create<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
create<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.SnapshotCreate<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
|
||||
|
||||
18
index.js
@ -41,6 +41,24 @@ const kEventEmitter = Symbol('elasticsearchjs-event-emitter')

const ESAPI = require('./api')

/* istanbul ignore next */
if (nodeMajor < 10) {
  process.emitWarning('You are using a version of Node.js that is currently in EOL. ' +
    'The support for this version will be dropped in 7.12. ' +
    'Please refer to https://ela.st/nodejs-support for additional information.',
    'DeprecationWarning'
  )
}

/* istanbul ignore next */
if (nodeMajor >= 10 && nodeMajor < 12) {
  process.emitWarning('You are using a version of Node.js that will reach EOL in April 2021. ' +
    'The support for this version will be dropped in 7.13. ' +
    'Please refer to https://ela.st/nodejs-support for additional information.',
    'DeprecationWarning'
  )
}

class Client extends ESAPI {
  constructor (opts = {}) {
    super({ ConfigurationError })

@ -25,7 +25,6 @@ const hpagent = require('hpagent')
|
||||
const http = require('http')
|
||||
const https = require('https')
|
||||
const debug = require('debug')('elasticsearch')
|
||||
const decompressResponse = require('decompress-response')
|
||||
const pump = require('pump')
|
||||
const INVALID_PATH_REGEX = /[^\u0021-\u00ff]/
|
||||
const {
|
||||
@ -83,7 +82,6 @@ class Connection {
|
||||
|
||||
request (params, callback) {
|
||||
this._openRequests++
|
||||
var ended = false
|
||||
|
||||
const requestParams = this.buildRequestObject(params)
|
||||
// https://github.com/nodejs/node/commit/b961d9fd83
|
||||
@ -96,53 +94,38 @@ class Connection {
|
||||
debug('Starting a new request', params)
|
||||
const request = this.makeRequest(requestParams)
|
||||
|
||||
// listen for the response event
|
||||
// TODO: handle redirects?
|
||||
request.on('response', response => {
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
const onResponse = response => {
|
||||
cleanListeners()
|
||||
this._openRequests--
|
||||
callback(null, response)
|
||||
}
|
||||
|
||||
if (params.asStream === true) {
|
||||
callback(null, response)
|
||||
} else {
|
||||
callback(null, decompressResponse(response))
|
||||
}
|
||||
}
|
||||
})
|
||||
const onTimeout = () => {
|
||||
cleanListeners()
|
||||
this._openRequests--
|
||||
request.once('error', () => {}) // we need to catch the request aborted error
|
||||
request.abort()
|
||||
callback(new TimeoutError('Request timed out', params), null)
|
||||
}
|
||||
|
||||
// handles request timeout
|
||||
request.on('timeout', () => {
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
request.abort()
|
||||
callback(new TimeoutError('Request timed out', params), null)
|
||||
}
|
||||
})
|
||||
const onError = err => {
|
||||
cleanListeners()
|
||||
this._openRequests--
|
||||
callback(new ConnectionError(err.message), null)
|
||||
}
|
||||
|
||||
// handles request error
|
||||
request.on('error', err => {
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
callback(new ConnectionError(err.message), null)
|
||||
}
|
||||
})
|
||||
|
||||
// updates the ended state
|
||||
request.on('abort', () => {
|
||||
const onAbort = () => {
|
||||
cleanListeners()
|
||||
request.once('error', () => {}) // we need to catch the request aborted error
|
||||
debug('Request aborted', params)
|
||||
/* istanbul ignore else */
|
||||
if (ended === false) {
|
||||
ended = true
|
||||
this._openRequests--
|
||||
callback(new RequestAbortedError(), null)
|
||||
}
|
||||
})
|
||||
this._openRequests--
|
||||
callback(new RequestAbortedError(), null)
|
||||
}
|
||||
|
||||
request.on('response', onResponse)
|
||||
request.on('timeout', onTimeout)
|
||||
request.on('error', onError)
|
||||
request.on('abort', onAbort)
|
||||
|
||||
// Disables the Nagle algorithm
|
||||
request.setNoDelay(true)
|
||||
@ -151,8 +134,8 @@ class Connection {
|
||||
if (isStream(params.body) === true) {
|
||||
pump(params.body, request, err => {
|
||||
/* istanbul ignore if */
|
||||
if (err != null && /* istanbul ignore next */ ended === false) {
|
||||
ended = true
|
||||
if (err != null) {
|
||||
cleanListeners()
|
||||
this._openRequests--
|
||||
callback(err, null)
|
||||
}
|
||||
@ -162,6 +145,13 @@ class Connection {
|
||||
}
|
||||
|
||||
return request
|
||||
|
||||
function cleanListeners () {
|
||||
request.removeListener('response', onResponse)
|
||||
request.removeListener('timeout', onTimeout)
|
||||
request.removeListener('error', onError)
|
||||
request.removeListener('abort', onAbort)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: write a better closing logic
|
||||
|
||||
222
lib/Transport.js
@ -21,7 +21,7 @@
|
||||
|
||||
const debug = require('debug')('elasticsearch')
|
||||
const os = require('os')
|
||||
const { gzip, createGzip } = require('zlib')
|
||||
const { gzip, unzip, createGzip } = require('zlib')
|
||||
const ms = require('ms')
|
||||
const {
|
||||
ConnectionError,
|
||||
@ -174,37 +174,40 @@ class Transport {
|
||||
request = meta.connection.request(params, onResponse)
|
||||
}
|
||||
|
||||
const onResponse = (err, response) => {
|
||||
if (err !== null) {
|
||||
if (err.name !== 'RequestAbortedError') {
|
||||
// if there is an error in the connection
|
||||
// let's mark the connection as dead
|
||||
this.connectionPool.markDead(meta.connection)
|
||||
const onConnectionError = (err) => {
|
||||
if (err.name !== 'RequestAbortedError') {
|
||||
// if there is an error in the connection
|
||||
// let's mark the connection as dead
|
||||
this.connectionPool.markDead(meta.connection)
|
||||
|
||||
if (this.sniffOnConnectionFault === true) {
|
||||
this.sniff({
|
||||
reason: Transport.sniffReasons.SNIFF_ON_CONNECTION_FAULT,
|
||||
requestId: meta.request.id
|
||||
})
|
||||
}
|
||||
|
||||
// retry logic
|
||||
if (meta.attempts < maxRetries) {
|
||||
meta.attempts++
|
||||
debug(`Retrying request, there are still ${maxRetries - meta.attempts} attempts`, params)
|
||||
makeRequest()
|
||||
return
|
||||
}
|
||||
if (this.sniffOnConnectionFault === true) {
|
||||
this.sniff({
|
||||
reason: Transport.sniffReasons.SNIFF_ON_CONNECTION_FAULT,
|
||||
requestId: meta.request.id
|
||||
})
|
||||
}
|
||||
|
||||
err.meta = result
|
||||
this.emit('response', err, result)
|
||||
return callback(err, result)
|
||||
// retry logic
|
||||
if (meta.attempts < maxRetries) {
|
||||
meta.attempts++
|
||||
debug(`Retrying request, there are still ${maxRetries - meta.attempts} attempts`, params)
|
||||
makeRequest()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const { statusCode, headers } = response
|
||||
result.statusCode = statusCode
|
||||
result.headers = headers
|
||||
err.meta = result
|
||||
this.emit('response', err, result)
|
||||
return callback(err, result)
|
||||
}
|
||||
|
||||
const onResponse = (err, response) => {
|
||||
if (err !== null) {
|
||||
return onConnectionError(err)
|
||||
}
|
||||
|
||||
result.statusCode = response.statusCode
|
||||
result.headers = response.headers
|
||||
|
||||
if (options.asStream === true) {
|
||||
result.body = response
|
||||
@ -213,74 +216,109 @@ class Transport {
|
||||
return
|
||||
}
|
||||
|
||||
var payload = ''
|
||||
// collect the payload
|
||||
response.setEncoding('utf8')
|
||||
response.on('data', chunk => { payload += chunk })
|
||||
/* istanbul ignore next */
|
||||
response.on('error', err => {
|
||||
const error = new ConnectionError(err.message, result)
|
||||
const contentEncoding = (result.headers['content-encoding'] || '').toLowerCase()
|
||||
const isCompressed = contentEncoding.indexOf('gzip') > -1 || contentEncoding.indexOf('deflate') > -1
|
||||
// if the response is compressed, we must handle it
|
||||
// as buffer for allowing decompression later
|
||||
let payload = isCompressed ? [] : ''
|
||||
const onData = isCompressed
|
||||
? chunk => { payload.push(chunk) }
|
||||
: chunk => { payload += chunk }
|
||||
const onEnd = err => {
|
||||
response.removeListener('data', onData)
|
||||
response.removeListener('end', onEnd)
|
||||
response.removeListener('error', onEnd)
|
||||
response.removeListener('aborted', onAbort)
|
||||
|
||||
if (err) {
|
||||
return onConnectionError(new ConnectionError(err.message))
|
||||
}
|
||||
|
||||
if (isCompressed) {
|
||||
unzip(Buffer.concat(payload), onBody)
|
||||
} else {
|
||||
onBody(null, payload)
|
||||
}
|
||||
}
|
||||
|
||||
const onAbort = () => {
|
||||
response.destroy()
|
||||
onEnd(new Error('Response aborted while reading the body'))
|
||||
}
|
||||
|
||||
if (!isCompressed) {
|
||||
response.setEncoding('utf8')
|
||||
}
|
||||
response.on('data', onData)
|
||||
response.on('error', onEnd)
|
||||
response.on('end', onEnd)
|
||||
response.on('aborted', onAbort)
|
||||
}
|
||||
|
||||
const onBody = (err, payload) => {
|
||||
if (err) {
|
||||
this.emit('response', err, result)
|
||||
return callback(err, result)
|
||||
}
|
||||
if (Buffer.isBuffer(payload)) {
|
||||
payload = payload.toString()
|
||||
}
|
||||
const isHead = params.method === 'HEAD'
|
||||
// we should attempt the payload deserialization only if:
|
||||
// - a `content-type` is defined and is equal to `application/json`
|
||||
// - the request is not a HEAD request
|
||||
// - the payload is not an empty string
|
||||
if (result.headers['content-type'] !== undefined &&
|
||||
result.headers['content-type'].indexOf('application/json') > -1 &&
|
||||
isHead === false &&
|
||||
payload !== ''
|
||||
) {
|
||||
try {
|
||||
result.body = this.serializer.deserialize(payload)
|
||||
} catch (err) {
|
||||
this.emit('response', err, result)
|
||||
return callback(err, result)
|
||||
}
|
||||
} else {
|
||||
// cast to boolean if the request method was HEAD
|
||||
result.body = isHead === true ? true : payload
|
||||
}
|
||||
|
||||
// we should ignore the statusCode if the user has configured the `ignore` field with
|
||||
// the statusCode we just got or if the request method is HEAD and the statusCode is 404
|
||||
const ignoreStatusCode = (Array.isArray(options.ignore) && options.ignore.indexOf(result.statusCode) > -1) ||
|
||||
(isHead === true && result.statusCode === 404)
|
||||
|
||||
if (ignoreStatusCode === false &&
|
||||
(result.statusCode === 502 || result.statusCode === 503 || result.statusCode === 504)) {
|
||||
// if the statusCode is 502/3/4 we should run our retry strategy
|
||||
// and mark the connection as dead
|
||||
this.connectionPool.markDead(meta.connection)
|
||||
// retry logic (we should not retry on "429 - Too Many Requests")
|
||||
if (meta.attempts < maxRetries && result.statusCode !== 429) {
|
||||
meta.attempts++
|
||||
debug(`Retrying request, there are still ${maxRetries - meta.attempts} attempts`, params)
|
||||
makeRequest()
|
||||
return
|
||||
}
|
||||
} else {
|
||||
// everything has worked as expected, let's mark
|
||||
// the connection as alive (or confirm it)
|
||||
this.connectionPool.markAlive(meta.connection)
|
||||
}
|
||||
|
||||
if (ignoreStatusCode === false && result.statusCode >= 400) {
|
||||
const error = new ResponseError(result)
|
||||
this.emit('response', error, result)
|
||||
callback(error, result)
|
||||
})
|
||||
response.on('end', () => {
|
||||
const isHead = params.method === 'HEAD'
|
||||
// we should attempt the payload deserialization only if:
|
||||
// - a `content-type` is defined and is equal to `application/json`
|
||||
// - the request is not a HEAD request
|
||||
// - the payload is not an empty string
|
||||
if (headers['content-type'] !== undefined &&
|
||||
headers['content-type'].indexOf('application/json') > -1 &&
|
||||
isHead === false &&
|
||||
payload !== ''
|
||||
) {
|
||||
try {
|
||||
result.body = this.serializer.deserialize(payload)
|
||||
} catch (err) {
|
||||
this.emit('response', err, result)
|
||||
return callback(err, result)
|
||||
}
|
||||
} else {
|
||||
// cast to boolean if the request method was HEAD
|
||||
result.body = isHead === true ? true : payload
|
||||
} else {
|
||||
// cast to boolean if the request method was HEAD
|
||||
if (isHead === true && result.statusCode === 404) {
|
||||
result.body = false
|
||||
}
|
||||
|
||||
// we should ignore the statusCode if the user has configured the `ignore` field with
|
||||
// the statusCode we just got or if the request method is HEAD and the statusCode is 404
|
||||
const ignoreStatusCode = (Array.isArray(options.ignore) && options.ignore.indexOf(statusCode) > -1) ||
|
||||
(isHead === true && statusCode === 404)
|
||||
|
||||
if (ignoreStatusCode === false &&
|
||||
(statusCode === 502 || statusCode === 503 || statusCode === 504)) {
|
||||
// if the statusCode is 502/3/4 we should run our retry strategy
|
||||
// and mark the connection as dead
|
||||
this.connectionPool.markDead(meta.connection)
|
||||
// retry logic (we should not retry on "429 - Too Many Requests")
|
||||
if (meta.attempts < maxRetries && statusCode !== 429) {
|
||||
meta.attempts++
|
||||
debug(`Retrying request, there are still ${maxRetries - meta.attempts} attempts`, params)
|
||||
makeRequest()
|
||||
return
|
||||
}
|
||||
} else {
|
||||
// everything has worked as expected, let's mark
|
||||
// the connection as alive (or confirm it)
|
||||
this.connectionPool.markAlive(meta.connection)
|
||||
}
|
||||
|
||||
if (ignoreStatusCode === false && statusCode >= 400) {
|
||||
const error = new ResponseError(result)
|
||||
this.emit('response', error, result)
|
||||
callback(error, result)
|
||||
} else {
|
||||
// cast to boolean if the request method was HEAD
|
||||
if (isHead === true && statusCode === 404) {
|
||||
result.body = false
|
||||
}
|
||||
this.emit('response', null, result)
|
||||
callback(null, result)
|
||||
}
|
||||
})
|
||||
this.emit('response', null, result)
|
||||
callback(null, result)
|
||||
}
|
||||
}
|
||||
|
||||
const headers = Object.assign({}, this.headers, lowerCaseHeaders(options.headers))
|
||||
|
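For reference, the hunk above switches the transport to collecting compressed
responses as Buffer chunks and inflating them with `zlib.unzip`. A minimal
self-contained sketch of that buffer-then-unzip strategy (using `into-stream`,
which the test suite already depends on, to stand in for the HTTP response):

[source,js]
----
const { gzipSync, unzip } = require('zlib')
const intoStream = require('into-stream')

// pretend this is a gzip-encoded response body arriving in chunks
const compressed = gzipSync(JSON.stringify({ hello: 'world' }))
const chunks = []
intoStream(compressed)
  .on('data', chunk => { chunks.push(chunk) })
  .on('end', () => {
    // zlib.unzip autodetects gzip and deflate, so one call covers both encodings
    unzip(Buffer.concat(chunks), (err, body) => {
      if (err) throw err
      console.log(JSON.parse(body.toString())) // { hello: 'world' }
    })
  })
----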
||||
@ -11,7 +11,7 @@
"./": "./"
},
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
"version": "7.7.1",
"version": "7.10.0",
"keywords": [
"elasticsearch",
"elastic",
@ -75,7 +75,6 @@
},
"dependencies": {
"debug": "^4.1.1",
"decompress-response": "^4.2.0",
"hpagent": "^0.1.1",
"ms": "^2.1.1",
"pump": "^3.0.0",

@ -279,20 +279,26 @@ function build (opts = {}) {
|
||||
// eg: 'Basic ${auth}' we search the stashed value 'auth'
|
||||
// and the resulting value will be 'Basic valueOfAuth'
|
||||
if (typeof val === 'string' && val.includes('${')) {
|
||||
const start = val.indexOf('${')
|
||||
const end = val.indexOf('}', val.indexOf('${'))
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = stash.get(stashedKey)
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
while (obj[key].includes('${')) {
|
||||
const val = obj[key]
|
||||
const start = val.indexOf('${')
|
||||
const end = val.indexOf('}', val.indexOf('${'))
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = stash.get(stashedKey)
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// handle json strings, eg: '{"hello":"$world"}'
|
||||
if (typeof val === 'string' && val.includes('"$')) {
|
||||
const start = val.indexOf('"$')
|
||||
const end = val.indexOf('"', start + 1)
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = '"' + stash.get(stashedKey) + '"'
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
while (obj[key].includes('"$')) {
|
||||
const val = obj[key]
|
||||
const start = val.indexOf('"$')
|
||||
const end = val.indexOf('"', start + 1)
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = '"' + stash.get(stashedKey) + '"'
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// if the key value is a string, and the string includes '$'
|
||||
|
||||
@ -21,7 +21,7 @@
|
||||
|
||||
const { test } = require('tap')
|
||||
const { URL } = require('url')
|
||||
const { Client, ConnectionPool, Transport } = require('../../index')
|
||||
const { Client, ConnectionPool, Transport, errors } = require('../../index')
|
||||
const { CloudConnectionPool } = require('../../lib/pool')
|
||||
const { buildServer } = require('../utils')
|
||||
|
||||
@ -1191,3 +1191,55 @@ test('name property as symbol', t => {
|
||||
|
||||
t.strictEqual(client.name, symbol)
|
||||
})
|
||||
|
||||
// The nodejs http agent will try to wait for the whole
|
||||
// body to arrive before closing the request, so this
|
||||
// test might take some time.
|
||||
test('Bad content length', t => {
|
||||
t.plan(3)
|
||||
|
||||
let count = 0
|
||||
function handler (req, res) {
|
||||
count += 1
|
||||
const body = JSON.stringify({ hello: 'world' })
|
||||
res.setHeader('Content-Type', 'application/json;utf=8')
|
||||
res.setHeader('Content-Length', body.length + '')
|
||||
res.end(body.slice(0, -5))
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const client = new Client({ node: `http://localhost:${port}`, maxRetries: 1 })
|
||||
client.info((err, { body }) => {
|
||||
t.ok(err instanceof errors.ConnectionError)
|
||||
t.is(err.message, 'Response aborted while reading the body')
|
||||
t.strictEqual(count, 2)
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Socket destroyed while reading the body', t => {
|
||||
t.plan(3)
|
||||
|
||||
let count = 0
|
||||
function handler (req, res) {
|
||||
count += 1
|
||||
const body = JSON.stringify({ hello: 'world' })
|
||||
res.setHeader('Content-Type', 'application/json;utf=8')
|
||||
res.setHeader('Content-Length', body.length + '')
|
||||
res.write(body.slice(0, -5))
|
||||
setTimeout(() => {
|
||||
res.socket.destroy()
|
||||
}, 500)
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const client = new Client({ node: `http://localhost:${port}`, maxRetries: 1 })
|
||||
client.info((err, { body }) => {
|
||||
t.ok(err instanceof errors.ConnectionError)
|
||||
t.is(err.message, 'Response aborted while reading the body')
|
||||
t.strictEqual(count, 2)
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -21,7 +21,6 @@
|
||||
|
||||
const { test } = require('tap')
|
||||
const { inspect } = require('util')
|
||||
const { createGzip, createDeflate } = require('zlib')
|
||||
const { URL } = require('url')
|
||||
const { Agent } = require('http')
|
||||
const hpagent = require('hpagent')
|
||||
@ -400,90 +399,6 @@ test('Send body as stream', t => {
|
||||
})
|
||||
})
|
||||
|
||||
test('Should handle compression', t => {
|
||||
t.test('gzip', t => {
|
||||
t.plan(3)
|
||||
|
||||
function handler (req, res) {
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'application/json;utf=8',
|
||||
'Content-Encoding': 'gzip'
|
||||
})
|
||||
intoStream(JSON.stringify({ hello: 'world' }))
|
||||
.pipe(createGzip())
|
||||
.pipe(res)
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const connection = new Connection({
|
||||
url: new URL(`http://localhost:${port}`)
|
||||
})
|
||||
connection.request({
|
||||
path: '/hello',
|
||||
method: 'GET'
|
||||
}, (err, res) => {
|
||||
t.error(err)
|
||||
|
||||
t.match(res.headers, {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-encoding': 'gzip'
|
||||
})
|
||||
|
||||
var payload = ''
|
||||
res.setEncoding('utf8')
|
||||
res.on('data', chunk => { payload += chunk })
|
||||
res.on('error', err => t.fail(err))
|
||||
res.on('end', () => {
|
||||
t.deepEqual(JSON.parse(payload), { hello: 'world' })
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('deflate', t => {
|
||||
t.plan(3)
|
||||
|
||||
function handler (req, res) {
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'application/json;utf=8',
|
||||
'Content-Encoding': 'deflate'
|
||||
})
|
||||
intoStream(JSON.stringify({ hello: 'world' }))
|
||||
.pipe(createDeflate())
|
||||
.pipe(res)
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const connection = new Connection({
|
||||
url: new URL(`http://localhost:${port}`)
|
||||
})
|
||||
connection.request({
|
||||
path: '/hello',
|
||||
method: 'GET'
|
||||
}, (err, res) => {
|
||||
t.error(err)
|
||||
|
||||
t.match(res.headers, {
|
||||
'content-type': 'application/json;utf=8',
|
||||
'content-encoding': 'deflate'
|
||||
})
|
||||
|
||||
var payload = ''
|
||||
res.setEncoding('utf8')
|
||||
res.on('data', chunk => { payload += chunk })
|
||||
res.on('error', err => t.fail(err))
|
||||
res.on('end', () => {
|
||||
t.deepEqual(JSON.parse(payload), { hello: 'world' })
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Should not close a connection if there are open requests', t => {
|
||||
t.plan(4)
|
||||
|
||||
|
||||
@ -22,7 +22,7 @@
|
||||
const { test } = require('tap')
|
||||
const { URL } = require('url')
|
||||
const FakeTimers = require('@sinonjs/fake-timers')
|
||||
const { createGunzip } = require('zlib')
|
||||
const { createGunzip, gzipSync } = require('zlib')
|
||||
const os = require('os')
|
||||
const intoStream = require('into-stream')
|
||||
const {
|
||||
@ -1665,13 +1665,17 @@ test('Should cast to boolean HEAD request', t => {
|
||||
})
|
||||
|
||||
test('Suggest compression', t => {
|
||||
t.plan(2)
|
||||
t.plan(3)
|
||||
function handler (req, res) {
|
||||
t.match(req.headers, {
|
||||
'accept-encoding': 'gzip,deflate'
|
||||
})
|
||||
|
||||
const body = gzipSync(JSON.stringify({ hello: 'world' }))
|
||||
res.setHeader('Content-Type', 'application/json;utf=8')
|
||||
res.end(JSON.stringify({ hello: 'world' }))
|
||||
res.setHeader('Content-Encoding', 'gzip')
|
||||
res.setHeader('Content-Length', Buffer.byteLength(body))
|
||||
res.end(body)
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
@ -1694,6 +1698,46 @@ test('Suggest compression', t => {
|
||||
path: '/hello'
|
||||
}, (err, { body }) => {
|
||||
t.error(err)
|
||||
t.deepEqual(body, { hello: 'world' })
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('Broken compression', t => {
|
||||
t.plan(2)
|
||||
function handler (req, res) {
|
||||
t.match(req.headers, {
|
||||
'accept-encoding': 'gzip,deflate'
|
||||
})
|
||||
|
||||
const body = gzipSync(JSON.stringify({ hello: 'world' }))
|
||||
res.setHeader('Content-Type', 'application/json;utf=8')
|
||||
res.setHeader('Content-Encoding', 'gzip')
|
||||
// we are not setting the content length on purpose
|
||||
res.end(body.slice(0, -5))
|
||||
}
|
||||
|
||||
buildServer(handler, ({ port }, server) => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection(`http://localhost:${port}`)
|
||||
|
||||
const transport = new Transport({
|
||||
emit: () => {},
|
||||
connectionPool: pool,
|
||||
serializer: new Serializer(),
|
||||
maxRetries: 3,
|
||||
requestTimeout: 30000,
|
||||
sniffInterval: false,
|
||||
sniffOnStart: false,
|
||||
suggestCompression: true
|
||||
})
|
||||
|
||||
transport.request({
|
||||
method: 'GET',
|
||||
path: '/hello'
|
||||
}, (err, { body }) => {
|
||||
t.ok(err)
|
||||
server.stop()
|
||||
})
|
||||
})
|
||||
|
||||
@ -109,7 +109,7 @@ class MockConnectionSniff extends Connection {
|
||||
'content-type': 'application/json;utf=8',
|
||||
date: new Date().toISOString(),
|
||||
connection: 'keep-alive',
|
||||
'content-length': '205'
|
||||
'content-length': '191'
|
||||
}
|
||||
process.nextTick(() => {
|
||||
if (!aborted) {