Compare commits


11 Commits
7.x ... 7.15

21 changed files with 65 additions and 564 deletions

View File

@@ -1,6 +1,6 @@
--- ---
STACK_VERSION: STACK_VERSION:
- 7.x-SNAPSHOT - 7.15.0-SNAPSHOT
NODE_JS_VERSION: NODE_JS_VERSION:
- 16 - 16

View File

@@ -61,7 +61,7 @@ jobs:
- name: Runs Elasticsearch - name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master uses: elastic/elastic-github-actions/elasticsearch@master
with: with:
stack-version: 7.x-SNAPSHOT stack-version: 7.15.0-SNAPSHOT
- name: Use Node.js ${{ matrix.node-version }} - name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1 uses: actions/setup-node@v1
@@ -93,7 +93,7 @@ jobs:
- name: Runs Elasticsearch - name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master uses: elastic/elastic-github-actions/elasticsearch@master
with: with:
stack-version: 8.0.0-SNAPSHOT stack-version: 7.15.0-SNAPSHOT
- name: Use Node.js 14.x - name: Use Node.js 14.x
uses: actions/setup-node@v1 uses: actions/setup-node@v1
@@ -119,27 +119,6 @@ jobs:
npm start --prefix test/bundlers/rollup-test npm start --prefix test/bundlers/rollup-test
npm start --prefix test/bundlers/webpack-test npm start --prefix test/bundlers/webpack-test
mock-support:
name: Mock support
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- name: Install
run: |
npm install
npm install --prefix test/mock
- name: Run test
run: |
npm test --prefix test/mock
code-coverage: code-coverage:
name: Code coverage name: Code coverage
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -28,7 +28,7 @@ npm install @elastic/elasticsearch
### Node.js support ### Node.js support
NOTE: The minimum supported version of Node.js is `v12`. NOTE: The minimum supported version of Node.js is `v10`.
The client versioning follows the Elastic Stack versioning, this means that The client versioning follows the Elastic Stack versioning, this means that
major, minor, and patch releases are done following a precise schedule that major, minor, and patch releases are done following a precise schedule that
@@ -49,13 +49,13 @@ of `^7.10.0`).
| Node.js Version | Node.js EOL date | End of support | | Node.js Version | Node.js EOL date | End of support |
| --------------- |------------------| ---------------------- | | --------------- |------------------| ---------------------- |
| `8.x` | `December 2019` | `7.11` (early 2021) | | `8.x` | `December 2019` | `7.11` (early 2021) |
| `10.x` | `April 2021` | `7.12` (mid 2021) | | `10.x` | `April 2021` | `7.12` (mid 2021) |
### Compatibility ### Compatibility
Language clients are forward compatible; meaning that clients support communicating with greater or equal minor versions of Elasticsearch. Language clients are forward compatible; meaning that clients support communicating with greater minor versions of Elasticsearch.
Elasticsearch language clients are only backwards compatible with default distributions and without guarantees made. Elastic language clients are also backwards compatible with lesser supported minor Elasticsearch versions.
| Elasticsearch Version | Client Version | | Elasticsearch Version | Client Version |
| --------------------- |----------------| | --------------------- |----------------|

View File

@@ -31,7 +31,7 @@ function NodesApi (transport, ConfigurationError) {
this[kConfigurationError] = ConfigurationError this[kConfigurationError] = ConfigurationError
} }
NodesApi.prototype.clearMeteringArchive = function nodesClearMeteringArchiveApi (params, options, callback) { NodesApi.prototype.clearRepositoriesMeteringArchive = function nodesClearRepositoriesMeteringArchiveApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback) ;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters // check required parameters
@@ -68,7 +68,7 @@ NodesApi.prototype.clearMeteringArchive = function nodesClearMeteringArchiveApi
return this.transport.request(request, options, callback) return this.transport.request(request, options, callback)
} }
NodesApi.prototype.getMeteringInfo = function nodesGetMeteringInfoApi (params, options, callback) { NodesApi.prototype.getRepositoriesMeteringInfo = function nodesGetRepositoriesMeteringInfoApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback) ;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters // check required parameters
@@ -259,8 +259,8 @@ NodesApi.prototype.usage = function nodesUsageApi (params, options, callback) {
} }
Object.defineProperties(NodesApi.prototype, { Object.defineProperties(NodesApi.prototype, {
clear_metering_archive: { get () { return this.clearMeteringArchive } }, clear_repositories_metering_archive: { get () { return this.clearRepositoriesMeteringArchive } },
get_metering_info: { get () { return this.getMeteringInfo } }, get_repositories_metering_info: { get () { return this.getRepositoriesMeteringInfo } },
hot_threads: { get () { return this.hotThreads } }, hot_threads: { get () { return this.hotThreads } },
reload_secure_settings: { get () { return this.reloadSecureSettings } } reload_secure_settings: { get () { return this.reloadSecureSettings } }
}) })
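For orientation, a minimal usage sketch of the renamed repositories-metering API on the 7.15 side; the node address and `node_id` value are placeholders, not taken from this diff:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' }) // placeholder node

async function run () {
  // camelCase method; the snake_case alias defined above maps to the same function
  const { body } = await client.nodes.getRepositoriesMeteringInfo({ node_id: 'node-1' })
  console.log(body)
}

run().catch(console.log)
----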

View File

@@ -29,17 +29,18 @@ const snakeCase = { ignoreUnavailable: 'ignore_unavailable', expandWildcards: 'e
function openPointInTimeApi (params, options, callback) { function openPointInTimeApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback) ;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params.index == null) {
const err = new this[kConfigurationError]('Missing required parameter: index')
return handleError(err, callback)
}
let { method, body, index, ...querystring } = params let { method, body, index, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring) querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
let path = '' let path = ''
if ((index) != null) { if (method == null) method = 'POST'
if (method == null) method = 'POST' path = '/' + encodeURIComponent(index) + '/' + '_pit'
path = '/' + encodeURIComponent(index) + '/' + '_pit'
} else {
if (method == null) method = 'POST'
path = '/' + '_pit'
}
// build request object // build request object
const request = { const request = {

View File

@@ -2025,12 +2025,12 @@ export interface Mtermvectors<T = RequestBody> extends Generic {
body?: T; body?: T;
} }
export interface NodesClearMeteringArchive extends Generic { export interface NodesClearRepositoriesMeteringArchive extends Generic {
node_id: string | string[]; node_id: string | string[];
max_archive_version: number; max_archive_version: number;
} }
export interface NodesGetMeteringInfo extends Generic { export interface NodesGetRepositoriesMeteringInfo extends Generic {
node_id: string | string[]; node_id: string | string[];
} }
@@ -2079,7 +2079,7 @@ export interface NodesUsage extends Generic {
} }
export interface OpenPointInTime extends Generic { export interface OpenPointInTime extends Generic {
index?: string | string[]; index: string | string[];
preference?: string; preference?: string;
routing?: string; routing?: string;
ignore_unavailable?: boolean; ignore_unavailable?: boolean;
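As a sketch of what the stricter signature on the 7.15 side means for callers (`index` is required and the request is always sent to `POST /<index>/_pit`); the index name and `keep_alive` value below are illustrative placeholders, not taken from this diff:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' }) // placeholder node

async function run () {
  // Omitting `index` now fails with a ConfigurationError instead of
  // falling back to POST /_pit.
  const { body } = await client.openPointInTime({
    index: 'my-index',
    keep_alive: '1m'
  })
  console.log(body.id)
}

run().catch(console.log)
----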

View File

@@ -259,14 +259,6 @@ _Default:_ `false`
|`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. + |`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
_Default:_ `null` _Default:_ `null`
|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it is higher, the request is aborted. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
_Default:_ `null`
|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it is higher, the request is aborted. It cannot be higher than buffer.constants.MAX_LENGTH +
_Default:_ `null`
|=== |===
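For reference, a minimal sketch of the two options this hunk removes (they exist only on the 7.x side of this comparison); the node address and size limits are arbitrary example values:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200', // placeholder node
  maxResponseSize: 10000000, // uncompressed bytes, must not exceed buffer.constants.MAX_STRING_LENGTH
  maxCompressedResponseSize: 2000000 // compressed bytes, must not exceed buffer.constants.MAX_LENGTH
})
----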
[discrete] [discrete]

View File

@@ -1,60 +1,6 @@
[[changelog-client]] [[changelog-client]]
== Release notes == Release notes
[discrete]
=== 7.15.0
[discrete]
==== Features
[discrete]
===== Support for Elasticsearch `v7.15`
You can find all the API changes
https://www.elastic.co/guide/en/elasticsearch/reference/7.15/release-notes-7.15.0.html[here].
[discrete]
===== Support mapbox content type https://github.com/elastic/elasticsearch-js/pull/1500[#1500]
If you call an API that returns a mapbox content type, the response body will be a buffer.
[discrete]
===== Support CA fingerprint validation https://github.com/elastic/elasticsearch-js/pull/1499[#1499]
You can configure the client to only trust certificates that are signed by a specific CA certificate (CA certificate pinning) by providing a `caFingerprint` option. The client will then verify that the fingerprint of the CA certificate that has signed the server's certificate matches the supplied value.
You must configure a SHA256 digest.
[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
node: 'https://example.com',
auth: { ... },
// the fingerprint (SHA256) of the CA certificate that is used to sign the certificate that the Elasticsearch node presents for TLS.
caFingerprint: '20:0D:CA:FA:76:...',
ssl: {
// might be required if it's a self-signed certificate
rejectUnauthorized: false
}
})
----
[discrete]
===== Show the body as string if the response error can't be read as ES error https://github.com/elastic/elasticsearch-js/pull/1509[#1509]
Useful if the errored response does not come from Elasticsearch but, for example, from a proxy in the middle.
[discrete]
===== Always display request params and options in request event https://github.com/elastic/elasticsearch-js/pull/1531[#1531]
In some edge cases the params and options weren't available in observability events; now they are always defined.
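For illustration, a small sketch of consuming the `request` event this entry refers to; the node address is a placeholder, and the payload shape shown is the documented `meta.request` object:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' }) // placeholder node

client.on('request', (err, result) => {
  // With this change, params and options are defined even for requests
  // that fail very early.
  const { params, options } = result.meta.request
  console.log({ err: err && err.message, params, options })
})
----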
[discrete]
===== Always emit request aborted event https://github.com/elastic/elasticsearch-js/pull/1534[#1534]
If the client is busy running an async operation, the `.abort()` call might be executed before the actual request is sent. In such a case the error used to be swallowed; now it is always emitted, either in the `request` or the `response` event.
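A minimal sketch of the abort flow described above; the index name is illustrative:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' }) // placeholder node

const promise = client.search({ index: 'my-index', body: { query: { match_all: {} } } })
// Even if this runs before the request is actually sent, the abort is
// no longer swallowed and shows up in the request/response events.
promise.abort()

promise.catch(err => console.log(err.name)) // RequestAbortedError
----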
[discrete] [discrete]
=== 7.14.0 === 7.14.0

View File

@@ -8,7 +8,6 @@ This page contains the information you need to connect and use the Client with
* <<auth-reference, Authentication options>> * <<auth-reference, Authentication options>>
* <<client-usage, Using the client>> * <<client-usage, Using the client>>
* <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
* <<client-connect-proxy, Connecting through a proxy>> * <<client-connect-proxy, Connecting through a proxy>>
* <<client-error-handling, Handling errors>> * <<client-error-handling, Handling errors>>
* <<product-check, Automatic product check>> * <<product-check, Automatic product check>>
@@ -61,12 +60,11 @@ const client = new Client({
==== ApiKey authentication ==== ApiKey authentication
You can use the You can use the
{ref-7x}/security-api-create-api-key.html[ApiKey] https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey]
authentication by passing the `apiKey` parameter via the `auth` option. The authentication by passing the `apiKey` parameter via the `auth` option. The
`apiKey` parameter can be either a base64 encoded string or an object with the `apiKey` parameter can be either a base64 encoded string or an object with the
values that you can obtain from the values that you can obtain from the
{ref-7x}/security-api-create-api-key.html[create api key endpoint]. https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[create api key endpoint].
NOTE: If you provide both basic authentication credentials and the ApiKey NOTE: If you provide both basic authentication credentials and the ApiKey
configuration, the ApiKey takes precedence. configuration, the ApiKey takes precedence.
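To ground the paragraph above, a minimal sketch of both `apiKey` forms; the credentials and node address are placeholders:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

// base64-encoded "id:api_key" string
const client = new Client({
  node: 'https://localhost:9200',
  auth: { apiKey: 'base64EncodedKey==' }
})

// or the object returned by the create API key endpoint
const clientFromObject = new Client({
  node: 'https://localhost:9200',
  auth: { apiKey: { id: 'foo', api_key: 'bar' } }
})
----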
@@ -419,87 +417,8 @@ _Default:_ `null`
|`context` |`context`
|`any` - Custom object per request. _(you can use it to pass data to the clients events)_ + |`any` - Custom object per request. _(you can use it to pass data to the clients events)_ +
_Default:_ `null` _Default:_ `null`
|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it is higher, the request is aborted. It cannot be higher than buffer.constants.MAX_STRING_LENGTH +
_Default:_ `null`
|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it is higher, the request is aborted. It cannot be higher than buffer.constants.MAX_LENGTH +
_Default:_ `null`
|=== |===
[discrete]
[[client-faas-env]]
=== Using the Client in a Function-as-a-Service Environment
This section illustrates the best practices for leveraging the {es} client in a Function-as-a-Service (FaaS) environment.
The most impactful optimization is to initialize the client outside of the function handler, in the global scope.
This practice not only improves performance but also enables background functionality such as https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[sniffing].
The following examples provide a skeleton for the best practices.
[discrete]
==== GCP Cloud Functions
[source,js]
----
'use strict'
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
// client initialisation
})
exports.testFunction = async function (req, res) {
// use the client
}
----
[discrete]
==== AWS Lambda
[source,js]
----
'use strict'
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
// client initialisation
})
exports.handler = async function (event, context) {
// use the client
}
----
[discrete]
==== Azure Functions
[source,js]
----
'use strict'
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
// client initialisation
})
module.exports = async function (context, req) {
// use the client
}
----
Resources used to assess these recommendations:
- https://cloud.google.com/functions/docs/bestpractices/tips#use_global_variables_to_reuse_objects_in_future_invocations[GCP Cloud Functions: Tips & Tricks]
- https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html[Best practices for working with AWS Lambda functions]
- https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python?tabs=azurecli-linux%2Capplication-level#global-variables[Azure Functions Python developer guide]
- https://docs.aws.amazon.com/lambda/latest/operatorguide/global-scope.html[AWS Lambda: Comparing the effect of global scope]
[discrete] [discrete]
[[client-connect-proxy]] [[client-connect-proxy]]

View File

@@ -1,13 +1,8 @@
= Elasticsearch JavaScript Client = Elasticsearch Node.js client
:branch: 7.16 :branch: 7.x
include::{asciidoc-dir}/../../shared/attributes.asciidoc[] include::{asciidoc-dir}/../../shared/attributes.asciidoc[]
// 7.x exists in this repo but not in stack repos
// This line overwrites the jsclient attribute so it can point to 7.x, but stack links can point to 7.16
// Remove this line when a 7.16 branch exists in this repo
:jsclient: https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/7.x
include::introduction.asciidoc[] include::introduction.asciidoc[]
include::installation.asciidoc[] include::installation.asciidoc[]
include::connecting.asciidoc[] include::connecting.asciidoc[]

View File

@@ -24,7 +24,7 @@ To learn more about the supported major versions, please refer to the
[[nodejs-support]] [[nodejs-support]]
=== Node.js support === Node.js support
NOTE: The minimum supported version of Node.js is `v12`. NOTE: The minimum supported version of Node.js is `v10`.
The client versioning follows the {stack} versioning, this means that The client versioning follows the {stack} versioning, this means that
major, minor, and patch releases are done following a precise schedule that major, minor, and patch releases are done following a precise schedule that
@@ -62,8 +62,12 @@ of `^7.10.0`).
[[js-compatibility-matrix]] [[js-compatibility-matrix]]
=== Compatibility matrix === Compatibility matrix
Language clients are forward compatible; meaning that clients support communicating with greater or equal minor versions of Elasticsearch. Elastic language clients are guaranteed to be able to communicate with Elasticsearch
Elasticsearch language clients are only backwards compatible with default distributions and without guarantees made. or Elastic solutions running on the same major version and greater or equal minor version.
Language clients are forward compatible; meaning that clients support communicating
with greater minor versions of Elasticsearch. Elastic language clients are not
guaranteed to be backwards compatible.
[%header,cols=2*] [%header,cols=2*]
|=== |===

View File

@@ -132,9 +132,6 @@ async function run () {
run().catch(console.log) run().catch(console.log)
---- ----
TIP: For an elaborate example of how to ingest data into Elastic Cloud,
refer to {cloud}/ec-getting-started-node-js.html[this page].
[discrete] [discrete]
==== Install multiple versions ==== Install multiple versions

View File

@@ -4132,8 +4132,8 @@ link:{ref}/indices-delete-index.html[Documentation] +
|`boolean` - Ignore if a wildcard expression resolves to no concrete indices (default: false) |`boolean` - Ignore if a wildcard expression resolves to no concrete indices (default: false)
|`expand_wildcards` or `expandWildcards` |`expand_wildcards` or `expandWildcards`
|`'open' \| 'closed' \| 'hidden' \| 'none' \| 'all'` - Whether wildcard expressions should get expanded to open or closed indices (default: open) + |`'open' \| 'closed' \| 'hidden' \| 'none' \| 'all'` - Whether wildcard expressions should get expanded to open, closed, or hidden indices +
_Default:_ `open` _Default:_ `open,closed`
|=== |===
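Given the differing defaults above, a sketch of passing `expand_wildcards` explicitly so the behaviour does not depend on the client version; the index pattern is a placeholder:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' }) // placeholder node

async function run () {
  await client.indices.delete({
    index: 'logs-2021-*',
    expand_wildcards: 'open,closed' // explicit, instead of relying on the default
  })
}

run().catch(console.log)
----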
@@ -8369,11 +8369,11 @@ _Default:_ `true`
|=== |===
[discrete] [discrete]
=== nodes.clearMeteringArchive === nodes.clearRepositoriesMeteringArchive
*Stability:* experimental *Stability:* experimental
[source,ts] [source,ts]
---- ----
client.nodes.clearMeteringArchive({ client.nodes.clearRepositoriesMeteringArchive({
node_id: string | string[], node_id: string | string[],
max_archive_version: number max_archive_version: number
}) })
@@ -8390,11 +8390,11 @@ link:{ref}/clear-repositories-metering-archive-api.html[Documentation] +
|=== |===
[discrete] [discrete]
=== nodes.getMeteringInfo === nodes.getRepositoriesMeteringInfo
*Stability:* experimental *Stability:* experimental
[source,ts] [source,ts]
---- ----
client.nodes.getMeteringInfo({ client.nodes.getRepositoriesMeteringInfo({
node_id: string | string[] node_id: string | string[]
}) })
---- ----

index.d.ts (vendored)
View File

@@ -119,8 +119,6 @@ interface ClientOptions {
}; };
disablePrototypePoisoningProtection?: boolean | 'proto' | 'constructor'; disablePrototypePoisoningProtection?: boolean | 'proto' | 'constructor';
caFingerprint?: string; caFingerprint?: string;
maxResponseSize?: number;
maxCompressedResponseSize?: number;
} }
declare class Client { declare class Client {
@@ -1928,22 +1926,22 @@ declare class Client {
mtermvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Mtermvectors<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback mtermvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Mtermvectors<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
mtermvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Mtermvectors<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback mtermvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Mtermvectors<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
nodes: { nodes: {
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesClearMeteringArchive, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>> clear_repositories_metering_archive<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesClearRepositoriesMeteringArchive, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback clear_repositories_metering_archive<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, callback: callbackFn<TResponse, TContext>): TransportRequestCallback clear_repositories_metering_archive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearRepositoriesMeteringArchive, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clear_metering_archive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback clear_repositories_metering_archive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearRepositoriesMeteringArchive, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesClearMeteringArchive, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>> clearRepositoriesMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesClearRepositoriesMeteringArchive, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback clearRepositoriesMeteringArchive<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, callback: callbackFn<TResponse, TContext>): TransportRequestCallback clearRepositoriesMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearRepositoriesMeteringArchive, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
clearMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearMeteringArchive, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback clearRepositoriesMeteringArchive<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesClearRepositoriesMeteringArchive, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_metering_info<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesGetMeteringInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>> get_repositories_metering_info<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesGetRepositoriesMeteringInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
get_metering_info<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback get_repositories_metering_info<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_metering_info<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, callback: callbackFn<TResponse, TContext>): TransportRequestCallback get_repositories_metering_info<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetRepositoriesMeteringInfo, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
get_metering_info<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback get_repositories_metering_info<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetRepositoriesMeteringInfo, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesGetMeteringInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>> getRepositoriesMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesGetRepositoriesMeteringInfo, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback getRepositoriesMeteringInfo<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, callback: callbackFn<TResponse, TContext>): TransportRequestCallback getRepositoriesMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetRepositoriesMeteringInfo, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
getMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetMeteringInfo, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback getRepositoriesMeteringInfo<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesGetRepositoriesMeteringInfo, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
hot_threads<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesHotThreads, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>> hot_threads<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.NodesHotThreads, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
hot_threads<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback hot_threads<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
hot_threads<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesHotThreads, callback: callbackFn<TResponse, TContext>): TransportRequestCallback hot_threads<TResponse = Record<string, any>, TContext = Context>(params: RequestParams.NodesHotThreads, callback: callbackFn<TResponse, TContext>): TransportRequestCallback

View File

@@ -21,7 +21,6 @@
const { EventEmitter } = require('events') const { EventEmitter } = require('events')
const { URL } = require('url') const { URL } = require('url')
const buffer = require('buffer')
const debug = require('debug')('elasticsearch') const debug = require('debug')('elasticsearch')
const Transport = require('./lib/Transport') const Transport = require('./lib/Transport')
const Connection = require('./lib/Connection') const Connection = require('./lib/Connection')
@@ -115,19 +114,9 @@ class Client extends ESAPI {
context: null, context: null,
proxy: null, proxy: null,
enableMetaHeader: true, enableMetaHeader: true,
disablePrototypePoisoningProtection: false, disablePrototypePoisoningProtection: false
maxResponseSize: null,
maxCompressedResponseSize: null
}, opts) }, opts)
if (options.maxResponseSize !== null && options.maxResponseSize > buffer.constants.MAX_STRING_LENGTH) {
throw new ConfigurationError(`The maxResponseSize cannot be bigger than ${buffer.constants.MAX_STRING_LENGTH}`)
}
if (options.maxCompressedResponseSize !== null && options.maxCompressedResponseSize > buffer.constants.MAX_LENGTH) {
throw new ConfigurationError(`The maxCompressedResponseSize cannot be bigger than ${buffer.constants.MAX_LENGTH}`)
}
if (options.caFingerprint !== null && isHttpConnection(opts.node || opts.nodes)) { if (options.caFingerprint !== null && isHttpConnection(opts.node || opts.nodes)) {
throw new ConfigurationError('You can\'t configure the caFingerprint with a http connection') throw new ConfigurationError('You can\'t configure the caFingerprint with a http connection')
} }
@@ -189,9 +178,7 @@ class Client extends ESAPI {
generateRequestId: options.generateRequestId, generateRequestId: options.generateRequestId,
name: options.name, name: options.name,
opaqueIdPrefix: options.opaqueIdPrefix, opaqueIdPrefix: options.opaqueIdPrefix,
context: options.context, context: options.context
maxResponseSize: options.maxResponseSize,
maxCompressedResponseSize: options.maxCompressedResponseSize
}) })
this.helpers = new Helpers({ this.helpers = new Helpers({

lib/Transport.d.ts (vendored)
View File

@@ -61,8 +61,6 @@ interface TransportOptions {
generateRequestId?: generateRequestIdFn; generateRequestId?: generateRequestIdFn;
name?: string; name?: string;
opaqueIdPrefix?: string; opaqueIdPrefix?: string;
maxResponseSize?: number;
maxCompressedResponseSize?: number;
} }
export interface RequestEvent<TResponse = Record<string, any>, TContext = Context> { export interface RequestEvent<TResponse = Record<string, any>, TContext = Context> {
@@ -115,8 +113,6 @@ export interface TransportRequestOptions {
context?: Context; context?: Context;
warnings?: string[]; warnings?: string[];
opaqueId?: string; opaqueId?: string;
maxResponseSize?: number;
maxCompressedResponseSize?: number;
} }
export interface TransportRequestCallback { export interface TransportRequestCallback {

View File

@@ -43,8 +43,6 @@ const MAX_STRING_LENGTH = buffer.constants.MAX_STRING_LENGTH
const kProductCheck = Symbol('product check') const kProductCheck = Symbol('product check')
const kApiVersioning = Symbol('api versioning') const kApiVersioning = Symbol('api versioning')
const kEventEmitter = Symbol('event emitter') const kEventEmitter = Symbol('event emitter')
const kMaxResponseSize = Symbol('max response size')
const kMaxCompressedResponseSize = Symbol('max compressed response size')
class Transport { class Transport {
constructor (opts) { constructor (opts) {
@@ -74,8 +72,6 @@ class Transport {
this[kProductCheck] = 0 // 0 = to be checked, 1 = checking, 2 = checked-ok, 3 checked-notok, 4 checked-nodefault this[kProductCheck] = 0 // 0 = to be checked, 1 = checking, 2 = checked-ok, 3 checked-notok, 4 checked-nodefault
this[kApiVersioning] = process.env.ELASTIC_CLIENT_APIVERSIONING === 'true' this[kApiVersioning] = process.env.ELASTIC_CLIENT_APIVERSIONING === 'true'
this[kEventEmitter] = new EventEmitter() this[kEventEmitter] = new EventEmitter()
this[kMaxResponseSize] = opts.maxResponseSize || MAX_STRING_LENGTH
this[kMaxCompressedResponseSize] = opts.maxCompressedResponseSize || MAX_BUFFER_LENGTH
this.nodeFilter = opts.nodeFilter || defaultNodeFilter this.nodeFilter = opts.nodeFilter || defaultNodeFilter
if (typeof opts.nodeSelector === 'function') { if (typeof opts.nodeSelector === 'function') {
@@ -166,8 +162,6 @@ class Transport {
? 0 ? 0
: (typeof options.maxRetries === 'number' ? options.maxRetries : this.maxRetries) : (typeof options.maxRetries === 'number' ? options.maxRetries : this.maxRetries)
const compression = options.compression !== undefined ? options.compression : this.compression const compression = options.compression !== undefined ? options.compression : this.compression
const maxResponseSize = options.maxResponseSize || this[kMaxResponseSize]
const maxCompressedResponseSize = options.maxCompressedResponseSize || this[kMaxCompressedResponseSize]
let request = { abort: noop } let request = { abort: noop }
const transportReturn = { const transportReturn = {
then (onFulfilled, onRejected) { then (onFulfilled, onRejected) {
@@ -250,15 +244,15 @@
/* istanbul ignore else */ /* istanbul ignore else */
if (result.headers['content-length'] !== undefined) { if (result.headers['content-length'] !== undefined) {
const contentLength = Number(result.headers['content-length']) const contentLength = Number(result.headers['content-length'])
if (isCompressed && contentLength > maxCompressedResponseSize) { if (isCompressed && contentLength > MAX_BUFFER_LENGTH) {
response.destroy() response.destroy()
return onConnectionError( return onConnectionError(
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed buffer (${maxCompressedResponseSize})`, result) new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed buffer (${MAX_BUFFER_LENGTH})`, result)
) )
} else if (contentLength > maxResponseSize) { } else if (contentLength > MAX_STRING_LENGTH) {
response.destroy() response.destroy()
return onConnectionError( return onConnectionError(
new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed string (${maxResponseSize})`, result) new RequestAbortedError(`The content length (${contentLength}) is bigger than the maximum allowed string (${MAX_STRING_LENGTH})`, result)
) )
} }
} }
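On the 7.x side that this hunk removes, the same limits can also be set per request through the transport options, as the tests further down in this diff exercise; a sketch with arbitrary values and a placeholder node:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'http://localhost:9200' }) // placeholder node

client.info({}, { maxResponseSize: 1000, maxCompressedResponseSize: 1000 }, (err, result) => {
  // A response whose content-length exceeds the limit is rejected
  // with a RequestAbortedError before the body is read.
  if (err) console.log(err.message)
})
----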

View File

@@ -6,14 +6,13 @@
"exports": { "exports": {
".": { ".": {
"require": "./index.js", "require": "./index.js",
"import": "./index.mjs", "import": "./index.mjs"
"types": "./index.d.ts"
}, },
"./": "./" "./": "./"
}, },
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html", "homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
"version": "7.16.0", "version": "7.15.0",
"versionCanary": "7.16.0-canary.4", "versionCanary": "7.15.0-canary.4",
"keywords": [ "keywords": [
"elasticsearch", "elasticsearch",
"elastic", "elastic",

View File

@@ -1,71 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
const { test } = require('tap')
const { Client, errors } = require('../../')
const Mock = require('@elastic/elasticsearch-mock')
test('Mock should work', async t => {
t.plan(1)
const mock = new Mock()
const client = new Client({
node: 'http://localhost:9200',
Connection: mock.getConnection()
})
mock.add({
method: 'GET',
path: '/_cat/indices'
}, () => {
return { status: 'ok' }
})
const response = await client.cat.indices()
t.same(response.body, { status: 'ok' })
})
test('Return an error', async t => {
t.plan(1)
const mock = new Mock()
const client = new Client({
node: 'http://localhost:9200',
Connection: mock.getConnection()
})
mock.add({
method: 'GET',
path: '/_cat/indices'
}, () => {
return new errors.ResponseError({
body: { errors: {}, status: 500 },
statusCode: 500
})
})
try {
await client.cat.indices()
t.fail('Should throw')
} catch (err) {
t.ok(err instanceof errors.ResponseError)
}
})

View File

@@ -1,18 +0,0 @@
{
"name": "mock",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "standard && tap index.js"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@elastic/elasticsearch": "file:../..",
"@elastic/elasticsearch-mock": "^0.3.1",
"standard": "^16.0.3",
"tap": "^15.0.9"
}
}

View File

@@ -1308,223 +1308,6 @@ test('Content length too big (string)', t => {
}) })
}) })
test('Content length too big custom (buffer)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-encoding': 'gzip',
'content-length': 1100,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxCompressedResponseSize: 1000
})
client.info((err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed buffer (1000)')
t.equal(result.meta.attempts, 0)
})
})
test('Content length too big custom (string)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-length': 1100,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxResponseSize: 1000
})
client.info((err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed string (1000)')
t.equal(result.meta.attempts, 0)
})
})
test('Content length too big custom option (buffer)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-encoding': 'gzip',
'content-length': 1100,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
client.info({}, { maxCompressedResponseSize: 1000 }, (err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed buffer (1000)')
t.equal(result.meta.attempts, 0)
})
})
test('Content length too big custom option (string)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-length': 1100,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})
client.info({}, { maxResponseSize: 1000 }, (err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed string (1000)')
t.equal(result.meta.attempts, 0)
})
})
test('Content length too big custom option override (buffer)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-encoding': 'gzip',
'content-length': 1100,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxCompressedResponseSize: 2000
})
client.info({}, { maxCompressedResponseSize: 1000 }, (err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed buffer (1000)')
t.equal(result.meta.attempts, 0)
})
})
test('Content length too big custom option override (string)', t => {
t.plan(4)
class MockConnection extends Connection {
request (params, callback) {
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = 200
stream.headers = {
'content-type': 'application/json;utf=8',
'content-length': 1100,
connection: 'keep-alive',
date: new Date().toISOString()
}
stream.on('close', () => t.pass('Stream destroyed'))
process.nextTick(callback, null, stream)
return { abort () {} }
}
}
const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection,
maxResponseSize: 2000
})
client.info({}, { maxResponseSize: 1000 }, (err, result) => {
t.ok(err instanceof errors.RequestAbortedError)
t.equal(err.message, 'The content length (1100) is bigger than the maximum allowed string (1000)')
t.equal(result.meta.attempts, 0)
})
})
test('maxResponseSize cannot be bigger than buffer.constants.MAX_STRING_LENGTH', t => {
t.plan(2)
try {
new Client({ // eslint-disable-line
node: 'http://localhost:9200',
maxResponseSize: buffer.constants.MAX_STRING_LENGTH + 10
})
t.fail('should throw')
} catch (err) {
t.ok(err instanceof errors.ConfigurationError)
t.equal(err.message, `The maxResponseSize cannot be bigger than ${buffer.constants.MAX_STRING_LENGTH}`)
}
})
test('maxCompressedResponseSize cannot be bigger than buffer.constants.MAX_STRING_LENGTH', t => {
t.plan(2)
try {
new Client({ // eslint-disable-line
node: 'http://localhost:9200',
maxCompressedResponseSize: buffer.constants.MAX_LENGTH + 10
})
t.fail('should throw')
} catch (err) {
t.ok(err instanceof errors.ConfigurationError)
t.equal(err.message, `The maxCompressedResponseSize cannot be bigger than ${buffer.constants.MAX_LENGTH}`)
}
})
test('Meta header enabled', t => { test('Meta header enabled', t => {
t.plan(2) t.plan(2)